H", tag_data[:2])[0]):
+ ifd_tag, typ, count, data = struct.unpack(
+ ">HHL4s", tag_data[i * 12 + 2 : (i + 1) * 12 + 2]
+ )
+ if ifd_tag == 0x1101:
+ # CameraInfo
+ (offset,) = struct.unpack(">L", data)
+ self.fp.seek(offset)
+
+ camerainfo = {"ModelID": self.fp.read(4)}
+
+ self.fp.read(4)
+ # Seconds since 2000
+ camerainfo["TimeStamp"] = i32le(self.fp.read(12))
+
+ self.fp.read(4)
+ camerainfo["InternalSerialNumber"] = self.fp.read(4)
+
+ self.fp.read(12)
+ parallax = self.fp.read(4)
+ handler = ImageFileDirectory_v2._load_dispatch[
+ TiffTags.FLOAT
+ ][1]
+ camerainfo["Parallax"] = handler(
+ ImageFileDirectory_v2(), parallax, False
+ )
+
+ self.fp.read(4)
+ camerainfo["Category"] = self.fp.read(2)
+
+ makernote = {0x1101: dict(self._fixup_dict(camerainfo))}
+ self._ifds[tag] = makernote
+ else:
+ # Interop
+ self._ifds[tag] = self._get_ifd_dict(tag_data)
+ ifd = self._ifds.get(tag, {})
+ if tag == ExifTags.IFD.Exif and self._hidden_data:
+ ifd = {
+ k: v
+ for (k, v) in ifd.items()
+ if k not in (ExifTags.IFD.Interop, ExifTags.IFD.Makernote)
+ }
+ return ifd
+
+ def hide_offsets(self):
+ for tag in (ExifTags.IFD.Exif, ExifTags.IFD.GPSInfo):
+ if tag in self:
+ self._hidden_data[tag] = self[tag]
+ del self[tag]
+
+ def __str__(self):
+ if self._info is not None:
+ # Load all keys into self._data
+ for tag in self._info:
+ self[tag]
+
+ return str(self._data)
+
+ def __len__(self):
+ keys = set(self._data)
+ if self._info is not None:
+ keys.update(self._info)
+ return len(keys)
+
+ def __getitem__(self, tag):
+ if self._info is not None and tag not in self._data and tag in self._info:
+ self._data[tag] = self._fixup(self._info[tag])
+ del self._info[tag]
+ return self._data[tag]
+
+ def __contains__(self, tag):
+ return tag in self._data or (self._info is not None and tag in self._info)
+
+ def __setitem__(self, tag, value):
+ if self._info is not None and tag in self._info:
+ del self._info[tag]
+ self._data[tag] = value
+
+ def __delitem__(self, tag):
+ if self._info is not None and tag in self._info:
+ del self._info[tag]
+ else:
+ del self._data[tag]
+
+ def __iter__(self):
+ keys = set(self._data)
+ if self._info is not None:
+ keys.update(self._info)
+ return iter(keys)
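+
+# A minimal usage sketch for the Exif accessors above, assuming a local
+# "photo.jpg" that carries EXIF data (the file name, and the availability of
+# ExifTags.Base in this Pillow build, are assumptions):
+#
+#   from PIL import ExifTags, Image
+#
+#   exif = Image.open("photo.jpg").getexif()
+#   print(exif.get(ExifTags.Base.Model))          # tag in the top-level IFD
+#   exif_ifd = exif.get_ifd(ExifTags.IFD.Exif)    # nested Exif IFD
+#   print(exif_ifd.get(ExifTags.Base.ExposureTime))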
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/ImageChops.py b/Backend/venv/lib/python3.12/site-packages/PIL/ImageChops.py
new file mode 100644
index 00000000..70120031
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/PIL/ImageChops.py
@@ -0,0 +1,303 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# standard channel operations
+#
+# History:
+# 1996-03-24 fl Created
+# 1996-08-13 fl Added logical operations (for "1" images)
+# 2000-10-12 fl Added offset method (from Image.py)
+#
+# Copyright (c) 1997-2000 by Secret Labs AB
+# Copyright (c) 1996-2000 by Fredrik Lundh
+#
+# See the README file for information on usage and redistribution.
+#
+
+from . import Image
+
+
+def constant(image, value):
+ """Fill a channel with a given grey level.
+
+ :rtype: :py:class:`~PIL.Image.Image`
+ """
+
+ return Image.new("L", image.size, value)
+
+
+def duplicate(image):
+ """Copy a channel. Alias for :py:meth:`PIL.Image.Image.copy`.
+
+ :rtype: :py:class:`~PIL.Image.Image`
+ """
+
+ return image.copy()
+
+
+def invert(image):
+ """
+ Invert an image (channel). ::
+
+ out = MAX - image
+
+ :rtype: :py:class:`~PIL.Image.Image`
+ """
+
+ image.load()
+ return image._new(image.im.chop_invert())
+
+
+def lighter(image1, image2):
+ """
+ Compares the two images, pixel by pixel, and returns a new image containing
+ the lighter values. ::
+
+ out = max(image1, image2)
+
+ :rtype: :py:class:`~PIL.Image.Image`
+ """
+
+ image1.load()
+ image2.load()
+ return image1._new(image1.im.chop_lighter(image2.im))
+
+
+def darker(image1, image2):
+ """
+ Compares the two images, pixel by pixel, and returns a new image containing
+ the darker values. ::
+
+ out = min(image1, image2)
+
+ :rtype: :py:class:`~PIL.Image.Image`
+ """
+
+ image1.load()
+ image2.load()
+ return image1._new(image1.im.chop_darker(image2.im))
+
+
+def difference(image1, image2):
+ """
+ Returns the absolute value of the pixel-by-pixel difference between the two
+ images. ::
+
+ out = abs(image1 - image2)
+
+ :rtype: :py:class:`~PIL.Image.Image`
+ """
+
+ image1.load()
+ image2.load()
+ return image1._new(image1.im.chop_difference(image2.im))
+
+
+def multiply(image1, image2):
+ """
+ Superimposes two images on top of each other.
+
+ If you multiply an image with a solid black image, the result is black. If
+ you multiply with a solid white image, the image is unaffected. ::
+
+ out = image1 * image2 / MAX
+
+ :rtype: :py:class:`~PIL.Image.Image`
+ """
+
+ image1.load()
+ image2.load()
+ return image1._new(image1.im.chop_multiply(image2.im))
+
+
+def screen(image1, image2):
+ """
+ Superimposes two inverted images on top of each other. ::
+
+ out = MAX - ((MAX - image1) * (MAX - image2) / MAX)
+
+ :rtype: :py:class:`~PIL.Image.Image`
+ """
+
+ image1.load()
+ image2.load()
+ return image1._new(image1.im.chop_screen(image2.im))
+
+
+def soft_light(image1, image2):
+ """
+ Superimposes two images on top of each other using the Soft Light algorithm
+
+ :rtype: :py:class:`~PIL.Image.Image`
+ """
+
+ image1.load()
+ image2.load()
+ return image1._new(image1.im.chop_soft_light(image2.im))
+
+
+def hard_light(image1, image2):
+ """
+ Superimposes two images on top of each other using the Hard Light algorithm
+
+ :rtype: :py:class:`~PIL.Image.Image`
+ """
+
+ image1.load()
+ image2.load()
+ return image1._new(image1.im.chop_hard_light(image2.im))
+
+
+def overlay(image1, image2):
+ """
+ Superimposes two images on top of each other using the Overlay algorithm
+
+ :rtype: :py:class:`~PIL.Image.Image`
+ """
+
+ image1.load()
+ image2.load()
+ return image1._new(image1.im.chop_overlay(image2.im))
+
+
+def add(image1, image2, scale=1.0, offset=0):
+ """
+ Adds two images, dividing the result by scale and adding the
+ offset. If omitted, scale defaults to 1.0, and offset to 0.0. ::
+
+ out = ((image1 + image2) / scale + offset)
+
+ :rtype: :py:class:`~PIL.Image.Image`
+ """
+
+ image1.load()
+ image2.load()
+ return image1._new(image1.im.chop_add(image2.im, scale, offset))
+
+
+def subtract(image1, image2, scale=1.0, offset=0):
+ """
+ Subtracts two images, dividing the result by scale and adding the offset.
+ If omitted, scale defaults to 1.0, and offset to 0.0. ::
+
+ out = ((image1 - image2) / scale + offset)
+
+ :rtype: :py:class:`~PIL.Image.Image`
+ """
+
+ image1.load()
+ image2.load()
+ return image1._new(image1.im.chop_subtract(image2.im, scale, offset))
+
+
+def add_modulo(image1, image2):
+ """Add two images, without clipping the result. ::
+
+ out = ((image1 + image2) % MAX)
+
+ :rtype: :py:class:`~PIL.Image.Image`
+ """
+
+ image1.load()
+ image2.load()
+ return image1._new(image1.im.chop_add_modulo(image2.im))
+
+
+def subtract_modulo(image1, image2):
+ """Subtract two images, without clipping the result. ::
+
+ out = ((image1 - image2) % MAX)
+
+ :rtype: :py:class:`~PIL.Image.Image`
+ """
+
+ image1.load()
+ image2.load()
+ return image1._new(image1.im.chop_subtract_modulo(image2.im))
+
+
+def logical_and(image1, image2):
+ """Logical AND between two images.
+
+ Both of the images must have mode "1". If you would like to perform a
+ logical AND on an image with a mode other than "1", try
+ :py:meth:`~PIL.ImageChops.multiply` instead, using a black-and-white mask
+ as the second image. ::
+
+ out = ((image1 and image2) % MAX)
+
+ :rtype: :py:class:`~PIL.Image.Image`
+ """
+
+ image1.load()
+ image2.load()
+ return image1._new(image1.im.chop_and(image2.im))
+
+
+def logical_or(image1, image2):
+ """Logical OR between two images.
+
+ Both of the images must have mode "1". ::
+
+ out = ((image1 or image2) % MAX)
+
+ :rtype: :py:class:`~PIL.Image.Image`
+ """
+
+ image1.load()
+ image2.load()
+ return image1._new(image1.im.chop_or(image2.im))
+
+
+def logical_xor(image1, image2):
+ """Logical XOR between two images.
+
+ Both of the images must have mode "1". ::
+
+ out = ((bool(image1) != bool(image2)) % MAX)
+
+ :rtype: :py:class:`~PIL.Image.Image`
+ """
+
+ image1.load()
+ image2.load()
+ return image1._new(image1.im.chop_xor(image2.im))
+
+
+def blend(image1, image2, alpha):
+ """Blend images using constant transparency weight. Alias for
+ :py:func:`PIL.Image.blend`.
+
+ :rtype: :py:class:`~PIL.Image.Image`
+ """
+
+ return Image.blend(image1, image2, alpha)
+
+
+def composite(image1, image2, mask):
+ """Create composite using transparency mask. Alias for
+ :py:func:`PIL.Image.composite`.
+
+ :rtype: :py:class:`~PIL.Image.Image`
+ """
+
+ return Image.composite(image1, image2, mask)
+
+
+def offset(image, xoffset, yoffset=None):
+ """Returns a copy of the image where data has been offset by the given
+ distances. Data wraps around the edges. If ``yoffset`` is omitted, it
+ is assumed to be equal to ``xoffset``.
+
+ :param image: Input image.
+ :param xoffset: The horizontal distance.
+ :param yoffset: The vertical distance. If omitted, both
+ distances are set to the same value.
+ :rtype: :py:class:`~PIL.Image.Image`
+ """
+
+ if yoffset is None:
+ yoffset = xoffset
+ image.load()
+ return image._new(image.im.offset(xoffset, yoffset))
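+
+# A minimal usage sketch for the channel operations above, assuming two
+# same-size, same-mode files "before.png" and "after.png" (the file names
+# are illustrative):
+#
+#   from PIL import Image, ImageChops
+#
+#   im1 = Image.open("before.png").convert("RGB")
+#   im2 = Image.open("after.png").convert("RGB")
+#   diff = ImageChops.difference(im1, im2)
+#   bbox = diff.getbbox()  # bounding box of non-zero pixels, or None if equal
+#   if bbox:
+#       im2.crop(bbox).save("changed-region.png")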
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/ImageCms.py b/Backend/venv/lib/python3.12/site-packages/PIL/ImageCms.py
new file mode 100644
index 00000000..3a337f9f
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/PIL/ImageCms.py
@@ -0,0 +1,1009 @@
+# The Python Imaging Library.
+# $Id$
+
+# Optional color management support, based on Kevin Cazabon's PyCMS
+# library.
+
+# History:
+
+# 2009-03-08 fl Added to PIL.
+
+# Copyright (C) 2002-2003 Kevin Cazabon
+# Copyright (c) 2009 by Fredrik Lundh
+# Copyright (c) 2013 by Eric Soroos
+
+# See the README file for information on usage and redistribution. See
+# below for the original description.
+
+import sys
+from enum import IntEnum
+
+from . import Image
+
+try:
+ from . import _imagingcms
+except ImportError as ex:
+ # Allow error import for doc purposes, but error out when accessing
+ # anything in core.
+ from ._util import DeferredError
+
+ _imagingcms = DeferredError(ex)
+
+DESCRIPTION = """
+pyCMS
+
+ a Python / PIL interface to the littleCMS ICC Color Management System
+ Copyright (C) 2002-2003 Kevin Cazabon
+ kevin@cazabon.com
+ https://www.cazabon.com
+
+ pyCMS home page: https://www.cazabon.com/pyCMS
+ littleCMS home page: https://www.littlecms.com
+ (littleCMS is Copyright (C) 1998-2001 Marti Maria)
+
+ Originally released under LGPL. Graciously donated to PIL in
+ March 2009, for distribution under the standard PIL license
+
+ The pyCMS.py module provides a "clean" interface between Python/PIL and
+ pyCMSdll, taking care of some of the more complex handling of the direct
+ pyCMSdll functions, as well as error-checking and making sure that all
+ relevant data is kept together.
+
+ While it is possible to call pyCMSdll functions directly, it's not highly
+ recommended.
+
+ Version History:
+
+ 1.0.0 pil Oct 2013 Port to LCMS 2.
+
+ 0.1.0 pil mod March 10, 2009
+
+ Renamed display profile to proof profile. The proof
+ profile is the profile of the device that is being
+ simulated, not the profile of the device which is
+ actually used to display/print the final simulation
+ (that'd be the output profile) - also see LCMSAPI.txt
+ input colorspace -> using 'renderingIntent' -> proof
+ colorspace -> using 'proofRenderingIntent' -> output
+ colorspace
+
+ Added LCMS FLAGS support.
+ Added FLAGS["SOFTPROOFING"] as default flag for
+ buildProofTransform (otherwise the proof profile/intent
+ would be ignored).
+
+ 0.1.0 pil March 2009 - added to PIL, as PIL.ImageCms
+
+ 0.0.2 alpha Jan 6, 2002
+
+ Added try/except statements around type() checks of
+ potential CObjects... Python won't let you use type()
+ on them, and raises a TypeError (stupid, if you ask
+ me!)
+
+ Added buildProofTransformFromOpenProfiles() function.
+ Additional fixes in DLL, see DLL code for details.
+
+ 0.0.1 alpha first public release, Dec. 26, 2002
+
+ Known to-do list with current version (of Python interface, not pyCMSdll):
+
+ none
+
+"""
+
+VERSION = "1.0.0 pil"
+
+# --------------------------------------------------------------------.
+
+core = _imagingcms
+
+#
+# intent/direction values
+
+
+class Intent(IntEnum):
+ PERCEPTUAL = 0
+ RELATIVE_COLORIMETRIC = 1
+ SATURATION = 2
+ ABSOLUTE_COLORIMETRIC = 3
+
+
+class Direction(IntEnum):
+ INPUT = 0
+ OUTPUT = 1
+ PROOF = 2
+
+
+#
+# flags
+
+FLAGS = {
+ "MATRIXINPUT": 1,
+ "MATRIXOUTPUT": 2,
+ "MATRIXONLY": (1 | 2),
+ "NOWHITEONWHITEFIXUP": 4, # Don't hot fix scum dot
+ # Don't create prelinearization tables on precalculated transforms
+ # (internal use):
+ "NOPRELINEARIZATION": 16,
+ "GUESSDEVICECLASS": 32, # Guess device class (for transform2devicelink)
+ "NOTCACHE": 64, # Inhibit 1-pixel cache
+ "NOTPRECALC": 256,
+ "NULLTRANSFORM": 512, # Don't transform anyway
+ "HIGHRESPRECALC": 1024, # Use more memory to give better accuracy
+ "LOWRESPRECALC": 2048, # Use less memory to minimize resources
+ "WHITEBLACKCOMPENSATION": 8192,
+ "BLACKPOINTCOMPENSATION": 8192,
+ "GAMUTCHECK": 4096, # Out of Gamut alarm
+ "SOFTPROOFING": 16384, # Do softproofing
+ "PRESERVEBLACK": 32768, # Black preservation
+ "NODEFAULTRESOURCEDEF": 16777216, # CRD special
+ "GRIDPOINTS": lambda n: (n & 0xFF) << 16, # Gridpoints
+}
+
+_MAX_FLAG = 0
+for flag in FLAGS.values():
+ if isinstance(flag, int):
+ _MAX_FLAG = _MAX_FLAG | flag
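+
+# The flag values are plain bitmask integers, so they combine with "|", e.g.
+# FLAGS["BLACKPOINTCOMPENSATION"] | FLAGS["HIGHRESPRECALC"].  "GRIDPOINTS" is
+# a callable rather than a constant: FLAGS["GRIDPOINTS"](16) evaluates to
+# (16 & 0xFF) << 16 == 0x100000, placing the grid-point count in bits 16-23.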
+
+
+# --------------------------------------------------------------------.
+# Experimental PIL-level API
+# --------------------------------------------------------------------.
+
+##
+# Profile.
+
+
+class ImageCmsProfile:
+ def __init__(self, profile):
+ """
+ :param profile: Either a string representing a filename,
+ a file like object containing a profile or a
+ low-level profile object
+
+ """
+
+ if isinstance(profile, str):
+ if sys.platform == "win32":
+ profile_bytes_path = profile.encode()
+ try:
+ profile_bytes_path.decode("ascii")
+ except UnicodeDecodeError:
+ with open(profile, "rb") as f:
+ self._set(core.profile_frombytes(f.read()))
+ return
+ self._set(core.profile_open(profile), profile)
+ elif hasattr(profile, "read"):
+ self._set(core.profile_frombytes(profile.read()))
+ elif isinstance(profile, _imagingcms.CmsProfile):
+ self._set(profile)
+ else:
+ msg = "Invalid type for Profile"
+ raise TypeError(msg)
+
+ def _set(self, profile, filename=None):
+ self.profile = profile
+ self.filename = filename
+ self.product_name = None # profile.product_name
+ self.product_info = None # profile.product_info
+
+ def tobytes(self):
+ """
+ Returns the profile in a format suitable for embedding in
+ saved images.
+
+ :returns: a bytes object containing the ICC profile.
+ """
+
+ return core.profile_tobytes(self.profile)
+
+
+class ImageCmsTransform(Image.ImagePointHandler):
+
+ """
+ Transform. This can be used with the procedural API, or with the standard
+ :py:func:`~PIL.Image.Image.point` method.
+
+ Will return the output profile in the ``output.info['icc_profile']``.
+ """
+
+ def __init__(
+ self,
+ input,
+ output,
+ input_mode,
+ output_mode,
+ intent=Intent.PERCEPTUAL,
+ proof=None,
+ proof_intent=Intent.ABSOLUTE_COLORIMETRIC,
+ flags=0,
+ ):
+ if proof is None:
+ self.transform = core.buildTransform(
+ input.profile, output.profile, input_mode, output_mode, intent, flags
+ )
+ else:
+ self.transform = core.buildProofTransform(
+ input.profile,
+ output.profile,
+ proof.profile,
+ input_mode,
+ output_mode,
+ intent,
+ proof_intent,
+ flags,
+ )
+ # Note: inputMode and outputMode are for pyCMS compatibility only
+ self.input_mode = self.inputMode = input_mode
+ self.output_mode = self.outputMode = output_mode
+
+ self.output_profile = output
+
+ def point(self, im):
+ return self.apply(im)
+
+ def apply(self, im, imOut=None):
+ im.load()
+ if imOut is None:
+ imOut = Image.new(self.output_mode, im.size, None)
+ self.transform.apply(im.im.id, imOut.im.id)
+ imOut.info["icc_profile"] = self.output_profile.tobytes()
+ return imOut
+
+ def apply_in_place(self, im):
+ im.load()
+ if im.mode != self.output_mode:
+ msg = "mode mismatch"
+ raise ValueError(msg) # wrong output mode
+ self.transform.apply(im.im.id, im.im.id)
+ im.info["icc_profile"] = self.output_profile.tobytes()
+ return im
+
+
+def get_display_profile(handle=None):
+ """
+ (experimental) Fetches the profile for the current display device.
+
+ :returns: ``None`` if the profile is not known.
+ """
+
+ if sys.platform != "win32":
+ return None
+
+ from . import ImageWin
+
+ if isinstance(handle, ImageWin.HDC):
+ profile = core.get_display_profile_win32(handle, 1)
+ else:
+ profile = core.get_display_profile_win32(handle or 0)
+ if profile is None:
+ return None
+ return ImageCmsProfile(profile)
+
+
+# --------------------------------------------------------------------.
+# pyCMS compatible layer
+# --------------------------------------------------------------------.
+
+
+class PyCMSError(Exception):
+
+ """(pyCMS) Exception class.
+ This is used for all errors in the pyCMS API."""
+
+ pass
+
+
+def profileToProfile(
+ im,
+ inputProfile,
+ outputProfile,
+ renderingIntent=Intent.PERCEPTUAL,
+ outputMode=None,
+ inPlace=False,
+ flags=0,
+):
+ """
+ (pyCMS) Applies an ICC transformation to a given image, mapping from
+ ``inputProfile`` to ``outputProfile``.
+
+ If the input or output profiles specified are not valid filenames, a
+ :exc:`PyCMSError` will be raised. If ``inPlace`` is ``True`` and
+ ``outputMode != im.mode``, a :exc:`PyCMSError` will be raised.
+ If an error occurs during application of the profiles,
+ a :exc:`PyCMSError` will be raised.
+ If ``outputMode`` is not a mode supported by the ``outputProfile`` (or by pyCMS),
+ a :exc:`PyCMSError` will be raised.
+
+ This function applies an ICC transformation to im from ``inputProfile``'s
+ color space to ``outputProfile``'s color space using the specified rendering
+ intent to decide how to handle out-of-gamut colors.
+
+ ``outputMode`` can be used to specify that a color mode conversion is to
+ be done using these profiles, but the specified profiles must be able
+ to handle that mode. I.e., if converting im from RGB to CMYK using
+ profiles, the input profile must handle RGB data, and the output
+ profile must handle CMYK data.
+
+ :param im: An open :py:class:`~PIL.Image.Image` object (i.e. Image.new(...)
+ or Image.open(...), etc.)
+ :param inputProfile: String, as a valid filename path to the ICC input
+ profile you wish to use for this image, or a profile object
+ :param outputProfile: String, as a valid filename path to the ICC output
+ profile you wish to use for this image, or a profile object
+ :param renderingIntent: Integer (0-3) specifying the rendering intent you
+ wish to use for the transform
+
+ ImageCms.Intent.PERCEPTUAL = 0 (DEFAULT)
+ ImageCms.Intent.RELATIVE_COLORIMETRIC = 1
+ ImageCms.Intent.SATURATION = 2
+ ImageCms.Intent.ABSOLUTE_COLORIMETRIC = 3
+
+ see the pyCMS documentation for details on rendering intents and what
+ they do.
+ :param outputMode: A valid PIL mode for the output image (i.e. "RGB",
+ "CMYK", etc.). Note: if rendering the image "inPlace", outputMode
+ MUST be the same mode as the input, or omitted completely. If
+ omitted, the outputMode will be the same as the mode of the input
+ image (im.mode)
+ :param inPlace: Boolean. If ``True``, the original image is modified in-place,
+ and ``None`` is returned. If ``False`` (default), a new
+ :py:class:`~PIL.Image.Image` object is returned with the transform applied.
+ :param flags: Integer (0-...) specifying additional flags
+ :returns: Either None or a new :py:class:`~PIL.Image.Image` object, depending on
+ the value of ``inPlace``
+ :exception PyCMSError:
+ """
+
+ if outputMode is None:
+ outputMode = im.mode
+
+ if not isinstance(renderingIntent, int) or not (0 <= renderingIntent <= 3):
+ msg = "renderingIntent must be an integer between 0 and 3"
+ raise PyCMSError(msg)
+
+ if not isinstance(flags, int) or not (0 <= flags <= _MAX_FLAG):
+ msg = f"flags must be an integer between 0 and {_MAX_FLAG}"
+ raise PyCMSError(msg)
+
+ try:
+ if not isinstance(inputProfile, ImageCmsProfile):
+ inputProfile = ImageCmsProfile(inputProfile)
+ if not isinstance(outputProfile, ImageCmsProfile):
+ outputProfile = ImageCmsProfile(outputProfile)
+ transform = ImageCmsTransform(
+ inputProfile,
+ outputProfile,
+ im.mode,
+ outputMode,
+ renderingIntent,
+ flags=flags,
+ )
+ if inPlace:
+ transform.apply_in_place(im)
+ imOut = None
+ else:
+ imOut = transform.apply(im)
+ except (OSError, TypeError, ValueError) as v:
+ raise PyCMSError(v) from v
+
+ return imOut
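+
+# A minimal usage sketch, assuming ICC profile files exist at the paths shown
+# (the paths and file names are illustrative):
+#
+#   from PIL import Image, ImageCms
+#
+#   im = Image.open("photo.jpg")  # an RGB image
+#   cmyk = ImageCms.profileToProfile(
+#       im, "sRGB.icc", "USWebCoatedSWOP.icc", outputMode="CMYK"
+#   )
+#   cmyk.save("photo.tif")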
+
+
+def getOpenProfile(profileFilename):
+ """
+ (pyCMS) Opens an ICC profile file.
+
+ The PyCMSProfile object can be passed back into pyCMS for use in creating
+ transforms and such (as in ImageCms.buildTransformFromOpenProfiles()).
+
+ If ``profileFilename`` is not a valid filename for an ICC profile,
+ a :exc:`PyCMSError` will be raised.
+
+ :param profileFilename: String, as a valid filename path to the ICC profile
+ you wish to open, or a file-like object.
+ :returns: A CmsProfile class object.
+ :exception PyCMSError:
+ """
+
+ try:
+ return ImageCmsProfile(profileFilename)
+ except (OSError, TypeError, ValueError) as v:
+ raise PyCMSError(v) from v
+
+
+def buildTransform(
+ inputProfile,
+ outputProfile,
+ inMode,
+ outMode,
+ renderingIntent=Intent.PERCEPTUAL,
+ flags=0,
+):
+ """
+ (pyCMS) Builds an ICC transform mapping from the ``inputProfile`` to the
+ ``outputProfile``. Use applyTransform to apply the transform to a given
+ image.
+
+ If the input or output profiles specified are not valid filenames, a
+ :exc:`PyCMSError` will be raised. If an error occurs during creation
+ of the transform, a :exc:`PyCMSError` will be raised.
+
+ If ``inMode`` or ``outMode`` are not a mode supported by the ``outputProfile``
+ (or by pyCMS), a :exc:`PyCMSError` will be raised.
+
+ This function builds and returns an ICC transform from the ``inputProfile``
+ to the ``outputProfile`` using the ``renderingIntent`` to determine what to do
+ with out-of-gamut colors. It will ONLY work for converting images that
+ are in ``inMode`` to images that are in ``outMode`` color format (PIL mode,
+ i.e. "RGB", "RGBA", "CMYK", etc.).
+
+ Building the transform is a fair part of the overhead in
+ ImageCms.profileToProfile(), so if you're planning on converting multiple
+ images using the same input/output settings, this can save you time.
+ Once you have a transform object, it can be used with
+ ImageCms.applyProfile() to convert images without the need to re-compute
+ the lookup table for the transform.
+
+ The reason pyCMS returns a class object rather than a handle directly
+ to the transform is that it needs to keep track of the PIL input/output
+ modes that the transform is meant for. These attributes are stored in
+ the ``inMode`` and ``outMode`` attributes of the object (which can be
+ manually overridden if you really want to, but I don't know of any
+ time that would be of use, or would even work).
+
+ :param inputProfile: String, as a valid filename path to the ICC input
+ profile you wish to use for this transform, or a profile object
+ :param outputProfile: String, as a valid filename path to the ICC output
+ profile you wish to use for this transform, or a profile object
+ :param inMode: String, as a valid PIL mode that the appropriate profile
+ also supports (i.e. "RGB", "RGBA", "CMYK", etc.)
+ :param outMode: String, as a valid PIL mode that the appropriate profile
+ also supports (i.e. "RGB", "RGBA", "CMYK", etc.)
+ :param renderingIntent: Integer (0-3) specifying the rendering intent you
+ wish to use for the transform
+
+ ImageCms.Intent.PERCEPTUAL = 0 (DEFAULT)
+ ImageCms.Intent.RELATIVE_COLORIMETRIC = 1
+ ImageCms.Intent.SATURATION = 2
+ ImageCms.Intent.ABSOLUTE_COLORIMETRIC = 3
+
+ see the pyCMS documentation for details on rendering intents and what
+ they do.
+ :param flags: Integer (0-...) specifying additional flags
+ :returns: A CmsTransform class object.
+ :exception PyCMSError:
+ """
+
+ if not isinstance(renderingIntent, int) or not (0 <= renderingIntent <= 3):
+ msg = "renderingIntent must be an integer between 0 and 3"
+ raise PyCMSError(msg)
+
+ if not isinstance(flags, int) or not (0 <= flags <= _MAX_FLAG):
+ msg = "flags must be an integer between 0 and %s" + _MAX_FLAG
+ raise PyCMSError(msg)
+
+ try:
+ if not isinstance(inputProfile, ImageCmsProfile):
+ inputProfile = ImageCmsProfile(inputProfile)
+ if not isinstance(outputProfile, ImageCmsProfile):
+ outputProfile = ImageCmsProfile(outputProfile)
+ return ImageCmsTransform(
+ inputProfile, outputProfile, inMode, outMode, renderingIntent, flags=flags
+ )
+ except (OSError, TypeError, ValueError) as v:
+ raise PyCMSError(v) from v
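+
+# Building a transform once and reusing it across images, per the note above
+# (a sketch; the file names are illustrative):
+#
+#   from PIL import Image, ImageCms
+#
+#   srgb = ImageCms.createProfile("sRGB")
+#   lab = ImageCms.createProfile("LAB")
+#   rgb2lab = ImageCms.buildTransform(srgb, lab, "RGB", "LAB")
+#   for name in ("a.png", "b.png", "c.png"):
+#       lab_im = ImageCms.applyTransform(Image.open(name), rgb2lab)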
+
+
+def buildProofTransform(
+ inputProfile,
+ outputProfile,
+ proofProfile,
+ inMode,
+ outMode,
+ renderingIntent=Intent.PERCEPTUAL,
+ proofRenderingIntent=Intent.ABSOLUTE_COLORIMETRIC,
+ flags=FLAGS["SOFTPROOFING"],
+):
+ """
+ (pyCMS) Builds an ICC transform mapping from the ``inputProfile`` to the
+ ``outputProfile``, but tries to simulate the result that would be
+ obtained on the ``proofProfile`` device.
+
+ If the input, output, or proof profiles specified are not valid
+ filenames, a :exc:`PyCMSError` will be raised.
+
+ If an error occurs during creation of the transform,
+ a :exc:`PyCMSError` will be raised.
+
+ If ``inMode`` or ``outMode`` are not a mode supported by the ``outputProfile``
+ (or by pyCMS), a :exc:`PyCMSError` will be raised.
+
+ This function builds and returns an ICC transform from the ``inputProfile``
+ to the ``outputProfile``, but tries to simulate the result that would be
+ obtained on the ``proofProfile`` device using ``renderingIntent`` and
+ ``proofRenderingIntent`` to determine what to do with out-of-gamut
+ colors. This is known as "soft-proofing". It will ONLY work for
+ converting images that are in ``inMode`` to images that are in ``outMode``
+ color format (PIL mode, i.e. "RGB", "RGBA", "CMYK", etc.).
+
+ Usage of the resulting transform object is exactly the same as with
+ ImageCms.buildTransform().
+
+ Proof profiling is generally used when using an output device to get a
+ good idea of what the final printed/displayed image would look like on
+ the ``proofProfile`` device when it's quicker and easier to use the
+ output device for judging color. Generally, this means that the
+ output device is a monitor, or a dye-sub printer (etc.), and the simulated
+ device is something more expensive, complicated, or time consuming
+ (making it difficult to make a real print for color judgement purposes).
+
+ Soft-proofing basically functions by adjusting the colors on the
+ output device to match the colors of the device being simulated. However,
+ when the simulated device has a much wider gamut than the output
+ device, you may obtain marginal results.
+
+ :param inputProfile: String, as a valid filename path to the ICC input
+ profile you wish to use for this transform, or a profile object
+ :param outputProfile: String, as a valid filename path to the ICC output
+ (monitor, usually) profile you wish to use for this transform, or a
+ profile object
+ :param proofProfile: String, as a valid filename path to the ICC proof
+ profile you wish to use for this transform, or a profile object
+ :param inMode: String, as a valid PIL mode that the appropriate profile
+ also supports (i.e. "RGB", "RGBA", "CMYK", etc.)
+ :param outMode: String, as a valid PIL mode that the appropriate profile
+ also supports (i.e. "RGB", "RGBA", "CMYK", etc.)
+ :param renderingIntent: Integer (0-3) specifying the rendering intent you
+ wish to use for the input->proof (simulated) transform
+
+ ImageCms.Intent.PERCEPTUAL = 0 (DEFAULT)
+ ImageCms.Intent.RELATIVE_COLORIMETRIC = 1
+ ImageCms.Intent.SATURATION = 2
+ ImageCms.Intent.ABSOLUTE_COLORIMETRIC = 3
+
+ see the pyCMS documentation for details on rendering intents and what
+ they do.
+ :param proofRenderingIntent: Integer (0-3) specifying the rendering intent
+ you wish to use for proof->output transform
+
+ ImageCms.Intent.PERCEPTUAL = 0 (DEFAULT)
+ ImageCms.Intent.RELATIVE_COLORIMETRIC = 1
+ ImageCms.Intent.SATURATION = 2
+ ImageCms.Intent.ABSOLUTE_COLORIMETRIC = 3
+
+ see the pyCMS documentation for details on rendering intents and what
+ they do.
+ :param flags: Integer (0-...) specifying additional flags
+ :returns: A CmsTransform class object.
+ :exception PyCMSError:
+ """
+
+ if not isinstance(renderingIntent, int) or not (0 <= renderingIntent <= 3):
+ msg = "renderingIntent must be an integer between 0 and 3"
+ raise PyCMSError(msg)
+
+ if not isinstance(flags, int) or not (0 <= flags <= _MAX_FLAG):
+ msg = "flags must be an integer between 0 and %s" + _MAX_FLAG
+ raise PyCMSError(msg)
+
+ try:
+ if not isinstance(inputProfile, ImageCmsProfile):
+ inputProfile = ImageCmsProfile(inputProfile)
+ if not isinstance(outputProfile, ImageCmsProfile):
+ outputProfile = ImageCmsProfile(outputProfile)
+ if not isinstance(proofProfile, ImageCmsProfile):
+ proofProfile = ImageCmsProfile(proofProfile)
+ return ImageCmsTransform(
+ inputProfile,
+ outputProfile,
+ inMode,
+ outMode,
+ renderingIntent,
+ proofProfile,
+ proofRenderingIntent,
+ flags,
+ )
+ except (OSError, TypeError, ValueError) as v:
+ raise PyCMSError(v) from v
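+
+# A soft-proofing sketch: render on a monitor profile while simulating a
+# printer (all three profile paths are illustrative):
+#
+#   from PIL import Image, ImageCms
+#
+#   proof = ImageCms.buildProofTransform(
+#       "sRGB.icc", "monitor.icc", "printer.icc", "RGB", "RGB"
+#   )
+#   preview = ImageCms.applyTransform(Image.open("photo.jpg"), proof)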
+
+
+buildTransformFromOpenProfiles = buildTransform
+buildProofTransformFromOpenProfiles = buildProofTransform
+
+
+def applyTransform(im, transform, inPlace=False):
+ """
+ (pyCMS) Applies a transform to a given image.
+
+ If ``im.mode != transform.inMode``, a :exc:`PyCMSError` is raised.
+
+ If ``inPlace`` is ``True`` and ``transform.inMode != transform.outMode``, a
+ :exc:`PyCMSError` is raised.
+
+ If ``im.mode``, ``transform.inMode`` or ``transform.outMode`` is not
+ supported by pyCMSdll or the profiles you used for the transform, a
+ :exc:`PyCMSError` is raised.
+
+ If an error occurs while the transform is being applied,
+ a :exc:`PyCMSError` is raised.
+
+ This function applies a pre-calculated transform (from
+ ImageCms.buildTransform() or ImageCms.buildTransformFromOpenProfiles())
+ to an image. The transform can be used for multiple images, saving
+ considerable calculation time if doing the same conversion multiple times.
+
+ If you want to modify im in-place instead of receiving a new image as
+ the return value, set ``inPlace`` to ``True``. This can only be done if
+ ``transform.inMode`` and ``transform.outMode`` are the same, because we can't
+ change the mode in-place (the buffer sizes for some modes are
+ different). The default behavior is to return a new :py:class:`~PIL.Image.Image`
+ object of the same dimensions in mode ``transform.outMode``.
+
+ :param im: An :py:class:`~PIL.Image.Image` object, and im.mode must be the same
+ as the ``inMode`` supported by the transform.
+ :param transform: A valid CmsTransform class object
+ :param inPlace: Bool. If ``True``, ``im`` is modified in place and ``None`` is
+ returned, if ``False``, a new :py:class:`~PIL.Image.Image` object with the
+ transform applied is returned (and ``im`` is not changed). The default is
+ ``False``.
+ :returns: Either ``None``, or a new :py:class:`~PIL.Image.Image` object,
+ depending on the value of ``inPlace``. The profile will be returned in
+ the image's ``info['icc_profile']``.
+ :exception PyCMSError:
+ """
+
+ try:
+ if inPlace:
+ transform.apply_in_place(im)
+ imOut = None
+ else:
+ imOut = transform.apply(im)
+ except (TypeError, ValueError) as v:
+ raise PyCMSError(v) from v
+
+ return imOut
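+
+# In-place application requires the transform's input and output modes to
+# match, e.g. the RGB -> RGB proof transform sketched above:
+#
+#   out = ImageCms.applyTransform(im, proof)          # new Image returned
+#   ImageCms.applyTransform(im, proof, inPlace=True)  # im modified, returns None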
+
+
+def createProfile(colorSpace, colorTemp=-1):
+ """
+ (pyCMS) Creates a profile.
+
+ If ``colorSpace`` is not in ``["LAB", "XYZ", "sRGB"]``,
+ a :exc:`PyCMSError` is raised.
+
+ If using LAB and ``colorTemp`` is not a numeric value,
+ a :exc:`PyCMSError` is raised.
+
+ If an error occurs while creating the profile,
+ a :exc:`PyCMSError` is raised.
+
+ Use this function to create common profiles on-the-fly instead of
+ having to supply a profile on disk and knowing the path to it. It
+ returns a normal CmsProfile object that can be passed to
+ ImageCms.buildTransformFromOpenProfiles() to create a transform to apply
+ to images.
+
+ :param colorSpace: String, the color space of the profile you wish to
+ create.
+ Currently only "LAB", "XYZ", and "sRGB" are supported.
+ :param colorTemp: Positive integer for the white point for the profile, in
+ degrees Kelvin (i.e. 5000, 6500, 9600, etc.). The default, if omitted, is
+ the D50 illuminant (5000K). colorTemp is ONLY applied to LAB
+ profiles, and is ignored for XYZ and sRGB.
+ :returns: A CmsProfile class object
+ :exception PyCMSError:
+ """
+
+ if colorSpace not in ["LAB", "XYZ", "sRGB"]:
+ msg = (
+ f"Color space not supported for on-the-fly profile creation ({colorSpace})"
+ )
+ raise PyCMSError(msg)
+
+ if colorSpace == "LAB":
+ try:
+ colorTemp = float(colorTemp)
+ except (TypeError, ValueError) as e:
+ msg = f'Color temperature must be numeric, "{colorTemp}" not valid'
+ raise PyCMSError(msg) from e
+
+ try:
+ return core.createProfile(colorSpace, colorTemp)
+ except (TypeError, ValueError) as v:
+ raise PyCMSError(v) from v
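+
+# On-the-fly profiles for the three supported color spaces (a sketch):
+#
+#   srgb = ImageCms.createProfile("sRGB")
+#   xyz = ImageCms.createProfile("XYZ")
+#   lab_d65 = ImageCms.createProfile("LAB", colorTemp=6500)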
+
+
+def getProfileName(profile):
+ """
+
+ (pyCMS) Gets the internal product name for the given profile.
+
+ If ``profile`` isn't a valid CmsProfile object or filename to a profile,
+ a :exc:`PyCMSError` is raised. If an error occurs while trying
+ to obtain the name tag, a :exc:`PyCMSError` is raised.
+
+ Use this function to obtain the INTERNAL name of the profile (stored
+ in an ICC tag in the profile itself), usually the one used when the
+ profile was originally created. Sometimes this tag also contains
+ additional information supplied by the creator.
+
+ :param profile: EITHER a valid CmsProfile object, OR a string of the
+ filename of an ICC profile.
+ :returns: A string containing the internal name of the profile as stored
+ in an ICC tag.
+ :exception PyCMSError:
+ """
+
+ try:
+ # add an extra newline to preserve pyCMS compatibility
+ if not isinstance(profile, ImageCmsProfile):
+ profile = ImageCmsProfile(profile)
+ # do it in python, not c.
+ # // name was "%s - %s" (model, manufacturer) || Description ,
+ # // but if the Model and Manufacturer were the same or the model
+ # // was long, Just the model, in 1.x
+ model = profile.profile.model
+ manufacturer = profile.profile.manufacturer
+
+ if not (model or manufacturer):
+ return (profile.profile.profile_description or "") + "\n"
+ if not manufacturer or len(model) > 30:
+ return model + "\n"
+ return f"{model} - {manufacturer}\n"
+
+ except (AttributeError, OSError, TypeError, ValueError) as v:
+ raise PyCMSError(v) from v
+
+
+def getProfileInfo(profile):
+ """
+ (pyCMS) Gets the internal product information for the given profile.
+
+ If ``profile`` isn't a valid CmsProfile object or filename to a profile,
+ a :exc:`PyCMSError` is raised.
+
+ If an error occurs while trying to obtain the info tag,
+ a :exc:`PyCMSError` is raised.
+
+ Use this function to obtain the information stored in the profile's
+ info tag. This often contains details about the profile, and how it
+ was created, as supplied by the creator.
+
+ :param profile: EITHER a valid CmsProfile object, OR a string of the
+ filename of an ICC profile.
+ :returns: A string containing the internal profile information stored in
+ an ICC tag.
+ :exception PyCMSError:
+ """
+
+ try:
+ if not isinstance(profile, ImageCmsProfile):
+ profile = ImageCmsProfile(profile)
+ # add an extra newline to preserve pyCMS compatibility
+ # Python, not C. the white point bits weren't working well,
+ # so skipping.
+ # info was description \r\n\r\n copyright \r\n\r\n K007 tag \r\n\r\n whitepoint
+ description = profile.profile.profile_description
+ cpright = profile.profile.copyright
+ arr = []
+ for elt in (description, cpright):
+ if elt:
+ arr.append(elt)
+ return "\r\n\r\n".join(arr) + "\r\n\r\n"
+
+ except (AttributeError, OSError, TypeError, ValueError) as v:
+ raise PyCMSError(v) from v
+
+
+def getProfileCopyright(profile):
+ """
+ (pyCMS) Gets the copyright for the given profile.
+
+ If ``profile`` isn't a valid CmsProfile object or filename to a profile, a
+ :exc:`PyCMSError` is raised.
+
+ If an error occurs while trying to obtain the copyright tag,
+ a :exc:`PyCMSError` is raised.
+
+ Use this function to obtain the information stored in the profile's
+ copyright tag.
+
+ :param profile: EITHER a valid CmsProfile object, OR a string of the
+ filename of an ICC profile.
+ :returns: A string containing the internal profile information stored in
+ an ICC tag.
+ :exception PyCMSError:
+ """
+ try:
+ # add an extra newline to preserve pyCMS compatibility
+ if not isinstance(profile, ImageCmsProfile):
+ profile = ImageCmsProfile(profile)
+ return (profile.profile.copyright or "") + "\n"
+ except (AttributeError, OSError, TypeError, ValueError) as v:
+ raise PyCMSError(v) from v
+
+
+def getProfileManufacturer(profile):
+ """
+ (pyCMS) Gets the manufacturer for the given profile.
+
+ If ``profile`` isn't a valid CmsProfile object or filename to a profile, a
+ :exc:`PyCMSError` is raised.
+
+ If an error occurs while trying to obtain the manufacturer tag, a
+ :exc:`PyCMSError` is raised.
+
+ Use this function to obtain the information stored in the profile's
+ manufacturer tag.
+
+ :param profile: EITHER a valid CmsProfile object, OR a string of the
+ filename of an ICC profile.
+ :returns: A string containing the internal profile information stored in
+ an ICC tag.
+ :exception PyCMSError:
+ """
+ try:
+ # add an extra newline to preserve pyCMS compatibility
+ if not isinstance(profile, ImageCmsProfile):
+ profile = ImageCmsProfile(profile)
+ return (profile.profile.manufacturer or "") + "\n"
+ except (AttributeError, OSError, TypeError, ValueError) as v:
+ raise PyCMSError(v) from v
+
+
+def getProfileModel(profile):
+ """
+ (pyCMS) Gets the model for the given profile.
+
+ If ``profile`` isn't a valid CmsProfile object or filename to a profile, a
+ :exc:`PyCMSError` is raised.
+
+ If an error occurs while trying to obtain the model tag,
+ a :exc:`PyCMSError` is raised.
+
+ Use this function to obtain the information stored in the profile's
+ model tag.
+
+ :param profile: EITHER a valid CmsProfile object, OR a string of the
+ filename of an ICC profile.
+ :returns: A string containing the internal profile information stored in
+ an ICC tag.
+ :exception PyCMSError:
+ """
+
+ try:
+ # add an extra newline to preserve pyCMS compatibility
+ if not isinstance(profile, ImageCmsProfile):
+ profile = ImageCmsProfile(profile)
+ return (profile.profile.model or "") + "\n"
+ except (AttributeError, OSError, TypeError, ValueError) as v:
+ raise PyCMSError(v) from v
+
+
+def getProfileDescription(profile):
+ """
+ (pyCMS) Gets the description for the given profile.
+
+ If ``profile`` isn't a valid CmsProfile object or filename to a profile, a
+ :exc:`PyCMSError` is raised.
+
+ If an error occurs while trying to obtain the description tag,
+ a :exc:`PyCMSError` is raised.
+
+ Use this function to obtain the information stored in the profile's
+ description tag.
+
+ :param profile: EITHER a valid CmsProfile object, OR a string of the
+ filename of an ICC profile.
+ :returns: A string containing the internal profile information stored in an
+ ICC tag.
+ :exception PyCMSError:
+ """
+
+ try:
+ # add an extra newline to preserve pyCMS compatibility
+ if not isinstance(profile, ImageCmsProfile):
+ profile = ImageCmsProfile(profile)
+ return (profile.profile.profile_description or "") + "\n"
+ except (AttributeError, OSError, TypeError, ValueError) as v:
+ raise PyCMSError(v) from v
+
+
+def getDefaultIntent(profile):
+ """
+ (pyCMS) Gets the default intent name for the given profile.
+
+ If ``profile`` isn't a valid CmsProfile object or filename to a profile, a
+ :exc:`PyCMSError` is raised.
+
+ If an error occurs while trying to obtain the default intent, a
+ :exc:`PyCMSError` is raised.
+
+ Use this function to determine the default (and usually best optimized)
+ rendering intent for this profile. Most profiles support multiple
+ rendering intents, but are intended mostly for one type of conversion.
+ If you wish to use a different intent than returned, use
+ ImageCms.isIntentSupported() to verify it will work first.
+
+ :param profile: EITHER a valid CmsProfile object, OR a string of the
+ filename of an ICC profile.
+ :returns: Integer 0-3 specifying the default rendering intent for this
+ profile.
+
+ ImageCms.Intent.PERCEPTUAL = 0 (DEFAULT)
+ ImageCms.Intent.RELATIVE_COLORIMETRIC = 1
+ ImageCms.Intent.SATURATION = 2
+ ImageCms.Intent.ABSOLUTE_COLORIMETRIC = 3
+
+ see the pyCMS documentation for details on rendering intents and what
+ they do.
+ :exception PyCMSError:
+ """
+
+ try:
+ if not isinstance(profile, ImageCmsProfile):
+ profile = ImageCmsProfile(profile)
+ return profile.profile.rendering_intent
+ except (AttributeError, OSError, TypeError, ValueError) as v:
+ raise PyCMSError(v) from v
+
+
+def isIntentSupported(profile, intent, direction):
+ """
+ (pyCMS) Checks if a given intent is supported.
+
+ Use this function to verify that you can use your desired
+ ``intent`` with ``profile``, and that ``profile`` can be used for the
+ input/output/proof profile as you desire.
+
+ Some profiles are created specifically for one "direction", and cannot
+ be used for others. Some profiles can only be used for certain
+ rendering intents, so it's best to either verify this before trying
+ to create a transform with them (using this function), or catch the
+ potential :exc:`PyCMSError` that will occur if they don't
+ support the modes you select.
+
+ :param profile: EITHER a valid CmsProfile object, OR a string of the
+ filename of an ICC profile.
+ :param intent: Integer (0-3) specifying the rendering intent you wish to
+ use with this profile
+
+ ImageCms.Intent.PERCEPTUAL = 0 (DEFAULT)
+ ImageCms.Intent.RELATIVE_COLORIMETRIC = 1
+ ImageCms.Intent.SATURATION = 2
+ ImageCms.Intent.ABSOLUTE_COLORIMETRIC = 3
+
+ see the pyCMS documentation for details on rendering intents and what
+ they do.
+ :param direction: Integer specifying if the profile is to be used for
+ input, output, or proof
+
+ INPUT = 0 (or use ImageCms.Direction.INPUT)
+ OUTPUT = 1 (or use ImageCms.Direction.OUTPUT)
+ PROOF = 2 (or use ImageCms.Direction.PROOF)
+
+ :returns: 1 if the intent/direction are supported, -1 if they are not.
+ :exception PyCMSError:
+ """
+
+ try:
+ if not isinstance(profile, ImageCmsProfile):
+ profile = ImageCmsProfile(profile)
+ # FIXME: I get different results for the same data w. different
+ # compilers. Bug in LittleCMS or in the binding?
+ if profile.profile.is_intent_supported(intent, direction):
+ return 1
+ else:
+ return -1
+ except (AttributeError, OSError, TypeError, ValueError) as v:
+ raise PyCMSError(v) from v
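+
+# Checking intent support before building a transform (a sketch; the
+# "printer.icc" path is illustrative):
+#
+#   ok = ImageCms.isIntentSupported(
+#       "printer.icc", ImageCms.Intent.RELATIVE_COLORIMETRIC,
+#       ImageCms.Direction.OUTPUT,
+#   )
+#   if ok == 1:
+#       transform = ImageCms.buildTransform(
+#           "sRGB.icc", "printer.icc", "RGB", "CMYK",
+#           renderingIntent=ImageCms.Intent.RELATIVE_COLORIMETRIC,
+#       )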
+
+
+def versions():
+ """
+ (pyCMS) Fetches versions.
+ """
+
+ return VERSION, core.littlecms_version, sys.version.split()[0], Image.__version__
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/ImageColor.py b/Backend/venv/lib/python3.12/site-packages/PIL/ImageColor.py
new file mode 100644
index 00000000..befc1fd1
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/PIL/ImageColor.py
@@ -0,0 +1,313 @@
+#
+# The Python Imaging Library
+# $Id$
+#
+# map CSS3-style colour description strings to RGB
+#
+# History:
+# 2002-10-24 fl Added support for CSS-style color strings
+# 2002-12-15 fl Added RGBA support
+# 2004-03-27 fl Fixed remaining int() problems for Python 1.5.2
+# 2004-07-19 fl Fixed gray/grey spelling issues
+# 2009-03-05 fl Fixed rounding error in grayscale calculation
+#
+# Copyright (c) 2002-2004 by Secret Labs AB
+# Copyright (c) 2002-2004 by Fredrik Lundh
+#
+# See the README file for information on usage and redistribution.
+#
+
+import re
+
+from . import Image
+
+
+def getrgb(color):
+ """
+ Convert a color string to an RGB or RGBA tuple. If the string cannot be
+ parsed, this function raises a :py:exc:`ValueError` exception.
+
+ .. versionadded:: 1.1.4
+
+ :param color: A color string
+ :return: ``(red, green, blue[, alpha])``
+ """
+ if len(color) > 100:
+ msg = "color specifier is too long"
+ raise ValueError(msg)
+ color = color.lower()
+
+ rgb = colormap.get(color, None)
+ if rgb:
+ if isinstance(rgb, tuple):
+ return rgb
+ colormap[color] = rgb = getrgb(rgb)
+ return rgb
+
+ # check for known string formats
+ if re.match("#[a-f0-9]{3}$", color):
+ return int(color[1] * 2, 16), int(color[2] * 2, 16), int(color[3] * 2, 16)
+
+ if re.match("#[a-f0-9]{4}$", color):
+ return (
+ int(color[1] * 2, 16),
+ int(color[2] * 2, 16),
+ int(color[3] * 2, 16),
+ int(color[4] * 2, 16),
+ )
+
+ if re.match("#[a-f0-9]{6}$", color):
+ return int(color[1:3], 16), int(color[3:5], 16), int(color[5:7], 16)
+
+ if re.match("#[a-f0-9]{8}$", color):
+ return (
+ int(color[1:3], 16),
+ int(color[3:5], 16),
+ int(color[5:7], 16),
+ int(color[7:9], 16),
+ )
+
+ m = re.match(r"rgb\(\s*(\d+)\s*,\s*(\d+)\s*,\s*(\d+)\s*\)$", color)
+ if m:
+ return int(m.group(1)), int(m.group(2)), int(m.group(3))
+
+ m = re.match(r"rgb\(\s*(\d+)%\s*,\s*(\d+)%\s*,\s*(\d+)%\s*\)$", color)
+ if m:
+ return (
+ int((int(m.group(1)) * 255) / 100.0 + 0.5),
+ int((int(m.group(2)) * 255) / 100.0 + 0.5),
+ int((int(m.group(3)) * 255) / 100.0 + 0.5),
+ )
+
+ m = re.match(
+ r"hsl\(\s*(\d+\.?\d*)\s*,\s*(\d+\.?\d*)%\s*,\s*(\d+\.?\d*)%\s*\)$", color
+ )
+ if m:
+ from colorsys import hls_to_rgb
+
+ rgb = hls_to_rgb(
+ float(m.group(1)) / 360.0,
+ float(m.group(3)) / 100.0,
+ float(m.group(2)) / 100.0,
+ )
+ return (
+ int(rgb[0] * 255 + 0.5),
+ int(rgb[1] * 255 + 0.5),
+ int(rgb[2] * 255 + 0.5),
+ )
+
+ m = re.match(
+ r"hs[bv]\(\s*(\d+\.?\d*)\s*,\s*(\d+\.?\d*)%\s*,\s*(\d+\.?\d*)%\s*\)$", color
+ )
+ if m:
+ from colorsys import hsv_to_rgb
+
+ rgb = hsv_to_rgb(
+ float(m.group(1)) / 360.0,
+ float(m.group(2)) / 100.0,
+ float(m.group(3)) / 100.0,
+ )
+ return (
+ int(rgb[0] * 255 + 0.5),
+ int(rgb[1] * 255 + 0.5),
+ int(rgb[2] * 255 + 0.5),
+ )
+
+ m = re.match(r"rgba\(\s*(\d+)\s*,\s*(\d+)\s*,\s*(\d+)\s*,\s*(\d+)\s*\)$", color)
+ if m:
+ return int(m.group(1)), int(m.group(2)), int(m.group(3)), int(m.group(4))
+ msg = f"unknown color specifier: {repr(color)}"
+ raise ValueError(msg)
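+
+# Illustrative inputs and results for the parser above:
+#
+#   getrgb("#f00")              -> (255, 0, 0)
+#   getrgb("#ff0000cc")         -> (255, 0, 0, 204)
+#   getrgb("rgb(255, 0, 0)")    -> (255, 0, 0)
+#   getrgb("rgb(100%, 0%, 0%)") -> (255, 0, 0)
+#   getrgb("hsl(0, 100%, 50%)") -> (255, 0, 0)
+#   getrgb("red")               -> (255, 0, 0)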
+
+
+def getcolor(color, mode):
+ """
+ Same as :py:func:`~PIL.ImageColor.getrgb` for most modes. However, if
+ ``mode`` is HSV, converts the RGB value to an HSV value, or if ``mode`` is
+ not a color or palette mode, converts the RGB value to a greyscale value.
+ If the string cannot be parsed, this function raises a :py:exc:`ValueError`
+ exception.
+
+ .. versionadded:: 1.1.4
+
+ :param color: A color string
+ :param mode: Convert result to this mode
+ :return: ``(graylevel[, alpha]) or (red, green, blue[, alpha])``
+ """
+ # same as getrgb, but converts the result to the given mode
+ color, alpha = getrgb(color), 255
+ if len(color) == 4:
+ color, alpha = color[:3], color[3]
+
+ if mode == "HSV":
+ from colorsys import rgb_to_hsv
+
+ r, g, b = color
+ h, s, v = rgb_to_hsv(r / 255, g / 255, b / 255)
+ return int(h * 255), int(s * 255), int(v * 255)
+ elif Image.getmodebase(mode) == "L":
+ r, g, b = color
+ # ITU-R Recommendation 601-2 for nonlinear RGB
+ # scaled to 24 bits to match the convert's implementation.
+ color = (r * 19595 + g * 38470 + b * 7471 + 0x8000) >> 16
+ if mode[-1] == "A":
+ return color, alpha
+ else:
+ if mode[-1] == "A":
+ return color + (alpha,)
+ return color
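+
+# With the ITU-R 601-2 weights above, for example:
+#
+#   getcolor("red", "L")   -> 76    # (255 * 19595 + 0x8000) >> 16
+#   getcolor("red", "LA")  -> (76, 255)
+#   getcolor("red", "HSV") -> (0, 255, 255)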
+
+
+colormap = {
+ # X11 colour table from https://drafts.csswg.org/css-color-4/, with
+ # gray/grey spelling issues fixed. This is a superset of HTML 4.0
+ # colour names used in CSS 1.
+ "aliceblue": "#f0f8ff",
+ "antiquewhite": "#faebd7",
+ "aqua": "#00ffff",
+ "aquamarine": "#7fffd4",
+ "azure": "#f0ffff",
+ "beige": "#f5f5dc",
+ "bisque": "#ffe4c4",
+ "black": "#000000",
+ "blanchedalmond": "#ffebcd",
+ "blue": "#0000ff",
+ "blueviolet": "#8a2be2",
+ "brown": "#a52a2a",
+ "burlywood": "#deb887",
+ "cadetblue": "#5f9ea0",
+ "chartreuse": "#7fff00",
+ "chocolate": "#d2691e",
+ "coral": "#ff7f50",
+ "cornflowerblue": "#6495ed",
+ "cornsilk": "#fff8dc",
+ "crimson": "#dc143c",
+ "cyan": "#00ffff",
+ "darkblue": "#00008b",
+ "darkcyan": "#008b8b",
+ "darkgoldenrod": "#b8860b",
+ "darkgray": "#a9a9a9",
+ "darkgrey": "#a9a9a9",
+ "darkgreen": "#006400",
+ "darkkhaki": "#bdb76b",
+ "darkmagenta": "#8b008b",
+ "darkolivegreen": "#556b2f",
+ "darkorange": "#ff8c00",
+ "darkorchid": "#9932cc",
+ "darkred": "#8b0000",
+ "darksalmon": "#e9967a",
+ "darkseagreen": "#8fbc8f",
+ "darkslateblue": "#483d8b",
+ "darkslategray": "#2f4f4f",
+ "darkslategrey": "#2f4f4f",
+ "darkturquoise": "#00ced1",
+ "darkviolet": "#9400d3",
+ "deeppink": "#ff1493",
+ "deepskyblue": "#00bfff",
+ "dimgray": "#696969",
+ "dimgrey": "#696969",
+ "dodgerblue": "#1e90ff",
+ "firebrick": "#b22222",
+ "floralwhite": "#fffaf0",
+ "forestgreen": "#228b22",
+ "fuchsia": "#ff00ff",
+ "gainsboro": "#dcdcdc",
+ "ghostwhite": "#f8f8ff",
+ "gold": "#ffd700",
+ "goldenrod": "#daa520",
+ "gray": "#808080",
+ "grey": "#808080",
+ "green": "#008000",
+ "greenyellow": "#adff2f",
+ "honeydew": "#f0fff0",
+ "hotpink": "#ff69b4",
+ "indianred": "#cd5c5c",
+ "indigo": "#4b0082",
+ "ivory": "#fffff0",
+ "khaki": "#f0e68c",
+ "lavender": "#e6e6fa",
+ "lavenderblush": "#fff0f5",
+ "lawngreen": "#7cfc00",
+ "lemonchiffon": "#fffacd",
+ "lightblue": "#add8e6",
+ "lightcoral": "#f08080",
+ "lightcyan": "#e0ffff",
+ "lightgoldenrodyellow": "#fafad2",
+ "lightgreen": "#90ee90",
+ "lightgray": "#d3d3d3",
+ "lightgrey": "#d3d3d3",
+ "lightpink": "#ffb6c1",
+ "lightsalmon": "#ffa07a",
+ "lightseagreen": "#20b2aa",
+ "lightskyblue": "#87cefa",
+ "lightslategray": "#778899",
+ "lightslategrey": "#778899",
+ "lightsteelblue": "#b0c4de",
+ "lightyellow": "#ffffe0",
+ "lime": "#00ff00",
+ "limegreen": "#32cd32",
+ "linen": "#faf0e6",
+ "magenta": "#ff00ff",
+ "maroon": "#800000",
+ "mediumaquamarine": "#66cdaa",
+ "mediumblue": "#0000cd",
+ "mediumorchid": "#ba55d3",
+ "mediumpurple": "#9370db",
+ "mediumseagreen": "#3cb371",
+ "mediumslateblue": "#7b68ee",
+ "mediumspringgreen": "#00fa9a",
+ "mediumturquoise": "#48d1cc",
+ "mediumvioletred": "#c71585",
+ "midnightblue": "#191970",
+ "mintcream": "#f5fffa",
+ "mistyrose": "#ffe4e1",
+ "moccasin": "#ffe4b5",
+ "navajowhite": "#ffdead",
+ "navy": "#000080",
+ "oldlace": "#fdf5e6",
+ "olive": "#808000",
+ "olivedrab": "#6b8e23",
+ "orange": "#ffa500",
+ "orangered": "#ff4500",
+ "orchid": "#da70d6",
+ "palegoldenrod": "#eee8aa",
+ "palegreen": "#98fb98",
+ "paleturquoise": "#afeeee",
+ "palevioletred": "#db7093",
+ "papayawhip": "#ffefd5",
+ "peachpuff": "#ffdab9",
+ "peru": "#cd853f",
+ "pink": "#ffc0cb",
+ "plum": "#dda0dd",
+ "powderblue": "#b0e0e6",
+ "purple": "#800080",
+ "rebeccapurple": "#663399",
+ "red": "#ff0000",
+ "rosybrown": "#bc8f8f",
+ "royalblue": "#4169e1",
+ "saddlebrown": "#8b4513",
+ "salmon": "#fa8072",
+ "sandybrown": "#f4a460",
+ "seagreen": "#2e8b57",
+ "seashell": "#fff5ee",
+ "sienna": "#a0522d",
+ "silver": "#c0c0c0",
+ "skyblue": "#87ceeb",
+ "slateblue": "#6a5acd",
+ "slategray": "#708090",
+ "slategrey": "#708090",
+ "snow": "#fffafa",
+ "springgreen": "#00ff7f",
+ "steelblue": "#4682b4",
+ "tan": "#d2b48c",
+ "teal": "#008080",
+ "thistle": "#d8bfd8",
+ "tomato": "#ff6347",
+ "turquoise": "#40e0d0",
+ "violet": "#ee82ee",
+ "wheat": "#f5deb3",
+ "white": "#ffffff",
+ "whitesmoke": "#f5f5f5",
+ "yellow": "#ffff00",
+ "yellowgreen": "#9acd32",
+}
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/ImageDraw.py b/Backend/venv/lib/python3.12/site-packages/PIL/ImageDraw.py
new file mode 100644
index 00000000..fbf320d7
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/PIL/ImageDraw.py
@@ -0,0 +1,1062 @@
+#
+# The Python Imaging Library
+# $Id$
+#
+# drawing interface operations
+#
+# History:
+# 1996-04-13 fl Created (experimental)
+# 1996-08-07 fl Filled polygons, ellipses.
+# 1996-08-13 fl Added text support
+# 1998-06-28 fl Handle I and F images
+# 1998-12-29 fl Added arc; use arc primitive to draw ellipses
+# 1999-01-10 fl Added shape stuff (experimental)
+# 1999-02-06 fl Added bitmap support
+# 1999-02-11 fl Changed all primitives to take options
+# 1999-02-20 fl Fixed backwards compatibility
+# 2000-10-12 fl Copy on write, when necessary
+# 2001-02-18 fl Use default ink for bitmap/text also in fill mode
+# 2002-10-24 fl Added support for CSS-style color strings
+# 2002-12-10 fl Added experimental support for RGBA-on-RGB drawing
+# 2002-12-11 fl Refactored low-level drawing API (work in progress)
+# 2004-08-26 fl Made Draw() a factory function, added getdraw() support
+# 2004-09-04 fl Added width support to line primitive
+# 2004-09-10 fl Added font mode handling
+# 2006-06-19 fl Added font bearing support (getmask2)
+#
+# Copyright (c) 1997-2006 by Secret Labs AB
+# Copyright (c) 1996-2006 by Fredrik Lundh
+#
+# See the README file for information on usage and redistribution.
+#
+
+import math
+import numbers
+
+from . import Image, ImageColor
+
+"""
+A simple 2D drawing interface for PIL images.
+
+Application code should use the Draw factory instead of
+instantiating ImageDraw directly.
+"""
+
+
+class ImageDraw:
+ font = None
+
+ def __init__(self, im, mode=None):
+ """
+ Create a drawing instance.
+
+ :param im: The image to draw in.
+ :param mode: Optional mode to use for color values. For RGB
+ images, this argument can be RGB or RGBA (to blend the
+ drawing into the image). For all other modes, this argument
+ must be the same as the image mode. If omitted, the mode
+ defaults to the mode of the image.
+ """
+ im.load()
+ if im.readonly:
+ im._copy() # make it writeable
+ blend = 0
+ if mode is None:
+ mode = im.mode
+ if mode != im.mode:
+ if mode == "RGBA" and im.mode == "RGB":
+ blend = 1
+ else:
+ msg = "mode mismatch"
+ raise ValueError(msg)
+ if mode == "P":
+ self.palette = im.palette
+ else:
+ self.palette = None
+ self._image = im
+ self.im = im.im
+ self.draw = Image.core.draw(self.im, blend)
+ self.mode = mode
+ if mode in ("I", "F"):
+ self.ink = self.draw.draw_ink(1)
+ else:
+ self.ink = self.draw.draw_ink(-1)
+ if mode in ("1", "P", "I", "F"):
+ # FIXME: fix Fill2 to properly support matte for I+F images
+ self.fontmode = "1"
+ else:
+ self.fontmode = "L" # aliasing is okay for other modes
+ self.fill = False
+
+ def getfont(self):
+ """
+ Get the current default font.
+
+ To set the default font for this ImageDraw instance::
+
+ from PIL import ImageDraw, ImageFont
+ draw.font = ImageFont.truetype("Tests/fonts/FreeMono.ttf")
+
+ To set the default font for all future ImageDraw instances::
+
+ from PIL import ImageDraw, ImageFont
+ ImageDraw.ImageDraw.font = ImageFont.truetype("Tests/fonts/FreeMono.ttf")
+
+ If the current default font is ``None``,
+ it is initialized with ``ImageFont.load_default()``.
+
+ :returns: An image font."""
+ if not self.font:
+ # FIXME: should add a font repository
+ from . import ImageFont
+
+ self.font = ImageFont.load_default()
+ return self.font
+
+ def _getfont(self, font_size):
+ if font_size is not None:
+ from . import ImageFont
+
+ font = ImageFont.load_default(font_size)
+ else:
+ font = self.getfont()
+ return font
+
+ def _getink(self, ink, fill=None):
+ if ink is None and fill is None:
+ if self.fill:
+ fill = self.ink
+ else:
+ ink = self.ink
+ else:
+ if ink is not None:
+ if isinstance(ink, str):
+ ink = ImageColor.getcolor(ink, self.mode)
+ if self.palette and not isinstance(ink, numbers.Number):
+ ink = self.palette.getcolor(ink, self._image)
+ ink = self.draw.draw_ink(ink)
+ if fill is not None:
+ if isinstance(fill, str):
+ fill = ImageColor.getcolor(fill, self.mode)
+ if self.palette and not isinstance(fill, numbers.Number):
+ fill = self.palette.getcolor(fill, self._image)
+ fill = self.draw.draw_ink(fill)
+ return ink, fill
+
+ def arc(self, xy, start, end, fill=None, width=1):
+ """Draw an arc."""
+ ink, fill = self._getink(fill)
+ if ink is not None:
+ self.draw.draw_arc(xy, start, end, ink, width)
+
+ def bitmap(self, xy, bitmap, fill=None):
+ """Draw a bitmap."""
+ bitmap.load()
+ ink, fill = self._getink(fill)
+ if ink is None:
+ ink = fill
+ if ink is not None:
+ self.draw.draw_bitmap(xy, bitmap.im, ink)
+
+ def chord(self, xy, start, end, fill=None, outline=None, width=1):
+ """Draw a chord."""
+ ink, fill = self._getink(outline, fill)
+ if fill is not None:
+ self.draw.draw_chord(xy, start, end, fill, 1)
+ if ink is not None and ink != fill and width != 0:
+ self.draw.draw_chord(xy, start, end, ink, 0, width)
+
+ def ellipse(self, xy, fill=None, outline=None, width=1):
+ """Draw an ellipse."""
+ ink, fill = self._getink(outline, fill)
+ if fill is not None:
+ self.draw.draw_ellipse(xy, fill, 1)
+ if ink is not None and ink != fill and width != 0:
+ self.draw.draw_ellipse(xy, ink, 0, width)
+
+ def line(self, xy, fill=None, width=0, joint=None):
+ """Draw a line, or a connected sequence of line segments."""
+ ink = self._getink(fill)[0]
+ if ink is not None:
+ self.draw.draw_lines(xy, ink, width)
+ if joint == "curve" and width > 4:
+ if not isinstance(xy[0], (list, tuple)):
+ xy = [tuple(xy[i : i + 2]) for i in range(0, len(xy), 2)]
+ for i in range(1, len(xy) - 1):
+ point = xy[i]
+ angles = [
+ math.degrees(math.atan2(end[0] - start[0], start[1] - end[1]))
+ % 360
+ for start, end in ((xy[i - 1], point), (point, xy[i + 1]))
+ ]
+ if angles[0] == angles[1]:
+ # This is a straight line, so no joint is required
+ continue
+
+ def coord_at_angle(coord, angle):
+ x, y = coord
+ angle -= 90
+ distance = width / 2 - 1
+ return tuple(
+ p + (math.floor(p_d) if p_d > 0 else math.ceil(p_d))
+ for p, p_d in (
+ (x, distance * math.cos(math.radians(angle))),
+ (y, distance * math.sin(math.radians(angle))),
+ )
+ )
+
+ flipped = (
+ angles[1] > angles[0] and angles[1] - 180 > angles[0]
+ ) or (angles[1] < angles[0] and angles[1] + 180 > angles[0])
+ coords = [
+ (point[0] - width / 2 + 1, point[1] - width / 2 + 1),
+ (point[0] + width / 2 - 1, point[1] + width / 2 - 1),
+ ]
+ if flipped:
+ start, end = (angles[1] + 90, angles[0] + 90)
+ else:
+ start, end = (angles[0] - 90, angles[1] - 90)
+ self.pieslice(coords, start - 90, end - 90, fill)
+
+ if width > 8:
+ # Cover potential gaps between the line and the joint
+ if flipped:
+ gap_coords = [
+ coord_at_angle(point, angles[0] + 90),
+ point,
+ coord_at_angle(point, angles[1] + 90),
+ ]
+ else:
+ gap_coords = [
+ coord_at_angle(point, angles[0] - 90),
+ point,
+ coord_at_angle(point, angles[1] - 90),
+ ]
+ self.line(gap_coords, fill, width=3)
+
+ def shape(self, shape, fill=None, outline=None):
+ """(Experimental) Draw a shape."""
+ shape.close()
+ ink, fill = self._getink(outline, fill)
+ if fill is not None:
+ self.draw.draw_outline(shape, fill, 1)
+ if ink is not None and ink != fill:
+ self.draw.draw_outline(shape, ink, 0)
+
+ def pieslice(self, xy, start, end, fill=None, outline=None, width=1):
+ """Draw a pieslice."""
+ ink, fill = self._getink(outline, fill)
+ if fill is not None:
+ self.draw.draw_pieslice(xy, start, end, fill, 1)
+ if ink is not None and ink != fill and width != 0:
+ self.draw.draw_pieslice(xy, start, end, ink, 0, width)
+
+ def point(self, xy, fill=None):
+ """Draw one or more individual pixels."""
+ ink, fill = self._getink(fill)
+ if ink is not None:
+ self.draw.draw_points(xy, ink)
+
+ def polygon(self, xy, fill=None, outline=None, width=1):
+ """Draw a polygon."""
+ ink, fill = self._getink(outline, fill)
+ if fill is not None:
+ self.draw.draw_polygon(xy, fill, 1)
+ if ink is not None and ink != fill and width != 0:
+ if width == 1:
+ self.draw.draw_polygon(xy, ink, 0, width)
+ else:
+ # To avoid expanding the polygon outwards,
+ # use the fill as a mask
+ mask = Image.new("1", self.im.size)
+ mask_ink = self._getink(1)[0]
+
+ fill_im = mask.copy()
+ draw = Draw(fill_im)
+ draw.draw.draw_polygon(xy, mask_ink, 1)
+
+ ink_im = mask.copy()
+ draw = Draw(ink_im)
+ width = width * 2 - 1
+ draw.draw.draw_polygon(xy, mask_ink, 0, width)
+
+ mask.paste(ink_im, mask=fill_im)
+
+ im = Image.new(self.mode, self.im.size)
+ draw = Draw(im)
+ draw.draw.draw_polygon(xy, ink, 0, width)
+ self.im.paste(im.im, (0, 0) + im.size, mask.im)
+
+ def regular_polygon(
+ self, bounding_circle, n_sides, rotation=0, fill=None, outline=None, width=1
+ ):
+ """Draw a regular polygon."""
+ xy = _compute_regular_polygon_vertices(bounding_circle, n_sides, rotation)
+ self.polygon(xy, fill, outline, width)
+
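+ # Illustrative sketch (not part of upstream Pillow): regular_polygon()
+ # computes the vertices with _compute_regular_polygon_vertices() at module
+ # level and forwards them to polygon(). "d" is assumed to be a drawing
+ # context from Draw():
+ #
+ #   d.regular_polygon(((50, 50), 40), n_sides=6, rotation=30,
+ #                     fill="yellow", outline="black")
+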
+ def rectangle(self, xy, fill=None, outline=None, width=1):
+ """Draw a rectangle."""
+ ink, fill = self._getink(outline, fill)
+ if fill is not None:
+ self.draw.draw_rectangle(xy, fill, 1)
+ if ink is not None and ink != fill and width != 0:
+ self.draw.draw_rectangle(xy, ink, 0, width)
+
+ def rounded_rectangle(
+ self, xy, radius=0, fill=None, outline=None, width=1, *, corners=None
+ ):
+ """Draw a rounded rectangle."""
+ if isinstance(xy[0], (list, tuple)):
+ (x0, y0), (x1, y1) = xy
+ else:
+ x0, y0, x1, y1 = xy
+ if x1 < x0:
+ msg = "x1 must be greater than or equal to x0"
+ raise ValueError(msg)
+ if y1 < y0:
+ msg = "y1 must be greater than or equal to y0"
+ raise ValueError(msg)
+ if corners is None:
+ corners = (True, True, True, True)
+
+ d = radius * 2
+
+ full_x, full_y = False, False
+ if all(corners):
+ full_x = d >= x1 - x0 - 1
+ if full_x:
+ # The two left and two right corners are joined
+ d = x1 - x0
+ full_y = d >= y1 - y0 - 1
+ if full_y:
+ # The two top and two bottom corners are joined
+ d = y1 - y0
+ if full_x and full_y:
+ # If all corners are joined, that is a circle
+ return self.ellipse(xy, fill, outline, width)
+
+ if d == 0 or not any(corners):
+ # If the corners have no curve,
+ # or there are no corners,
+ # that is a rectangle
+ return self.rectangle(xy, fill, outline, width)
+
+ r = d // 2
+ ink, fill = self._getink(outline, fill)
+
+ def draw_corners(pieslice):
+ if full_x:
+ # Draw top and bottom halves
+ parts = (
+ ((x0, y0, x0 + d, y0 + d), 180, 360),
+ ((x0, y1 - d, x0 + d, y1), 0, 180),
+ )
+ elif full_y:
+ # Draw left and right halves
+ parts = (
+ ((x0, y0, x0 + d, y0 + d), 90, 270),
+ ((x1 - d, y0, x1, y0 + d), 270, 90),
+ )
+ else:
+ # Draw four separate corners
+ parts = []
+ for i, part in enumerate(
+ (
+ ((x0, y0, x0 + d, y0 + d), 180, 270),
+ ((x1 - d, y0, x1, y0 + d), 270, 360),
+ ((x1 - d, y1 - d, x1, y1), 0, 90),
+ ((x0, y1 - d, x0 + d, y1), 90, 180),
+ )
+ ):
+ if corners[i]:
+ parts.append(part)
+ for part in parts:
+ if pieslice:
+ self.draw.draw_pieslice(*(part + (fill, 1)))
+ else:
+ self.draw.draw_arc(*(part + (ink, width)))
+
+ if fill is not None:
+ draw_corners(True)
+
+ if full_x:
+ self.draw.draw_rectangle((x0, y0 + r + 1, x1, y1 - r - 1), fill, 1)
+ else:
+ self.draw.draw_rectangle((x0 + r + 1, y0, x1 - r - 1, y1), fill, 1)
+ if not full_x and not full_y:
+ left = [x0, y0, x0 + r, y1]
+ if corners[0]:
+ left[1] += r + 1
+ if corners[3]:
+ left[3] -= r + 1
+ self.draw.draw_rectangle(left, fill, 1)
+
+ right = [x1 - r, y0, x1, y1]
+ if corners[1]:
+ right[1] += r + 1
+ if corners[2]:
+ right[3] -= r + 1
+ self.draw.draw_rectangle(right, fill, 1)
+ if ink is not None and ink != fill and width != 0:
+ draw_corners(False)
+
+ if not full_x:
+ top = [x0, y0, x1, y0 + width - 1]
+ if corners[0]:
+ top[0] += r + 1
+ if corners[1]:
+ top[2] -= r + 1
+ self.draw.draw_rectangle(top, ink, 1)
+
+ bottom = [x0, y1 - width + 1, x1, y1]
+ if corners[3]:
+ bottom[0] += r + 1
+ if corners[2]:
+ bottom[2] -= r + 1
+ self.draw.draw_rectangle(bottom, ink, 1)
+ if not full_y:
+ left = [x0, y0, x0 + width - 1, y1]
+ if corners[0]:
+ left[1] += r + 1
+ if corners[3]:
+ left[3] -= r + 1
+ self.draw.draw_rectangle(left, ink, 1)
+
+ right = [x1 - width + 1, y0, x1, y1]
+ if corners[1]:
+ right[1] += r + 1
+ if corners[2]:
+ right[3] -= r + 1
+ self.draw.draw_rectangle(right, ink, 1)
+
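+ # Illustrative sketch (not part of upstream Pillow): rounding only the top
+ # corners. "corners" is ordered (top-left, top-right, bottom-right,
+ # bottom-left), matching the pieslice parts above:
+ #
+ #   d.rounded_rectangle((10, 10, 90, 60), radius=12, fill="navy",
+ #                       corners=(True, True, False, False))
+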
+ def _multiline_check(self, text):
+ split_character = "\n" if isinstance(text, str) else b"\n"
+
+ return split_character in text
+
+ def _multiline_split(self, text):
+ split_character = "\n" if isinstance(text, str) else b"\n"
+
+ return text.split(split_character)
+
+ def _multiline_spacing(self, font, spacing, stroke_width):
+ return (
+ self.textbbox((0, 0), "A", font, stroke_width=stroke_width)[3]
+ + stroke_width
+ + spacing
+ )
+
+ def text(
+ self,
+ xy,
+ text,
+ fill=None,
+ font=None,
+ anchor=None,
+ spacing=4,
+ align="left",
+ direction=None,
+ features=None,
+ language=None,
+ stroke_width=0,
+ stroke_fill=None,
+ embedded_color=False,
+ *args,
+ **kwargs,
+ ):
+ """Draw text."""
+ if embedded_color and self.mode not in ("RGB", "RGBA"):
+ msg = "Embedded color supported only in RGB and RGBA modes"
+ raise ValueError(msg)
+
+ if font is None:
+ font = self._getfont(kwargs.get("font_size"))
+
+ if self._multiline_check(text):
+ return self.multiline_text(
+ xy,
+ text,
+ fill,
+ font,
+ anchor,
+ spacing,
+ align,
+ direction,
+ features,
+ language,
+ stroke_width,
+ stroke_fill,
+ embedded_color,
+ )
+
+ def getink(fill):
+ ink, fill = self._getink(fill)
+ if ink is None:
+ return fill
+ return ink
+
+ def draw_text(ink, stroke_width=0, stroke_offset=None):
+ mode = self.fontmode
+ if stroke_width == 0 and embedded_color:
+ mode = "RGBA"
+ coord = []
+ start = []
+ for i in range(2):
+ coord.append(int(xy[i]))
+ start.append(math.modf(xy[i])[0])
+ try:
+ mask, offset = font.getmask2(
+ text,
+ mode,
+ direction=direction,
+ features=features,
+ language=language,
+ stroke_width=stroke_width,
+ anchor=anchor,
+ ink=ink,
+ start=start,
+ *args,
+ **kwargs,
+ )
+ coord = coord[0] + offset[0], coord[1] + offset[1]
+ except AttributeError:
+ try:
+ mask = font.getmask(
+ text,
+ mode,
+ direction,
+ features,
+ language,
+ stroke_width,
+ anchor,
+ ink,
+ start=start,
+ *args,
+ **kwargs,
+ )
+ except TypeError:
+ mask = font.getmask(text)
+ if stroke_offset:
+ coord = coord[0] + stroke_offset[0], coord[1] + stroke_offset[1]
+ if mode == "RGBA":
+ # font.getmask2(mode="RGBA") returns color in RGB bands and mask in A
+ # extract mask and set text alpha
+ color, mask = mask, mask.getband(3)
+ color.fillband(3, (ink >> 24) & 0xFF)
+ x, y = coord
+ self.im.paste(color, (x, y, x + mask.size[0], y + mask.size[1]), mask)
+ else:
+ self.draw.draw_bitmap(coord, mask, ink)
+
+ ink = getink(fill)
+ if ink is not None:
+ stroke_ink = None
+ if stroke_width:
+ stroke_ink = getink(stroke_fill) if stroke_fill is not None else ink
+
+ if stroke_ink is not None:
+ # Draw stroked text
+ draw_text(stroke_ink, stroke_width)
+
+ # Draw normal text
+ draw_text(ink, 0)
+ else:
+ # Only draw normal text
+ draw_text(ink)
+
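+ # Illustrative sketch (not part of upstream Pillow): stroked text. Per the
+ # logic above, the stroke is drawn first and the fill text on top of it:
+ #
+ #   from PIL import ImageFont
+ #   font = ImageFont.load_default(24)
+ #   d.text((10, 10), "Hello", fill="white", font=font,
+ #          stroke_width=2, stroke_fill="black")
+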
+ def multiline_text(
+ self,
+ xy,
+ text,
+ fill=None,
+ font=None,
+ anchor=None,
+ spacing=4,
+ align="left",
+ direction=None,
+ features=None,
+ language=None,
+ stroke_width=0,
+ stroke_fill=None,
+ embedded_color=False,
+ *,
+ font_size=None,
+ ):
+ if direction == "ttb":
+ msg = "ttb direction is unsupported for multiline text"
+ raise ValueError(msg)
+
+ if anchor is None:
+ anchor = "la"
+ elif len(anchor) != 2:
+ msg = "anchor must be a 2 character string"
+ raise ValueError(msg)
+ elif anchor[1] in "tb":
+ msg = "anchor not supported for multiline text"
+ raise ValueError(msg)
+
+ if font is None:
+ font = self._getfont(font_size)
+
+ widths = []
+ max_width = 0
+ lines = self._multiline_split(text)
+ line_spacing = self._multiline_spacing(font, spacing, stroke_width)
+ for line in lines:
+ line_width = self.textlength(
+ line, font, direction=direction, features=features, language=language
+ )
+ widths.append(line_width)
+ max_width = max(max_width, line_width)
+
+ top = xy[1]
+ if anchor[1] == "m":
+ top -= (len(lines) - 1) * line_spacing / 2.0
+ elif anchor[1] == "d":
+ top -= (len(lines) - 1) * line_spacing
+
+ for idx, line in enumerate(lines):
+ left = xy[0]
+ width_difference = max_width - widths[idx]
+
+ # first align left by anchor
+ if anchor[0] == "m":
+ left -= width_difference / 2.0
+ elif anchor[0] == "r":
+ left -= width_difference
+
+ # then align by align parameter
+ if align == "left":
+ pass
+ elif align == "center":
+ left += width_difference / 2.0
+ elif align == "right":
+ left += width_difference
+ else:
+ msg = 'align must be "left", "center" or "right"'
+ raise ValueError(msg)
+
+ self.text(
+ (left, top),
+ line,
+ fill,
+ font,
+ anchor,
+ direction=direction,
+ features=features,
+ language=language,
+ stroke_width=stroke_width,
+ stroke_fill=stroke_fill,
+ embedded_color=embedded_color,
+ )
+ top += line_spacing
+
+ def textlength(
+ self,
+ text,
+ font=None,
+ direction=None,
+ features=None,
+ language=None,
+ embedded_color=False,
+ *,
+ font_size=None,
+ ):
+ """Get the length of a given string, in pixels with 1/64 precision."""
+ if self._multiline_check(text):
+ msg = "can't measure length of multiline text"
+ raise ValueError(msg)
+ if embedded_color and self.mode not in ("RGB", "RGBA"):
+ msg = "Embedded color supported only in RGB and RGBA modes"
+ raise ValueError(msg)
+
+ if font is None:
+ font = self._getfont(font_size)
+ mode = "RGBA" if embedded_color else self.fontmode
+ return font.getlength(text, mode, direction, features, language)
+
+ def textbbox(
+ self,
+ xy,
+ text,
+ font=None,
+ anchor=None,
+ spacing=4,
+ align="left",
+ direction=None,
+ features=None,
+ language=None,
+ stroke_width=0,
+ embedded_color=False,
+ *,
+ font_size=None,
+ ):
+ """Get the bounding box of a given string, in pixels."""
+ if embedded_color and self.mode not in ("RGB", "RGBA"):
+ msg = "Embedded color supported only in RGB and RGBA modes"
+ raise ValueError(msg)
+
+ if font is None:
+ font = self._getfont(font_size)
+
+ if self._multiline_check(text):
+ return self.multiline_textbbox(
+ xy,
+ text,
+ font,
+ anchor,
+ spacing,
+ align,
+ direction,
+ features,
+ language,
+ stroke_width,
+ embedded_color,
+ )
+
+ mode = "RGBA" if embedded_color else self.fontmode
+ bbox = font.getbbox(
+ text, mode, direction, features, language, stroke_width, anchor
+ )
+ return bbox[0] + xy[0], bbox[1] + xy[1], bbox[2] + xy[0], bbox[3] + xy[1]
+
+ def multiline_textbbox(
+ self,
+ xy,
+ text,
+ font=None,
+ anchor=None,
+ spacing=4,
+ align="left",
+ direction=None,
+ features=None,
+ language=None,
+ stroke_width=0,
+ embedded_color=False,
+ *,
+ font_size=None,
+ ):
+ if direction == "ttb":
+ msg = "ttb direction is unsupported for multiline text"
+ raise ValueError(msg)
+
+ if anchor is None:
+ anchor = "la"
+ elif len(anchor) != 2:
+ msg = "anchor must be a 2 character string"
+ raise ValueError(msg)
+ elif anchor[1] in "tb":
+ msg = "anchor not supported for multiline text"
+ raise ValueError(msg)
+
+ if font is None:
+ font = self._getfont(font_size)
+
+ widths = []
+ max_width = 0
+ lines = self._multiline_split(text)
+ line_spacing = self._multiline_spacing(font, spacing, stroke_width)
+ for line in lines:
+ line_width = self.textlength(
+ line,
+ font,
+ direction=direction,
+ features=features,
+ language=language,
+ embedded_color=embedded_color,
+ )
+ widths.append(line_width)
+ max_width = max(max_width, line_width)
+
+ top = xy[1]
+ if anchor[1] == "m":
+ top -= (len(lines) - 1) * line_spacing / 2.0
+ elif anchor[1] == "d":
+ top -= (len(lines) - 1) * line_spacing
+
+ bbox = None
+
+ for idx, line in enumerate(lines):
+ left = xy[0]
+ width_difference = max_width - widths[idx]
+
+ # first align left by anchor
+ if anchor[0] == "m":
+ left -= width_difference / 2.0
+ elif anchor[0] == "r":
+ left -= width_difference
+
+ # then align by align parameter
+ if align == "left":
+ pass
+ elif align == "center":
+ left += width_difference / 2.0
+ elif align == "right":
+ left += width_difference
+ else:
+ msg = 'align must be "left", "center" or "right"'
+ raise ValueError(msg)
+
+ bbox_line = self.textbbox(
+ (left, top),
+ line,
+ font,
+ anchor,
+ direction=direction,
+ features=features,
+ language=language,
+ stroke_width=stroke_width,
+ embedded_color=embedded_color,
+ )
+ if bbox is None:
+ bbox = bbox_line
+ else:
+ bbox = (
+ min(bbox[0], bbox_line[0]),
+ min(bbox[1], bbox_line[1]),
+ max(bbox[2], bbox_line[2]),
+ max(bbox[3], bbox_line[3]),
+ )
+
+ top += line_spacing
+
+ if bbox is None:
+ return xy[0], xy[1], xy[0], xy[1]
+ return bbox
+
+
+def Draw(im, mode=None):
+ """
+ A simple 2D drawing interface for PIL images.
+
+ :param im: The image to draw in.
+ :param mode: Optional mode to use for color values. For RGB
+ images, this argument can be RGB or RGBA (to blend the
+ drawing into the image). For all other modes, this argument
+ must be the same as the image mode. If omitted, the mode
+ defaults to the mode of the image.
+ """
+ try:
+ return im.getdraw(mode)
+ except AttributeError:
+ return ImageDraw(im, mode)
+
+
+# experimental access to the outline API
+try:
+ Outline = Image.core.outline
+except AttributeError:
+ Outline = None
+
+
+def getdraw(im=None, hints=None):
+ """
+ (Experimental) A more advanced 2D drawing interface for PIL images,
+ based on the WCK interface.
+
+ :param im: The image to draw in.
+ :param hints: An optional list of hints.
+ :returns: A (drawing context, drawing resource factory) tuple.
+ """
+ # FIXME: this needs more work!
+ # FIXME: come up with a better 'hints' scheme.
+ handler = None
+ if not hints or "nicest" in hints:
+ try:
+ from . import _imagingagg as handler
+ except ImportError:
+ pass
+ if handler is None:
+ from . import ImageDraw2 as handler
+ if im:
+ im = handler.Draw(im)
+ return im, handler
+
+
+def floodfill(image, xy, value, border=None, thresh=0):
+ """
+ (experimental) Fills a bounded region with a given color.
+
+ :param image: Target image.
+ :param xy: Seed position (a 2-item coordinate tuple). See
+ :ref:`coordinate-system`.
+ :param value: Fill color.
+ :param border: Optional border value. If given, the region consists of
+ pixels with a color different from the border color. If not given,
+ the region consists of pixels having the same color as the seed
+ pixel.
+ :param thresh: Optional threshold value which specifies a maximum
+ tolerable difference of a pixel value from the 'background' in
+ order for it to be replaced. Useful for filling regions of
+ non-homogeneous, but similar, colors.
+ """
+ # based on an implementation by Eric S. Raymond
+ # amended by yo1995 @20180806
+ pixel = image.load()
+ x, y = xy
+ try:
+ background = pixel[x, y]
+ if _color_diff(value, background) <= thresh:
+ return # seed point already has fill color
+ pixel[x, y] = value
+ except (ValueError, IndexError):
+ return # seed point outside image
+ edge = {(x, y)}
+ # use a set to keep record of current and previous edge pixels
+ # to reduce memory consumption
+ full_edge = set()
+ while edge:
+ new_edge = set()
+ for x, y in edge: # 4 adjacent method
+ for s, t in ((x + 1, y), (x - 1, y), (x, y + 1), (x, y - 1)):
+ # If already processed, or if a coordinate is negative, skip
+ if (s, t) in full_edge or s < 0 or t < 0:
+ continue
+ try:
+ p = pixel[s, t]
+ except (ValueError, IndexError):
+ pass
+ else:
+ full_edge.add((s, t))
+ if border is None:
+ fill = _color_diff(p, background) <= thresh
+ else:
+ fill = p != value and p != border
+ if fill:
+ pixel[s, t] = value
+ new_edge.add((s, t))
+ full_edge = edge # discard pixels processed
+ edge = new_edge
+
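+# Illustrative sketch (not part of upstream Pillow): flood-filling from a
+# seed point. "thresh" is compared against the 1-norm colour distance
+# computed by _color_diff() below:
+#
+#   from PIL import Image, ImageDraw
+#   im = Image.new("RGB", (64, 64), "white")
+#   ImageDraw.floodfill(im, (32, 32), (255, 0, 0), thresh=30)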
+
+def _compute_regular_polygon_vertices(bounding_circle, n_sides, rotation):
+ """
+ Generate a list of vertices for a 2D regular polygon.
+
+ :param bounding_circle: The bounding circle is a tuple defined
+ by a point and radius. The polygon is inscribed in this circle.
+ (e.g. ``bounding_circle=(x, y, r)`` or ``((x, y), r)``)
+ :param n_sides: Number of sides
+ (e.g. ``n_sides=3`` for a triangle, ``6`` for a hexagon)
+ :param rotation: Apply an arbitrary rotation to the polygon
+ (e.g. ``rotation=90``, applies a 90 degree rotation)
+ :return: List of regular polygon vertices
+ (e.g. ``[(25, 50), (50, 50), (50, 25), (25, 25)]``)
+
+ How are the vertices computed?
+ 1. Compute the following variables
+ - theta: Angle between the apothem & the nearest polygon vertex
+ - side_length: Length of each polygon edge
+ - centroid: Center of bounding circle (1st, 2nd elements of bounding_circle)
+ - polygon_radius: Polygon radius (last element of bounding_circle)
+ - angles: Location of each polygon vertex in polar grid
+ (e.g. A square with 0 degree rotation => [225.0, 315.0, 45.0, 135.0])
+
+ 2. For each angle in angles, get the polygon vertex at that angle
+ The vertex is computed using the equation below.
+ X = xcos(φ) + ysin(φ)
+ Y = −xsin(φ) + ycos(φ)
+
+ Note:
+ φ = angle in degrees
+ x = 0
+ y = polygon_radius
+
+ The formula above assumes rotation around the origin.
+ In our case, we are rotating around the centroid.
+ To account for this, we use the formula below
+ X = xcos(φ) + ysin(φ) + centroid_x
+ Y = −xsin(φ) + ycos(φ) + centroid_y
+ """
+ # 1. Error Handling
+ # 1.1 Check `n_sides` has an appropriate value
+ if not isinstance(n_sides, int):
+ msg = "n_sides should be an int"
+ raise TypeError(msg)
+ if n_sides < 3:
+ msg = "n_sides should be an int > 2"
+ raise ValueError(msg)
+
+ # 1.2 Check `bounding_circle` has an appropriate value
+ if not isinstance(bounding_circle, (list, tuple)):
+ msg = "bounding_circle should be a tuple"
+ raise TypeError(msg)
+
+ if len(bounding_circle) == 3:
+ *centroid, polygon_radius = bounding_circle
+ elif len(bounding_circle) == 2:
+ centroid, polygon_radius = bounding_circle
+ else:
+ msg = (
+ "bounding_circle should contain 2D coordinates "
+ "and a radius (e.g. (x, y, r) or ((x, y), r) )"
+ )
+ raise ValueError(msg)
+
+ if not all(isinstance(i, (int, float)) for i in (*centroid, polygon_radius)):
+ msg = "bounding_circle should only contain numeric data"
+ raise ValueError(msg)
+
+ if not len(centroid) == 2:
+ msg = "bounding_circle centre should contain 2D coordinates (e.g. (x, y))"
+ raise ValueError(msg)
+
+ if polygon_radius <= 0:
+ msg = "bounding_circle radius should be > 0"
+ raise ValueError(msg)
+
+ # 1.3 Check `rotation` has an appropriate value
+ if not isinstance(rotation, (int, float)):
+ msg = "rotation should be an int or float"
+ raise ValueError(msg)
+
+ # 2. Define Helper Functions
+ def _apply_rotation(point, degrees, centroid):
+ return (
+ round(
+ point[0] * math.cos(math.radians(360 - degrees))
+ - point[1] * math.sin(math.radians(360 - degrees))
+ + centroid[0],
+ 2,
+ ),
+ round(
+ point[1] * math.cos(math.radians(360 - degrees))
+ + point[0] * math.sin(math.radians(360 - degrees))
+ + centroid[1],
+ 2,
+ ),
+ )
+
+ def _compute_polygon_vertex(centroid, polygon_radius, angle):
+ start_point = [polygon_radius, 0]
+ return _apply_rotation(start_point, angle, centroid)
+
+ def _get_angles(n_sides, rotation):
+ angles = []
+ degrees = 360 / n_sides
+ # Start with the bottom left polygon vertex
+ current_angle = (270 - 0.5 * degrees) + rotation
+ for _ in range(0, n_sides):
+ angles.append(current_angle)
+ current_angle += degrees
+ if current_angle > 360:
+ current_angle -= 360
+ return angles
+
+ # 3. Variable Declarations
+ angles = _get_angles(n_sides, rotation)
+
+ # 4. Compute Vertices
+ return [
+ _compute_polygon_vertex(centroid, polygon_radius, angle) for angle in angles
+ ]
+
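+# Worked example (illustrative, not part of upstream Pillow): a square with
+# rotation=0 starts at the bottom-left vertex, 270 - 0.5 * 90 = 225 degrees,
+# so the angles are [225, 315, 45, 135]. With centre (50, 50) and radius
+# 25 * sqrt(2) ~= 35.36 this yields, after rounding to 2 decimals:
+#
+#   _compute_regular_polygon_vertices(((50, 50), 35.36), 4, 0)
+#   # [(25.0, 75.0), (75.0, 75.0), (75.0, 25.0), (25.0, 25.0)]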
+
+def _color_diff(color1, color2):
+ """
+ Uses 1-norm distance to calculate difference between two values.
+ """
+ if isinstance(color2, tuple):
+ return sum(abs(color1[i] - color2[i]) for i in range(0, len(color2)))
+ else:
+ return abs(color1 - color2)
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/ImageDraw2.py b/Backend/venv/lib/python3.12/site-packages/PIL/ImageDraw2.py
new file mode 100644
index 00000000..7ce0224a
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/PIL/ImageDraw2.py
@@ -0,0 +1,193 @@
+#
+# The Python Imaging Library
+# $Id$
+#
+# WCK-style drawing interface operations
+#
+# History:
+# 2003-12-07 fl created
+# 2005-05-15 fl updated; added to PIL as ImageDraw2
+# 2005-05-15 fl added text support
+# 2005-05-20 fl added arc/chord/pieslice support
+#
+# Copyright (c) 2003-2005 by Secret Labs AB
+# Copyright (c) 2003-2005 by Fredrik Lundh
+#
+# See the README file for information on usage and redistribution.
+#
+
+
+"""
+(Experimental) WCK-style drawing interface operations
+
+.. seealso:: :py:mod:`PIL.ImageDraw`
+"""
+
+
+from . import Image, ImageColor, ImageDraw, ImageFont, ImagePath
+
+
+class Pen:
+ """Stores an outline color and width."""
+
+ def __init__(self, color, width=1, opacity=255):
+ self.color = ImageColor.getrgb(color)
+ self.width = width
+
+
+class Brush:
+ """Stores a fill color"""
+
+ def __init__(self, color, opacity=255):
+ self.color = ImageColor.getrgb(color)
+
+
+class Font:
+ """Stores a TrueType font and color"""
+
+ def __init__(self, color, file, size=12):
+ # FIXME: add support for bitmap fonts
+ self.color = ImageColor.getrgb(color)
+ self.font = ImageFont.truetype(file, size)
+
+
+class Draw:
+ """
+ (Experimental) WCK-style drawing interface
+ """
+
+ def __init__(self, image, size=None, color=None):
+ if not hasattr(image, "im"):
+ image = Image.new(image, size, color)
+ self.draw = ImageDraw.Draw(image)
+ self.image = image
+ self.transform = None
+
+ def flush(self):
+ return self.image
+
+ def render(self, op, xy, pen, brush=None):
+ # handle color arguments
+ outline = fill = None
+ width = 1
+ if isinstance(pen, Pen):
+ outline = pen.color
+ width = pen.width
+ elif isinstance(brush, Pen):
+ outline = brush.color
+ width = brush.width
+ if isinstance(brush, Brush):
+ fill = brush.color
+ elif isinstance(pen, Brush):
+ fill = pen.color
+ # handle transformation
+ if self.transform:
+ xy = ImagePath.Path(xy)
+ xy.transform(self.transform)
+ # render the item
+ if op == "line":
+ self.draw.line(xy, fill=outline, width=width)
+ else:
+ getattr(self.draw, op)(xy, fill=fill, outline=outline)
+
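+ # Illustrative sketch (not part of upstream Pillow): a Pen supplies the
+ # outline colour and width, a Brush the fill, and render() maps both onto
+ # the underlying ImageDraw call:
+ #
+ #   from PIL import ImageDraw2
+ #   d = ImageDraw2.Draw("RGB", (100, 100), "white")
+ #   pen, brush = ImageDraw2.Pen("black", width=2), ImageDraw2.Brush("red")
+ #   d.ellipse((10, 10, 90, 90), pen, brush)
+ #   im = d.flush()
+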
+ def settransform(self, offset):
+ """Sets a transformation offset."""
+ (xoffset, yoffset) = offset
+ self.transform = (1, 0, xoffset, 0, 1, yoffset)
+
+ def arc(self, xy, start, end, *options):
+ """
+ Draws an arc (a portion of a circle outline) between the start and end
+ angles, inside the given bounding box.
+
+ .. seealso:: :py:meth:`PIL.ImageDraw.ImageDraw.arc`
+ """
+ self.render("arc", xy, start, end, *options)
+
+ def chord(self, xy, start, end, *options):
+ """
+ Same as :py:meth:`~PIL.ImageDraw2.Draw.arc`, but connects the end points
+ with a straight line.
+
+ .. seealso:: :py:meth:`PIL.ImageDraw.ImageDraw.chord`
+ """
+ self.render("chord", xy, start, end, *options)
+
+ def ellipse(self, xy, *options):
+ """
+ Draws an ellipse inside the given bounding box.
+
+ .. seealso:: :py:meth:`PIL.ImageDraw.ImageDraw.ellipse`
+ """
+ self.render("ellipse", xy, *options)
+
+ def line(self, xy, *options):
+ """
+ Draws a line between the coordinates in the ``xy`` list.
+
+ .. seealso:: :py:meth:`PIL.ImageDraw.ImageDraw.line`
+ """
+ self.render("line", xy, *options)
+
+ def pieslice(self, xy, start, end, *options):
+ """
+ Same as arc, but also draws straight lines between the end points and the
+ center of the bounding box.
+
+ .. seealso:: :py:meth:`PIL.ImageDraw.ImageDraw.pieslice`
+ """
+ self.render("pieslice", xy, start, end, *options)
+
+ def polygon(self, xy, *options):
+ """
+ Draws a polygon.
+
+ The polygon outline consists of straight lines between the given
+ coordinates, plus a straight line between the last and the first
+ coordinate.
+
+ .. seealso:: :py:meth:`PIL.ImageDraw.ImageDraw.polygon`
+ """
+ self.render("polygon", xy, *options)
+
+ def rectangle(self, xy, *options):
+ """
+ Draws a rectangle.
+
+ .. seealso:: :py:meth:`PIL.ImageDraw.ImageDraw.rectangle`
+ """
+ self.render("rectangle", xy, *options)
+
+ def text(self, xy, text, font):
+ """
+ Draws the string at the given position.
+
+ .. seealso:: :py:meth:`PIL.ImageDraw.ImageDraw.text`
+ """
+ if self.transform:
+ xy = ImagePath.Path(xy)
+ xy.transform(self.transform)
+ self.draw.text(xy, text, font=font.font, fill=font.color)
+
+ def textbbox(self, xy, text, font):
+ """
+ Returns bounding box (in pixels) of given text.
+
+ :return: ``(left, top, right, bottom)`` bounding box
+
+ .. seealso:: :py:meth:`PIL.ImageDraw.ImageDraw.textbbox`
+ """
+ if self.transform:
+ xy = ImagePath.Path(xy)
+ xy.transform(self.transform)
+ return self.draw.textbbox(xy, text, font=font.font)
+
+ def textlength(self, text, font):
+ """
+ Returns length (in pixels) of given text.
+ This is the amount by which following text should be offset.
+
+ .. seealso:: :py:meth:`PIL.ImageDraw.ImageDraw.textlength`
+ """
+ return self.draw.textlength(text, font=font.font)
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/ImageEnhance.py b/Backend/venv/lib/python3.12/site-packages/PIL/ImageEnhance.py
new file mode 100644
index 00000000..3b79d5c4
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/PIL/ImageEnhance.py
@@ -0,0 +1,103 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# image enhancement classes
+#
+# For a background, see "Image Processing By Interpolation and
+# Extrapolation", Paul Haeberli and Douglas Voorhies. Available
+# at http://www.graficaobscura.com/interp/index.html
+#
+# History:
+# 1996-03-23 fl Created
+# 2009-06-16 fl Fixed mean calculation
+#
+# Copyright (c) Secret Labs AB 1997.
+# Copyright (c) Fredrik Lundh 1996.
+#
+# See the README file for information on usage and redistribution.
+#
+
+from . import Image, ImageFilter, ImageStat
+
+
+class _Enhance:
+ def enhance(self, factor):
+ """
+ Returns an enhanced image.
+
+ :param factor: A floating point value controlling the enhancement.
+ Factor 1.0 always returns a copy of the original image,
+ lower factors mean less color (brightness, contrast,
+ etc), and higher values more. There are no restrictions
+ on this value.
+ :rtype: :py:class:`~PIL.Image.Image`
+ """
+ return Image.blend(self.degenerate, self.image, factor)
+
+
+class Color(_Enhance):
+ """Adjust image color balance.
+
+ This class can be used to adjust the colour balance of an image, in
+ a manner similar to the controls on a colour TV set. An enhancement
+ factor of 0.0 gives a black and white image. A factor of 1.0 gives
+ the original image.
+ """
+
+ def __init__(self, image):
+ self.image = image
+ self.intermediate_mode = "L"
+ if "A" in image.getbands():
+ self.intermediate_mode = "LA"
+
+ self.degenerate = image.convert(self.intermediate_mode).convert(image.mode)
+
+
+class Contrast(_Enhance):
+ """Adjust image contrast.
+
+ This class can be used to control the contrast of an image, similar
+ to the contrast control on a TV set. An enhancement factor of 0.0
+ gives a solid grey image. A factor of 1.0 gives the original image.
+ """
+
+ def __init__(self, image):
+ self.image = image
+ mean = int(ImageStat.Stat(image.convert("L")).mean[0] + 0.5)
+ self.degenerate = Image.new("L", image.size, mean).convert(image.mode)
+
+ if "A" in image.getbands():
+ self.degenerate.putalpha(image.getchannel("A"))
+
+
+class Brightness(_Enhance):
+ """Adjust image brightness.
+
+ This class can be used to control the brightness of an image. An
+ enhancement factor of 0.0 gives a black image. A factor of 1.0 gives the
+ original image.
+ """
+
+ def __init__(self, image):
+ self.image = image
+ self.degenerate = Image.new(image.mode, image.size, 0)
+
+ if "A" in image.getbands():
+ self.degenerate.putalpha(image.getchannel("A"))
+
+
+class Sharpness(_Enhance):
+ """Adjust image sharpness.
+
+ This class can be used to adjust the sharpness of an image. An
+ enhancement factor of 0.0 gives a blurred image, a factor of 1.0 gives the
+ original image, and a factor of 2.0 gives a sharpened image.
+ """
+
+ def __init__(self, image):
+ self.image = image
+ self.degenerate = image.filter(ImageFilter.SMOOTH)
+
+ if "A" in image.getbands():
+ self.degenerate.putalpha(image.getchannel("A"))
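+
+
+# Illustrative usage sketch (not part of upstream Pillow): every enhancer
+# blends the original with its "degenerate" image, so factors above 1.0
+# extrapolate in the opposite direction. "photo.jpg" is a hypothetical file:
+#
+#   from PIL import Image, ImageEnhance
+#   im = Image.open("photo.jpg")
+#   im = ImageEnhance.Contrast(im).enhance(1.3)   # +30% contrast
+#   im = ImageEnhance.Sharpness(im).enhance(2.0)  # sharpen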
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/ImageFile.py b/Backend/venv/lib/python3.12/site-packages/PIL/ImageFile.py
new file mode 100644
index 00000000..8e4f7dfb
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/PIL/ImageFile.py
@@ -0,0 +1,773 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# base class for image file handlers
+#
+# history:
+# 1995-09-09 fl Created
+# 1996-03-11 fl Fixed load mechanism.
+# 1996-04-15 fl Added pcx/xbm decoders.
+# 1996-04-30 fl Added encoders.
+# 1996-12-14 fl Added load helpers
+# 1997-01-11 fl Use encode_to_file where possible
+# 1997-08-27 fl Flush output in _save
+# 1998-03-05 fl Use memory mapping for some modes
+# 1999-02-04 fl Use memory mapping also for "I;16" and "I;16B"
+# 1999-05-31 fl Added image parser
+# 2000-10-12 fl Set readonly flag on memory-mapped images
+# 2002-03-20 fl Use better messages for common decoder errors
+# 2003-04-21 fl Fall back on mmap/map_buffer if map is not available
+# 2003-10-30 fl Added StubImageFile class
+# 2004-02-25 fl Made incremental parser more robust
+#
+# Copyright (c) 1997-2004 by Secret Labs AB
+# Copyright (c) 1995-2004 by Fredrik Lundh
+#
+# See the README file for information on usage and redistribution.
+#
+
+import io
+import itertools
+import struct
+import sys
+
+from . import Image
+from ._util import is_path
+
+MAXBLOCK = 65536
+
+SAFEBLOCK = 1024 * 1024
+
+LOAD_TRUNCATED_IMAGES = False
+"""Whether or not to load truncated image files. User code may change this."""
+
+ERRORS = {
+ -1: "image buffer overrun error",
+ -2: "decoding error",
+ -3: "unknown error",
+ -8: "bad configuration",
+ -9: "out of memory error",
+}
+"""
+Dict of known error codes returned from :meth:`.PyDecoder.decode`,
+:meth:`.PyEncoder.encode`, :meth:`.PyEncoder.encode_to_pyfd` and
+:meth:`.PyEncoder.encode_to_file`.
+"""
+
+
+#
+# --------------------------------------------------------------------
+# Helpers
+
+
+def raise_oserror(error):
+ try:
+ msg = Image.core.getcodecstatus(error)
+ except AttributeError:
+ msg = ERRORS.get(error)
+ if not msg:
+ msg = f"decoder error {error}"
+ msg += " when reading image file"
+ raise OSError(msg)
+
+
+def _tilesort(t):
+ # sort on offset
+ return t[2]
+
+
+#
+# --------------------------------------------------------------------
+# ImageFile base class
+
+
+class ImageFile(Image.Image):
+ """Base class for image file format handlers."""
+
+ def __init__(self, fp=None, filename=None):
+ super().__init__()
+
+ self._min_frame = 0
+
+ self.custom_mimetype = None
+
+ self.tile = None
+ """ A list of tile descriptors, or ``None`` """
+
+ self.readonly = 1 # until we know better
+
+ self.decoderconfig = ()
+ self.decodermaxblock = MAXBLOCK
+
+ if is_path(fp):
+ # filename
+ self.fp = open(fp, "rb")
+ self.filename = fp
+ self._exclusive_fp = True
+ else:
+ # stream
+ self.fp = fp
+ self.filename = filename
+ # can be overridden
+ self._exclusive_fp = None
+
+ try:
+ try:
+ self._open()
+ except (
+ IndexError, # end of data
+ TypeError, # end of data (ord)
+ KeyError, # unsupported mode
+ EOFError, # got header but not the first frame
+ struct.error,
+ ) as v:
+ raise SyntaxError(v) from v
+
+ if not self.mode or self.size[0] <= 0 or self.size[1] <= 0:
+ msg = "not identified by this driver"
+ raise SyntaxError(msg)
+ except BaseException:
+ # close the file only if we have opened it in this constructor
+ if self._exclusive_fp:
+ self.fp.close()
+ raise
+
+ def get_format_mimetype(self):
+ if self.custom_mimetype:
+ return self.custom_mimetype
+ if self.format is not None:
+ return Image.MIME.get(self.format.upper())
+
+ def __setstate__(self, state):
+ self.tile = []
+ super().__setstate__(state)
+
+ def verify(self):
+ """Check file integrity"""
+
+ # raise exception if something's wrong. must be called
+ # directly after open, and closes file when finished.
+ if self._exclusive_fp:
+ self.fp.close()
+ self.fp = None
+
+ def load(self):
+ """Load image data based on tile list"""
+
+ if self.tile is None:
+ msg = "cannot load this image"
+ raise OSError(msg)
+
+ pixel = Image.Image.load(self)
+ if not self.tile:
+ return pixel
+
+ self.map = None
+ use_mmap = self.filename and len(self.tile) == 1
+ # As of pypy 2.1.0, memory mapping was failing here.
+ use_mmap = use_mmap and not hasattr(sys, "pypy_version_info")
+
+ readonly = 0
+
+ # look for read/seek overrides
+ try:
+ read = self.load_read
+ # don't use mmap if there are custom read/seek functions
+ use_mmap = False
+ except AttributeError:
+ read = self.fp.read
+
+ try:
+ seek = self.load_seek
+ use_mmap = False
+ except AttributeError:
+ seek = self.fp.seek
+
+ if use_mmap:
+ # try memory mapping
+ decoder_name, extents, offset, args = self.tile[0]
+ if (
+ decoder_name == "raw"
+ and len(args) >= 3
+ and args[0] == self.mode
+ and args[0] in Image._MAPMODES
+ ):
+ try:
+ # use mmap, if possible
+ import mmap
+
+ with open(self.filename) as fp:
+ self.map = mmap.mmap(fp.fileno(), 0, access=mmap.ACCESS_READ)
+ if offset + self.size[1] * args[1] > self.map.size():
+ # buffer is not large enough
+ raise OSError
+ self.im = Image.core.map_buffer(
+ self.map, self.size, decoder_name, offset, args
+ )
+ readonly = 1
+ # After trashing self.im,
+ # we might need to reload the palette data.
+ if self.palette:
+ self.palette.dirty = 1
+ except (AttributeError, OSError, ImportError):
+ self.map = None
+
+ self.load_prepare()
+ err_code = -3 # initialize to unknown error
+ if not self.map:
+ # sort tiles in file order
+ self.tile.sort(key=_tilesort)
+
+ try:
+ # FIXME: This is a hack to handle TIFF's JpegTables tag.
+ prefix = self.tile_prefix
+ except AttributeError:
+ prefix = b""
+
+ # Remove consecutive duplicates that only differ by their offset
+ self.tile = [
+ list(tiles)[-1]
+ for _, tiles in itertools.groupby(
+ self.tile, lambda tile: (tile[0], tile[1], tile[3])
+ )
+ ]
+ for decoder_name, extents, offset, args in self.tile:
+ seek(offset)
+ decoder = Image._getdecoder(
+ self.mode, decoder_name, args, self.decoderconfig
+ )
+ try:
+ decoder.setimage(self.im, extents)
+ if decoder.pulls_fd:
+ decoder.setfd(self.fp)
+ err_code = decoder.decode(b"")[1]
+ else:
+ b = prefix
+ while True:
+ try:
+ s = read(self.decodermaxblock)
+ except (IndexError, struct.error) as e:
+ # truncated png/gif
+ if LOAD_TRUNCATED_IMAGES:
+ break
+ else:
+ msg = "image file is truncated"
+ raise OSError(msg) from e
+
+ if not s: # truncated jpeg
+ if LOAD_TRUNCATED_IMAGES:
+ break
+ else:
+ msg = (
+ "image file is truncated "
+ f"({len(b)} bytes not processed)"
+ )
+ raise OSError(msg)
+
+ b = b + s
+ n, err_code = decoder.decode(b)
+ if n < 0:
+ break
+ b = b[n:]
+ finally:
+ # Need to cleanup here to prevent leaks
+ decoder.cleanup()
+
+ self.tile = []
+ self.readonly = readonly
+
+ self.load_end()
+
+ if self._exclusive_fp and self._close_exclusive_fp_after_loading:
+ self.fp.close()
+ self.fp = None
+
+ if not self.map and not LOAD_TRUNCATED_IMAGES and err_code < 0:
+ # still raised if decoder fails to return anything
+ raise_oserror(err_code)
+
+ return Image.Image.load(self)
+
+ def load_prepare(self):
+ # create image memory if necessary
+ if not self.im or self.im.mode != self.mode or self.im.size != self.size:
+ self.im = Image.core.new(self.mode, self.size)
+ # create palette (optional)
+ if self.mode == "P":
+ Image.Image.load(self)
+
+ def load_end(self):
+ # may be overridden
+ pass
+
+ # may be defined for contained formats
+ # def load_seek(self, pos):
+ # pass
+
+ # may be defined for blocked formats (e.g. PNG)
+ # def load_read(self, bytes):
+ # pass
+
+ def _seek_check(self, frame):
+ if (
+ frame < self._min_frame
+ # Only check upper limit on frames if additional seek operations
+ # are not required to do so
+ or (
+ not (hasattr(self, "_n_frames") and self._n_frames is None)
+ and frame >= self.n_frames + self._min_frame
+ )
+ ):
+ msg = "attempt to seek outside sequence"
+ raise EOFError(msg)
+
+ return self.tell() != frame
+
+
+class StubImageFile(ImageFile):
+ """
+ Base class for stub image loaders.
+
+ A stub loader is an image loader that can identify files of a
+ certain format, but relies on external code to load the file.
+ """
+
+ def _open(self):
+ msg = "StubImageFile subclass must implement _open"
+ raise NotImplementedError(msg)
+
+ def load(self):
+ loader = self._load()
+ if loader is None:
+ msg = f"cannot find loader for this {self.format} file"
+ raise OSError(msg)
+ image = loader.load(self)
+ assert image is not None
+ # become the other object (!)
+ self.__class__ = image.__class__
+ self.__dict__ = image.__dict__
+ return image.load()
+
+ def _load(self):
+ """(Hook) Find actual image loader."""
+ msg = "StubImageFile subclass must implement _load"
+ raise NotImplementedError(msg)
+
+
+class Parser:
+ """
+ Incremental image parser. This class implements the standard
+ feed/close consumer interface.
+ """
+
+ incremental = None
+ image = None
+ data = None
+ decoder = None
+ offset = 0
+ finished = 0
+
+ def reset(self):
+ """
+ (Consumer) Reset the parser. Note that you can only call this
+ method immediately after you've created a parser; parser
+ instances cannot be reused.
+ """
+ assert self.data is None, "cannot reuse parsers"
+
+ def feed(self, data):
+ """
+ (Consumer) Feed data to the parser.
+
+ :param data: A string buffer.
+ :exception OSError: If the parser failed to parse the image file.
+ """
+ # collect data
+
+ if self.finished:
+ return
+
+ if self.data is None:
+ self.data = data
+ else:
+ self.data = self.data + data
+
+ # parse what we have
+ if self.decoder:
+ if self.offset > 0:
+ # skip header
+ skip = min(len(self.data), self.offset)
+ self.data = self.data[skip:]
+ self.offset = self.offset - skip
+ if self.offset > 0 or not self.data:
+ return
+
+ n, e = self.decoder.decode(self.data)
+
+ if n < 0:
+ # end of stream
+ self.data = None
+ self.finished = 1
+ if e < 0:
+ # decoding error
+ self.image = None
+ raise_oserror(e)
+ else:
+ # end of image
+ return
+ self.data = self.data[n:]
+
+ elif self.image:
+ # if we end up here with no decoder, this file cannot
+ # be incrementally parsed. wait until we've gotten all
+ # available data
+ pass
+
+ else:
+ # attempt to open this file
+ try:
+ with io.BytesIO(self.data) as fp:
+ im = Image.open(fp)
+ except OSError:
+ # traceback.print_exc()
+ pass # not enough data
+ else:
+ flag = hasattr(im, "load_seek") or hasattr(im, "load_read")
+ if flag or len(im.tile) != 1:
+ # custom load code, or multiple tiles
+ self.decoder = None
+ else:
+ # initialize decoder
+ im.load_prepare()
+ d, e, o, a = im.tile[0]
+ im.tile = []
+ self.decoder = Image._getdecoder(im.mode, d, a, im.decoderconfig)
+ self.decoder.setimage(im.im, e)
+
+ # calculate decoder offset
+ self.offset = o
+ if self.offset <= len(self.data):
+ self.data = self.data[self.offset :]
+ self.offset = 0
+
+ self.image = im
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, *args):
+ self.close()
+
+ def close(self):
+ """
+ (Consumer) Close the stream.
+
+ :returns: An image object.
+ :exception OSError: If the parser failed to parse the image file either
+ because it cannot be identified or cannot be
+ decoded.
+ """
+ # finish decoding
+ if self.decoder:
+ # get rid of what's left in the buffers
+ self.feed(b"")
+ self.data = self.decoder = None
+ if not self.finished:
+ msg = "image was incomplete"
+ raise OSError(msg)
+ if not self.image:
+ msg = "cannot parse this image"
+ raise OSError(msg)
+ if self.data:
+ # incremental parsing not possible; reopen the file
+ # now that we have all data
+ with io.BytesIO(self.data) as fp:
+ try:
+ self.image = Image.open(fp)
+ finally:
+ self.image.load()
+ return self.image
+
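+# Illustrative sketch (not part of upstream Pillow): feeding a byte stream
+# to the parser in SAFEBLOCK-sized chunks; close() returns the image or
+# raises OSError if the data was incomplete. "image.png" is hypothetical:
+#
+#   p = ImageFile.Parser()
+#   with open("image.png", "rb") as f:
+#       while chunk := f.read(ImageFile.SAFEBLOCK):
+#           p.feed(chunk)
+#   im = p.close()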
+
+# --------------------------------------------------------------------
+
+
+def _save(im, fp, tile, bufsize=0):
+ """Helper to save image based on tile list
+
+ :param im: Image object.
+ :param fp: File object.
+ :param tile: Tile list.
+ :param bufsize: Optional buffer size
+ """
+
+ im.load()
+ if not hasattr(im, "encoderconfig"):
+ im.encoderconfig = ()
+ tile.sort(key=_tilesort)
+ # FIXME: make MAXBLOCK a configuration parameter
+ # It would be great if we could have the encoder specify what it needs
+ # But, it would need at least the image size in most cases. RawEncode is
+ # a tricky case.
+ bufsize = max(MAXBLOCK, bufsize, im.size[0] * 4) # see RawEncode.c
+ try:
+ fh = fp.fileno()
+ fp.flush()
+ _encode_tile(im, fp, tile, bufsize, fh)
+ except (AttributeError, io.UnsupportedOperation) as exc:
+ _encode_tile(im, fp, tile, bufsize, None, exc)
+ if hasattr(fp, "flush"):
+ fp.flush()
+
+
+def _encode_tile(im, fp, tile, bufsize, fh, exc=None):
+ for e, b, o, a in tile:
+ if o > 0:
+ fp.seek(o)
+ encoder = Image._getencoder(im.mode, e, a, im.encoderconfig)
+ try:
+ encoder.setimage(im.im, b)
+ if encoder.pushes_fd:
+ encoder.setfd(fp)
+ errcode = encoder.encode_to_pyfd()[1]
+ else:
+ if exc:
+ # compress to Python file-compatible object
+ while True:
+ errcode, data = encoder.encode(bufsize)[1:]
+ fp.write(data)
+ if errcode:
+ break
+ else:
+ # slight speedup: compress to real file object
+ errcode = encoder.encode_to_file(fh, bufsize)
+ if errcode < 0:
+ msg = f"encoder error {errcode} when writing image file"
+ raise OSError(msg) from exc
+ finally:
+ encoder.cleanup()
+
+
+def _safe_read(fp, size):
+ """
+ Reads large blocks in a safe way. Unlike fp.read(n), this function
+ doesn't trust the user. If the requested size is larger than
+ SAFEBLOCK, the file is read block by block.
+
+ :param fp: File handle. Must implement a read method.
+ :param size: Number of bytes to read.
+ :returns: A string containing size bytes of data.
+
+ Raises an OSError if the file is truncated and the read cannot be completed
+
+ """
+ if size <= 0:
+ return b""
+ if size <= SAFEBLOCK:
+ data = fp.read(size)
+ if len(data) < size:
+ msg = "Truncated File Read"
+ raise OSError(msg)
+ return data
+ data = []
+ remaining_size = size
+ while remaining_size > 0:
+ block = fp.read(min(remaining_size, SAFEBLOCK))
+ if not block:
+ break
+ data.append(block)
+ remaining_size -= len(block)
+ if sum(len(d) for d in data) < size:
+ msg = "Truncated File Read"
+ raise OSError(msg)
+ return b"".join(data)
+
+
+class PyCodecState:
+ def __init__(self):
+ self.xsize = 0
+ self.ysize = 0
+ self.xoff = 0
+ self.yoff = 0
+
+ def extents(self):
+ return self.xoff, self.yoff, self.xoff + self.xsize, self.yoff + self.ysize
+
+
+class PyCodec:
+ def __init__(self, mode, *args):
+ self.im = None
+ self.state = PyCodecState()
+ self.fd = None
+ self.mode = mode
+ self.init(args)
+
+ def init(self, args):
+ """
+ Override to perform codec specific initialization
+
+ :param args: Array of args items from the tile entry
+ :returns: None
+ """
+ self.args = args
+
+ def cleanup(self):
+ """
+ Override to perform codec specific cleanup
+
+ :returns: None
+ """
+ pass
+
+ def setfd(self, fd):
+ """
+ Called from ImageFile to set the Python file-like object
+
+ :param fd: A Python file-like object
+ :returns: None
+ """
+ self.fd = fd
+
+ def setimage(self, im, extents=None):
+ """
+ Called from ImageFile to set the core output image for the codec
+
+ :param im: A core image object
+ :param extents: a 4 tuple of (x0, y0, x1, y1) defining the rectangle
+ for this tile
+ :returns: None
+ """
+
+ # following the C implementation
+ self.im = im
+
+ if extents:
+ (x0, y0, x1, y1) = extents
+ else:
+ (x0, y0, x1, y1) = (0, 0, 0, 0)
+
+ if x0 == 0 and x1 == 0:
+ self.state.xsize, self.state.ysize = self.im.size
+ else:
+ self.state.xoff = x0
+ self.state.yoff = y0
+ self.state.xsize = x1 - x0
+ self.state.ysize = y1 - y0
+
+ if self.state.xsize <= 0 or self.state.ysize <= 0:
+ msg = "Size cannot be negative"
+ raise ValueError(msg)
+
+ if (
+ self.state.xsize + self.state.xoff > self.im.size[0]
+ or self.state.ysize + self.state.yoff > self.im.size[1]
+ ):
+ msg = "Tile cannot extend outside image"
+ raise ValueError(msg)
+
+
+class PyDecoder(PyCodec):
+ """
+ Python implementation of a format decoder. Override this class and
+ add the decoding logic in the :meth:`decode` method.
+
+ See :ref:`Writing Your Own File Codec in Python`
+ """
+
+ _pulls_fd = False
+
+ @property
+ def pulls_fd(self):
+ return self._pulls_fd
+
+ def decode(self, buffer):
+ """
+ Override to perform the decoding process.
+
+ :param buffer: A bytes object with the data to be decoded.
+ :returns: A tuple of ``(bytes consumed, errcode)``.
+ If finished with decoding return -1 for the bytes consumed.
+ Err codes are from :data:`.ImageFile.ERRORS`.
+ """
+ raise NotImplementedError()
+
+ def set_as_raw(self, data, rawmode=None):
+ """
+ Convenience method to set the internal image from a stream of raw data
+
+ :param data: Bytes to be set
+ :param rawmode: The rawmode to be used for the decoder.
+ If not specified, it will default to the mode of the image
+ :returns: None
+ """
+
+ if not rawmode:
+ rawmode = self.mode
+ d = Image._getdecoder(self.mode, "raw", rawmode)
+ d.setimage(self.im, self.state.extents())
+ s = d.decode(data)
+
+ if s[0] >= 0:
+ msg = "not enough image data"
+ raise ValueError(msg)
+ if s[1] != 0:
+ msg = "cannot decode image data"
+ raise ValueError(msg)
+
+
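+# Minimal sketch of a custom decoder (illustrative, not part of upstream
+# Pillow): with _pulls_fd set, decode() is called once and may read the
+# file object directly, handing raw pixel data to set_as_raw(). The name
+# "MyRawDecoder" and its registration are assumptions:
+#
+#   class MyRawDecoder(ImageFile.PyDecoder):
+#       _pulls_fd = True
+#
+#       def decode(self, buffer):
+#           self.set_as_raw(self.fd.read())
+#           return -1, 0
+#
+#   Image.register_decoder("my_raw", MyRawDecoder)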
+class PyEncoder(PyCodec):
+ """
+ Python implementation of a format encoder. Override this class and
+ add the encoding logic in the :meth:`encode` method.
+
+ See :ref:`Writing Your Own File Codec in Python`
+ """
+
+ _pushes_fd = False
+
+ @property
+ def pushes_fd(self):
+ return self._pushes_fd
+
+ def encode(self, bufsize):
+ """
+ Override to perform the encoding process.
+
+ :param bufsize: Buffer size.
+ :returns: A tuple of ``(bytes encoded, errcode, bytes)``.
+ If finished with encoding return 1 for the error code.
+ Err codes are from :data:`.ImageFile.ERRORS`.
+ """
+ raise NotImplementedError()
+
+ def encode_to_pyfd(self):
+ """
+ If ``pushes_fd`` is ``True``, then this method will be used,
+ and ``encode()`` will only be called once.
+
+ :returns: A tuple of ``(bytes consumed, errcode)``.
+ Err codes are from :data:`.ImageFile.ERRORS`.
+ """
+ if not self.pushes_fd:
+ return 0, -8 # bad configuration
+ bytes_consumed, errcode, data = self.encode(0)
+ if data:
+ self.fd.write(data)
+ return bytes_consumed, errcode
+
+ def encode_to_file(self, fh, bufsize):
+ """
+ :param fh: File handle.
+ :param bufsize: Buffer size.
+
+ :returns: If finished successfully, return 0.
+ Otherwise, return an error code. Err codes are from
+ :data:`.ImageFile.ERRORS`.
+ """
+ errcode = 0
+ while errcode == 0:
+ status, errcode, buf = self.encode(bufsize)
+ if status > 0:
+ fh.write(buf[status:])
+ return errcode
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/ImageFilter.py b/Backend/venv/lib/python3.12/site-packages/PIL/ImageFilter.py
new file mode 100644
index 00000000..57268b8f
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/PIL/ImageFilter.py
@@ -0,0 +1,566 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# standard filters
+#
+# History:
+# 1995-11-27 fl Created
+# 2002-06-08 fl Added rank and mode filters
+# 2003-09-15 fl Fixed rank calculation in rank filter; added expand call
+#
+# Copyright (c) 1997-2003 by Secret Labs AB.
+# Copyright (c) 1995-2002 by Fredrik Lundh.
+#
+# See the README file for information on usage and redistribution.
+#
+import functools
+
+
+class Filter:
+ pass
+
+
+class MultibandFilter(Filter):
+ pass
+
+
+class BuiltinFilter(MultibandFilter):
+ def filter(self, image):
+ if image.mode == "P":
+ msg = "cannot filter palette images"
+ raise ValueError(msg)
+ return image.filter(*self.filterargs)
+
+
+class Kernel(BuiltinFilter):
+ """
+ Create a convolution kernel. The current version only
+ supports 3x3 and 5x5 integer and floating point kernels.
+
+ In the current version, kernels can only be applied to
+ "L" and "RGB" images.
+
+ :param size: Kernel size, given as (width, height). In the current
+ version, this must be (3,3) or (5,5).
+ :param kernel: A sequence containing kernel weights. The kernel will
+ be flipped vertically before being applied to the image.
+ :param scale: Scale factor. If given, the result for each pixel is
+ divided by this value. The default is the sum of the
+ kernel weights.
+ :param offset: Offset. If given, this value is added to the result,
+ after it has been divided by the scale factor.
+ """
+
+ name = "Kernel"
+
+ def __init__(self, size, kernel, scale=None, offset=0):
+ if scale is None:
+ # default scale is sum of kernel
+ scale = functools.reduce(lambda a, b: a + b, kernel)
+ if size[0] * size[1] != len(kernel):
+ msg = "not enough coefficients in kernel"
+ raise ValueError(msg)
+ self.filterargs = size, scale, offset, kernel
+
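+# Illustrative sketch (not part of upstream Pillow): a 3x3 sharpening
+# kernel. With scale omitted it defaults to the sum of the weights (1 here),
+# preserving overall brightness. "im" is an open "L" or "RGB" image:
+#
+#   k = Kernel((3, 3), (0, -1, 0,
+#                       -1, 5, -1,
+#                       0, -1, 0))
+#   sharpened = im.filter(k)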
+
+class RankFilter(Filter):
+ """
+ Create a rank filter. The rank filter sorts all pixels in
+ a window of the given size, and returns the ``rank``'th value.
+
+ :param size: The kernel size, in pixels.
+ :param rank: What pixel value to pick. Use 0 for a min filter,
+ ``size * size / 2`` for a median filter, ``size * size - 1``
+ for a max filter, etc.
+ """
+
+ name = "Rank"
+
+ def __init__(self, size, rank):
+ self.size = size
+ self.rank = rank
+
+ def filter(self, image):
+ if image.mode == "P":
+ msg = "cannot filter palette images"
+ raise ValueError(msg)
+ image = image.expand(self.size // 2, self.size // 2)
+ return image.rankfilter(self.size, self.rank)
+
+
+class MedianFilter(RankFilter):
+ """
+ Create a median filter. Picks the median pixel value in a window with the
+ given size.
+
+ :param size: The kernel size, in pixels.
+ """
+
+ name = "Median"
+
+ def __init__(self, size=3):
+ self.size = size
+ self.rank = size * size // 2
+
+
+class MinFilter(RankFilter):
+ """
+ Create a min filter. Picks the lowest pixel value in a window with the
+ given size.
+
+ :param size: The kernel size, in pixels.
+ """
+
+ name = "Min"
+
+ def __init__(self, size=3):
+ self.size = size
+ self.rank = 0
+
+
+class MaxFilter(RankFilter):
+ """
+ Create a max filter. Picks the largest pixel value in a window with the
+ given size.
+
+ :param size: The kernel size, in pixels.
+ """
+
+ name = "Max"
+
+ def __init__(self, size=3):
+ self.size = size
+ self.rank = size * size - 1
+
+
+class ModeFilter(Filter):
+ """
+ Create a mode filter. Picks the most frequent pixel value in a box with the
+ given size. Pixel values that occur only once or twice are ignored; if no
+ pixel value occurs more than twice, the original pixel value is preserved.
+
+ :param size: The kernel size, in pixels.
+ """
+
+ name = "Mode"
+
+ def __init__(self, size=3):
+ self.size = size
+
+ def filter(self, image):
+ return image.modefilter(self.size)
+
+
+class GaussianBlur(MultibandFilter):
+ """Blurs the image with a sequence of extended box filters, which
+ approximates a Gaussian kernel. For details on accuracy see
+ <https://www.mia.uni-saarland.de/Publications/gwosdek-ssvm11.pdf>
+
+ :param radius: Standard deviation of the Gaussian kernel. Either a sequence of two
+ numbers for x and y, or a single number for both.
+ """
+
+ name = "GaussianBlur"
+
+ def __init__(self, radius=2):
+ self.radius = radius
+
+ def filter(self, image):
+ xy = self.radius
+ if not isinstance(xy, (tuple, list)):
+ xy = (xy, xy)
+ if xy == (0, 0):
+ return image.copy()
+ return image.gaussian_blur(xy)
+
+
+class BoxBlur(MultibandFilter):
+ """Blurs the image by setting each pixel to the average value of the pixels
+ in a square box extending radius pixels in each direction.
+ Supports float radius of arbitrary size. Uses an optimized implementation
+ which runs in linear time relative to the size of the image
+ for any radius value.
+
+ :param radius: Size of the box in a direction. Either a sequence of two numbers for
+ x and y, or a single number for both.
+
+ Radius 0 does not blur, returns an identical image.
+ Radius 1 takes 1 pixel in each direction, i.e. 9 pixels in total.
+ """
+
+ name = "BoxBlur"
+
+ def __init__(self, radius):
+ xy = radius
+ if not isinstance(xy, (tuple, list)):
+ xy = (xy, xy)
+ if xy[0] < 0 or xy[1] < 0:
+ msg = "radius must be >= 0"
+ raise ValueError(msg)
+ self.radius = radius
+
+ def filter(self, image):
+ xy = self.radius
+ if not isinstance(xy, (tuple, list)):
+ xy = (xy, xy)
+ if xy == (0, 0):
+ return image.copy()
+ return image.box_blur(xy)
+
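+# Illustrative sketch (not part of upstream Pillow): both blurs accept a
+# single radius or an (x, y) pair, and radius 0 returns an unchanged copy:
+#
+#   blurred = im.filter(GaussianBlur(radius=3))
+#   boxed = im.filter(BoxBlur((5, 0)))  # horizontal-only box blur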
+
+class UnsharpMask(MultibandFilter):
+ """Unsharp mask filter.
+
+ See Wikipedia's entry on `digital unsharp masking`_ for an explanation of
+ the parameters.
+
+ :param radius: Blur Radius
+ :param percent: Unsharp strength, in percent
+ :param threshold: Threshold controls the minimum brightness change that
+ will be sharpened
+
+ .. _digital unsharp masking: https://en.wikipedia.org/wiki/Unsharp_masking#Digital_unsharp_masking
+
+ """ # noqa: E501
+
+ name = "UnsharpMask"
+
+ def __init__(self, radius=2, percent=150, threshold=3):
+ self.radius = radius
+ self.percent = percent
+ self.threshold = threshold
+
+ def filter(self, image):
+ return image.unsharp_mask(self.radius, self.percent, self.threshold)
+
+
+class BLUR(BuiltinFilter):
+ name = "Blur"
+ # fmt: off
+ filterargs = (5, 5), 16, 0, (
+ 1, 1, 1, 1, 1,
+ 1, 0, 0, 0, 1,
+ 1, 0, 0, 0, 1,
+ 1, 0, 0, 0, 1,
+ 1, 1, 1, 1, 1,
+ )
+ # fmt: on
+
+
+class CONTOUR(BuiltinFilter):
+ name = "Contour"
+ # fmt: off
+ filterargs = (3, 3), 1, 255, (
+ -1, -1, -1,
+ -1, 8, -1,
+ -1, -1, -1,
+ )
+ # fmt: on
+
+
+class DETAIL(BuiltinFilter):
+ name = "Detail"
+ # fmt: off
+ filterargs = (3, 3), 6, 0, (
+ 0, -1, 0,
+ -1, 10, -1,
+ 0, -1, 0,
+ )
+ # fmt: on
+
+
+class EDGE_ENHANCE(BuiltinFilter):
+ name = "Edge-enhance"
+ # fmt: off
+ filterargs = (3, 3), 2, 0, (
+ -1, -1, -1,
+ -1, 10, -1,
+ -1, -1, -1,
+ )
+ # fmt: on
+
+
+class EDGE_ENHANCE_MORE(BuiltinFilter):
+ name = "Edge-enhance More"
+ # fmt: off
+ filterargs = (3, 3), 1, 0, (
+ -1, -1, -1,
+ -1, 9, -1,
+ -1, -1, -1,
+ )
+ # fmt: on
+
+
+class EMBOSS(BuiltinFilter):
+ name = "Emboss"
+ # fmt: off
+ filterargs = (3, 3), 1, 128, (
+ -1, 0, 0,
+ 0, 1, 0,
+ 0, 0, 0,
+ )
+ # fmt: on
+
+
+class FIND_EDGES(BuiltinFilter):
+ name = "Find Edges"
+ # fmt: off
+ filterargs = (3, 3), 1, 0, (
+ -1, -1, -1,
+ -1, 8, -1,
+ -1, -1, -1,
+ )
+ # fmt: on
+
+
+class SHARPEN(BuiltinFilter):
+ name = "Sharpen"
+ # fmt: off
+ filterargs = (3, 3), 16, 0, (
+ -2, -2, -2,
+ -2, 32, -2,
+ -2, -2, -2,
+ )
+ # fmt: on
+
+
+class SMOOTH(BuiltinFilter):
+ name = "Smooth"
+ # fmt: off
+ filterargs = (3, 3), 13, 0, (
+ 1, 1, 1,
+ 1, 5, 1,
+ 1, 1, 1,
+ )
+ # fmt: on
+
+
+class SMOOTH_MORE(BuiltinFilter):
+ name = "Smooth More"
+ # fmt: off
+ filterargs = (5, 5), 100, 0, (
+ 1, 1, 1, 1, 1,
+ 1, 5, 5, 5, 1,
+ 1, 5, 44, 5, 1,
+ 1, 5, 5, 5, 1,
+ 1, 1, 1, 1, 1,
+ )
+ # fmt: on
+
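+# The builtin filters above are ready-made instances, applied directly, e.g.
+# (assuming a local RGB file "input.png"):
+#
+#   >>> from PIL import Image, ImageFilter
+#   >>> im = Image.open("input.png").convert("RGB")
+#   >>> edges = im.filter(ImageFilter.FIND_EDGES)
+#   >>> sharp = im.filter(ImageFilter.SHARPEN)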
+
+class Color3DLUT(MultibandFilter):
+ """Three-dimensional color lookup table.
+
+ Transforms 3-channel pixels using the values of the channels as coordinates
+ in the 3D lookup table and interpolating the nearest elements.
+
+ This method allows you to apply almost any color transformation
+ in constant time by using pre-calculated decimated tables.
+
+ .. versionadded:: 5.2.0
+
+ :param size: Size of the table. One int or tuple of (int, int, int).
+ Minimal size in any dimension is 2, maximum is 65.
+ :param table: Flat lookup table. A list of ``channels * size**3``
+ float elements or a list of ``size**3`` channels-sized
+ tuples with floats. Channels are changed first,
+ then first dimension, then second, then third.
+                  Value 0.0 corresponds to the lowest output value,
+                  1.0 to the highest.
+    :param channels: Number of channels in the table. Can be 3 or 4.
+                     Default is 3.
+    :param target_mode: A mode for the result image. Should have at least
+                        ``channels`` channels. Default is ``None``, which
+                        means that the mode will not be changed.
+ """
+
+ name = "Color 3D LUT"
+
+ def __init__(self, size, table, channels=3, target_mode=None, **kwargs):
+ if channels not in (3, 4):
+ msg = "Only 3 or 4 output channels are supported"
+ raise ValueError(msg)
+ self.size = size = self._check_size(size)
+ self.channels = channels
+ self.mode = target_mode
+
+ # Hidden flag `_copy_table=False` could be used to avoid extra copying
+ # of the table if the table is specially made for the constructor.
+ copy_table = kwargs.get("_copy_table", True)
+ items = size[0] * size[1] * size[2]
+ wrong_size = False
+
+ numpy = None
+ if hasattr(table, "shape"):
+ try:
+ import numpy
+ except ImportError: # pragma: no cover
+ pass
+
+ if numpy and isinstance(table, numpy.ndarray):
+ if copy_table:
+ table = table.copy()
+
+ if table.shape in [
+ (items * channels,),
+ (items, channels),
+ (size[2], size[1], size[0], channels),
+ ]:
+ table = table.reshape(items * channels)
+ else:
+ wrong_size = True
+
+ else:
+ if copy_table:
+ table = list(table)
+
+ # Convert to a flat list
+ if table and isinstance(table[0], (list, tuple)):
+ table, raw_table = [], table
+ for pixel in raw_table:
+ if len(pixel) != channels:
+ msg = (
+ "The elements of the table should "
+ f"have a length of {channels}."
+ )
+ raise ValueError(msg)
+ table.extend(pixel)
+
+ if wrong_size or len(table) != items * channels:
+ msg = (
+ "The table should have either channels * size**3 float items "
+ "or size**3 items of channels-sized tuples with floats. "
+ f"Table should be: {channels}x{size[0]}x{size[1]}x{size[2]}. "
+ f"Actual length: {len(table)}"
+ )
+ raise ValueError(msg)
+ self.table = table
+
+ @staticmethod
+ def _check_size(size):
+ try:
+ _, _, _ = size
+ except ValueError as e:
+ msg = "Size should be either an integer or a tuple of three integers."
+ raise ValueError(msg) from e
+ except TypeError:
+ size = (size, size, size)
+ size = [int(x) for x in size]
+ for size_1d in size:
+ if not 2 <= size_1d <= 65:
+ msg = "Size should be in [2, 65] range."
+ raise ValueError(msg)
+ return size
+
+ @classmethod
+ def generate(cls, size, callback, channels=3, target_mode=None):
+ """Generates new LUT using provided callback.
+
+ :param size: Size of the table. Passed to the constructor.
+        :param callback: Function with three parameters which correspond to
+                         the three color channels. It will be called
+                         ``size**3`` times with values from 0.0 to 1.0, and
+                         it should return a tuple with ``channels`` elements.
+        :param channels: The number of channels in the tuples returned by
+                         the callback.
+ :param target_mode: Passed to the constructor of the resulting
+ lookup table.
+ """
+ size_1d, size_2d, size_3d = cls._check_size(size)
+ if channels not in (3, 4):
+ msg = "Only 3 or 4 output channels are supported"
+ raise ValueError(msg)
+
+ table = [0] * (size_1d * size_2d * size_3d * channels)
+ idx_out = 0
+ for b in range(size_3d):
+ for g in range(size_2d):
+ for r in range(size_1d):
+ table[idx_out : idx_out + channels] = callback(
+ r / (size_1d - 1), g / (size_2d - 1), b / (size_3d - 1)
+ )
+ idx_out += channels
+
+ return cls(
+ (size_1d, size_2d, size_3d),
+ table,
+ channels=channels,
+ target_mode=target_mode,
+ _copy_table=False,
+ )
+
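+    # A minimal usage sketch: generate a channel-swapping LUT and apply it
+    # (assumes a local RGB file "input.png"):
+    #
+    #   >>> from PIL import Image, ImageFilter
+    #   >>> lut = ImageFilter.Color3DLUT.generate(17, lambda r, g, b: (b, g, r))
+    #   >>> out = Image.open("input.png").convert("RGB").filter(lut)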
+ def transform(self, callback, with_normals=False, channels=None, target_mode=None):
+ """Transforms the table values using provided callback and returns
+ a new LUT with altered values.
+
+        :param callback: A function which takes old lookup table values
+                         and returns a new set of values. The number
+                         of arguments the function should take is
+                         ``self.channels``, or ``3 + self.channels``
+                         if the ``with_normals`` flag is set.
+                         It should return a tuple of ``self.channels``
+                         elements, or of ``channels`` elements if that
+                         argument is set.
+ :param with_normals: If true, ``callback`` will be called with
+ coordinates in the color cube as the first
+ three arguments. Otherwise, ``callback``
+ will be called only with actual color values.
+ :param channels: The number of channels in the resulting lookup table.
+ :param target_mode: Passed to the constructor of the resulting
+ lookup table.
+ """
+ if channels not in (None, 3, 4):
+ msg = "Only 3 or 4 output channels are supported"
+ raise ValueError(msg)
+ ch_in = self.channels
+ ch_out = channels or ch_in
+ size_1d, size_2d, size_3d = self.size
+
+ table = [0] * (size_1d * size_2d * size_3d * ch_out)
+ idx_in = 0
+ idx_out = 0
+ for b in range(size_3d):
+ for g in range(size_2d):
+ for r in range(size_1d):
+ values = self.table[idx_in : idx_in + ch_in]
+ if with_normals:
+ values = callback(
+ r / (size_1d - 1),
+ g / (size_2d - 1),
+ b / (size_3d - 1),
+ *values,
+ )
+ else:
+ values = callback(*values)
+ table[idx_out : idx_out + ch_out] = values
+ idx_in += ch_in
+ idx_out += ch_out
+
+ return type(self)(
+ self.size,
+ table,
+ channels=ch_out,
+ target_mode=target_mode or self.mode,
+ _copy_table=False,
+ )
+
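+    # A minimal usage sketch: derive an inverted LUT from an existing one,
+    # e.g. the ``lut`` from the ``generate`` sketch above:
+    #
+    #   >>> inverted = lut.transform(lambda r, g, b: (1 - r, 1 - g, 1 - b))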
+ def __repr__(self):
+ r = [
+ f"{self.__class__.__name__} from {self.table.__class__.__name__}",
+ "size={:d}x{:d}x{:d}".format(*self.size),
+ f"channels={self.channels:d}",
+ ]
+ if self.mode:
+ r.append(f"target_mode={self.mode}")
+ return "<{}>".format(" ".join(r))
+
+ def filter(self, image):
+ from . import Image
+
+ return image.color_lut_3d(
+ self.mode or image.mode,
+ Image.Resampling.BILINEAR,
+ self.channels,
+ self.size[0],
+ self.size[1],
+ self.size[2],
+ self.table,
+ )
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/ImageFont.py b/Backend/venv/lib/python3.12/site-packages/PIL/ImageFont.py
new file mode 100644
index 00000000..c2956213
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/PIL/ImageFont.py
@@ -0,0 +1,1242 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# PIL raster font management
+#
+# History:
+# 1996-08-07 fl created (experimental)
+# 1997-08-25 fl minor adjustments to handle fonts from pilfont 0.3
+# 1999-02-06 fl rewrote most font management stuff in C
+# 1999-03-17 fl take pth files into account in load_path (from Richard Jones)
+# 2001-02-17 fl added freetype support
+# 2001-05-09 fl added TransposedFont wrapper class
+# 2002-03-04 fl make sure we have a "L" or "1" font
+# 2002-12-04 fl skip non-directory entries in the system path
+# 2003-04-29 fl add embedded default font
+# 2003-09-27 fl added support for truetype charmap encodings
+#
+# Todo:
+# Adapt to PILFONT2 format (16-bit fonts, compressed, single file)
+#
+# Copyright (c) 1997-2003 by Secret Labs AB
+# Copyright (c) 1996-2003 by Fredrik Lundh
+#
+# See the README file for information on usage and redistribution.
+#
+
+import base64
+import os
+import sys
+import warnings
+from enum import IntEnum
+from io import BytesIO
+
+from . import Image
+from ._util import is_directory, is_path
+
+
+class Layout(IntEnum):
+ BASIC = 0
+ RAQM = 1
+
+
+MAX_STRING_LENGTH = 1_000_000
+
+
+try:
+ from . import _imagingft as core
+except ImportError as ex:
+ from ._util import DeferredError
+
+ core = DeferredError(ex)
+
+
+def _string_length_check(text):
+ if MAX_STRING_LENGTH is not None and len(text) > MAX_STRING_LENGTH:
+ msg = "too many characters in string"
+ raise ValueError(msg)
+
+
+# FIXME: add support for pilfont2 format (see FontFile.py)
+
+# --------------------------------------------------------------------
+# Font metrics format:
+# "PILfont" LF
+# fontdescriptor LF
+# (optional) key=value... LF
+# "DATA" LF
+# binary data: 256*10*2 bytes (dx, dy, dstbox, srcbox)
+#
+# To place a character, cut out srcbox and paste at dstbox,
+# relative to the character position. Then move the character
+# position according to dx, dy.
+# --------------------------------------------------------------------
+
+
+class ImageFont:
+ """PIL font wrapper"""
+
+ def _load_pilfont(self, filename):
+ with open(filename, "rb") as fp:
+ image = None
+ for ext in (".png", ".gif", ".pbm"):
+ if image:
+ image.close()
+ try:
+ fullname = os.path.splitext(filename)[0] + ext
+ image = Image.open(fullname)
+ except Exception:
+ pass
+ else:
+ if image and image.mode in ("1", "L"):
+ break
+ else:
+ if image:
+ image.close()
+ msg = "cannot find glyph data file"
+ raise OSError(msg)
+
+ self.file = fullname
+
+ self._load_pilfont_data(fp, image)
+ image.close()
+
+ def _load_pilfont_data(self, file, image):
+ # read PILfont header
+ if file.readline() != b"PILfont\n":
+ msg = "Not a PILfont file"
+ raise SyntaxError(msg)
+ file.readline().split(b";")
+ self.info = [] # FIXME: should be a dictionary
+ while True:
+ s = file.readline()
+ if not s or s == b"DATA\n":
+ break
+ self.info.append(s)
+
+ # read PILfont metrics
+ data = file.read(256 * 20)
+
+ # check image
+ if image.mode not in ("1", "L"):
+ msg = "invalid font image mode"
+ raise TypeError(msg)
+
+ image.load()
+
+ self.font = Image.core.font(image.im, data)
+
+ def getmask(self, text, mode="", *args, **kwargs):
+ """
+ Create a bitmap for the text.
+
+ If the font uses antialiasing, the bitmap should have mode ``L`` and use a
+ maximum value of 255. Otherwise, it should have mode ``1``.
+
+ :param text: Text to render.
+ :param mode: Used by some graphics drivers to indicate what mode the
+ driver prefers; if empty, the renderer may return either
+ mode. Note that the mode is always a string, to simplify
+ C-level implementations.
+
+ .. versionadded:: 1.1.5
+
+ :return: An internal PIL storage memory instance as defined by the
+ :py:mod:`PIL.Image.core` interface module.
+ """
+ return self.font.getmask(text, mode)
+
+ def getbbox(self, text, *args, **kwargs):
+ """
+ Returns bounding box (in pixels) of given text.
+
+ .. versionadded:: 9.2.0
+
+ :param text: Text to render.
+ :param mode: Used by some graphics drivers to indicate what mode the
+ driver prefers; if empty, the renderer may return either
+ mode. Note that the mode is always a string, to simplify
+ C-level implementations.
+
+ :return: ``(left, top, right, bottom)`` bounding box
+ """
+ _string_length_check(text)
+ width, height = self.font.getsize(text)
+ return 0, 0, width, height
+
+ def getlength(self, text, *args, **kwargs):
+ """
+ Returns length (in pixels) of given text.
+ This is the amount by which following text should be offset.
+
+ .. versionadded:: 9.2.0
+ """
+ _string_length_check(text)
+ width, height = self.font.getsize(text)
+ return width
+
+
+##
+# Wrapper for FreeType fonts. Application code should use the
+# truetype factory function to create font objects.
+
+
+class FreeTypeFont:
+ """FreeType font wrapper (requires _imagingft service)"""
+
+ def __init__(self, font=None, size=10, index=0, encoding="", layout_engine=None):
+ # FIXME: use service provider instead
+
+ self.path = font
+ self.size = size
+ self.index = index
+ self.encoding = encoding
+
+ if layout_engine not in (Layout.BASIC, Layout.RAQM):
+ layout_engine = Layout.BASIC
+ if core.HAVE_RAQM:
+ layout_engine = Layout.RAQM
+ elif layout_engine == Layout.RAQM and not core.HAVE_RAQM:
+ warnings.warn(
+ "Raqm layout was requested, but Raqm is not available. "
+ "Falling back to basic layout."
+ )
+ layout_engine = Layout.BASIC
+
+ self.layout_engine = layout_engine
+
+ def load_from_bytes(f):
+ self.font_bytes = f.read()
+ self.font = core.getfont(
+ "", size, index, encoding, self.font_bytes, layout_engine
+ )
+
+ if is_path(font):
+ if sys.platform == "win32":
+ font_bytes_path = font if isinstance(font, bytes) else font.encode()
+ try:
+ font_bytes_path.decode("ascii")
+ except UnicodeDecodeError:
+ # FreeType cannot load fonts with non-ASCII characters on Windows
+ # So load it into memory first
+ with open(font, "rb") as f:
+ load_from_bytes(f)
+ return
+ self.font = core.getfont(
+ font, size, index, encoding, layout_engine=layout_engine
+ )
+ else:
+ load_from_bytes(font)
+
+ def __getstate__(self):
+ return [self.path, self.size, self.index, self.encoding, self.layout_engine]
+
+ def __setstate__(self, state):
+ path, size, index, encoding, layout_engine = state
+ self.__init__(path, size, index, encoding, layout_engine)
+
+ def getname(self):
+ """
+ :return: A tuple of the font family (e.g. Helvetica) and the font style
+ (e.g. Bold)
+ """
+ return self.font.family, self.font.style
+
+ def getmetrics(self):
+ """
+ :return: A tuple of the font ascent (the distance from the baseline to
+ the highest outline point) and descent (the distance from the
+ baseline to the lowest outline point, a negative value)
+ """
+ return self.font.ascent, self.font.descent
+
+ def getlength(self, text, mode="", direction=None, features=None, language=None):
+ """
+ Returns length (in pixels with 1/64 precision) of given text when rendered
+ in font with provided direction, features, and language.
+
+ This is the amount by which following text should be offset.
+ Text bounding box may extend past the length in some fonts,
+ e.g. when using italics or accents.
+
+ The result is returned as a float; it is a whole number if using basic layout.
+
+ Note that the sum of two lengths may not equal the length of a concatenated
+ string due to kerning. If you need to adjust for kerning, include the following
+ character and subtract its length.
+
+ For example, instead of ::
+
+ hello = font.getlength("Hello")
+ world = font.getlength("World")
+ hello_world = hello + world # not adjusted for kerning
+ assert hello_world == font.getlength("HelloWorld") # may fail
+
+ use ::
+
+ hello = font.getlength("HelloW") - font.getlength("W") # adjusted for kerning
+ world = font.getlength("World")
+ hello_world = hello + world # adjusted for kerning
+ assert hello_world == font.getlength("HelloWorld") # True
+
+ or disable kerning with (requires libraqm) ::
+
+ hello = draw.textlength("Hello", font, features=["-kern"])
+ world = draw.textlength("World", font, features=["-kern"])
+ hello_world = hello + world # kerning is disabled, no need to adjust
+ assert hello_world == draw.textlength("HelloWorld", font, features=["-kern"])
+
+ .. versionadded:: 8.0.0
+
+ :param text: Text to measure.
+ :param mode: Used by some graphics drivers to indicate what mode the
+ driver prefers; if empty, the renderer may return either
+ mode. Note that the mode is always a string, to simplify
+ C-level implementations.
+
+ :param direction: Direction of the text. It can be 'rtl' (right to
+ left), 'ltr' (left to right) or 'ttb' (top to bottom).
+ Requires libraqm.
+
+ :param features: A list of OpenType font features to be used during text
+ layout. This is usually used to turn on optional
+ font features that are not enabled by default,
+ for example 'dlig' or 'ss01', but can be also
+ used to turn off default font features for
+ example '-liga' to disable ligatures or '-kern'
+ to disable kerning. To get all supported
+ features, see
+ https://learn.microsoft.com/en-us/typography/opentype/spec/featurelist
+ Requires libraqm.
+
+ :param language: Language of the text. Different languages may use
+ different glyph shapes or ligatures. This parameter tells
+ the font which language the text is in, and to apply the
+ correct substitutions as appropriate, if available.
+                         It should be a `BCP 47 language code
+                         <https://www.w3.org/International/articles/language-tags/>`_
+ Requires libraqm.
+
+ :return: Either width for horizontal text, or height for vertical text.
+ """
+ _string_length_check(text)
+ return self.font.getlength(text, mode, direction, features, language) / 64
+
+ def getbbox(
+ self,
+ text,
+ mode="",
+ direction=None,
+ features=None,
+ language=None,
+ stroke_width=0,
+ anchor=None,
+ ):
+ """
+ Returns bounding box (in pixels) of given text relative to given anchor
+ when rendered in font with provided direction, features, and language.
+
+ Use :py:meth:`getlength()` to get the offset of following text with
+ 1/64 pixel precision. The bounding box includes extra margins for
+ some fonts, e.g. italics or accents.
+
+ .. versionadded:: 8.0.0
+
+ :param text: Text to render.
+ :param mode: Used by some graphics drivers to indicate what mode the
+ driver prefers; if empty, the renderer may return either
+ mode. Note that the mode is always a string, to simplify
+ C-level implementations.
+
+ :param direction: Direction of the text. It can be 'rtl' (right to
+ left), 'ltr' (left to right) or 'ttb' (top to bottom).
+ Requires libraqm.
+
+ :param features: A list of OpenType font features to be used during text
+ layout. This is usually used to turn on optional
+ font features that are not enabled by default,
+ for example 'dlig' or 'ss01', but can be also
+ used to turn off default font features for
+ example '-liga' to disable ligatures or '-kern'
+ to disable kerning. To get all supported
+ features, see
+ https://learn.microsoft.com/en-us/typography/opentype/spec/featurelist
+ Requires libraqm.
+
+ :param language: Language of the text. Different languages may use
+ different glyph shapes or ligatures. This parameter tells
+ the font which language the text is in, and to apply the
+ correct substitutions as appropriate, if available.
+                         It should be a `BCP 47 language code
+                         <https://www.w3.org/International/articles/language-tags/>`_
+ Requires libraqm.
+
+ :param stroke_width: The width of the text stroke.
+
+ :param anchor: The text anchor alignment. Determines the relative location of
+ the anchor to the text. The default alignment is top left.
+ See :ref:`text-anchors` for valid values.
+
+ :return: ``(left, top, right, bottom)`` bounding box
+ """
+ _string_length_check(text)
+ size, offset = self.font.getsize(
+ text, mode, direction, features, language, anchor
+ )
+ left, top = offset[0] - stroke_width, offset[1] - stroke_width
+ width, height = size[0] + 2 * stroke_width, size[1] + 2 * stroke_width
+ return left, top, left + width, top + height
+
+ def getmask(
+ self,
+ text,
+ mode="",
+ direction=None,
+ features=None,
+ language=None,
+ stroke_width=0,
+ anchor=None,
+ ink=0,
+ start=None,
+ ):
+ """
+ Create a bitmap for the text.
+
+ If the font uses antialiasing, the bitmap should have mode ``L`` and use a
+ maximum value of 255. If the font has embedded color data, the bitmap
+ should have mode ``RGBA``. Otherwise, it should have mode ``1``.
+
+ :param text: Text to render.
+ :param mode: Used by some graphics drivers to indicate what mode the
+ driver prefers; if empty, the renderer may return either
+ mode. Note that the mode is always a string, to simplify
+ C-level implementations.
+
+ .. versionadded:: 1.1.5
+
+ :param direction: Direction of the text. It can be 'rtl' (right to
+ left), 'ltr' (left to right) or 'ttb' (top to bottom).
+ Requires libraqm.
+
+ .. versionadded:: 4.2.0
+
+ :param features: A list of OpenType font features to be used during text
+ layout. This is usually used to turn on optional
+ font features that are not enabled by default,
+ for example 'dlig' or 'ss01', but can be also
+ used to turn off default font features for
+ example '-liga' to disable ligatures or '-kern'
+ to disable kerning. To get all supported
+ features, see
+ https://learn.microsoft.com/en-us/typography/opentype/spec/featurelist
+ Requires libraqm.
+
+ .. versionadded:: 4.2.0
+
+ :param language: Language of the text. Different languages may use
+ different glyph shapes or ligatures. This parameter tells
+ the font which language the text is in, and to apply the
+ correct substitutions as appropriate, if available.
+                         It should be a `BCP 47 language code
+                         <https://www.w3.org/International/articles/language-tags/>`_
+ Requires libraqm.
+
+ .. versionadded:: 6.0.0
+
+ :param stroke_width: The width of the text stroke.
+
+ .. versionadded:: 6.2.0
+
+ :param anchor: The text anchor alignment. Determines the relative location of
+ the anchor to the text. The default alignment is top left.
+ See :ref:`text-anchors` for valid values.
+
+ .. versionadded:: 8.0.0
+
+ :param ink: Foreground ink for rendering in RGBA mode.
+
+ .. versionadded:: 8.0.0
+
+ :param start: Tuple of horizontal and vertical offset, as text may render
+ differently when starting at fractional coordinates.
+
+ .. versionadded:: 9.4.0
+
+ :return: An internal PIL storage memory instance as defined by the
+ :py:mod:`PIL.Image.core` interface module.
+ """
+ return self.getmask2(
+ text,
+ mode,
+ direction=direction,
+ features=features,
+ language=language,
+ stroke_width=stroke_width,
+ anchor=anchor,
+ ink=ink,
+ start=start,
+ )[0]
+
+ def getmask2(
+ self,
+ text,
+ mode="",
+ direction=None,
+ features=None,
+ language=None,
+ stroke_width=0,
+ anchor=None,
+ ink=0,
+ start=None,
+ *args,
+ **kwargs,
+ ):
+ """
+ Create a bitmap for the text.
+
+ If the font uses antialiasing, the bitmap should have mode ``L`` and use a
+ maximum value of 255. If the font has embedded color data, the bitmap
+ should have mode ``RGBA``. Otherwise, it should have mode ``1``.
+
+ :param text: Text to render.
+ :param mode: Used by some graphics drivers to indicate what mode the
+ driver prefers; if empty, the renderer may return either
+ mode. Note that the mode is always a string, to simplify
+ C-level implementations.
+
+ .. versionadded:: 1.1.5
+
+ :param direction: Direction of the text. It can be 'rtl' (right to
+ left), 'ltr' (left to right) or 'ttb' (top to bottom).
+ Requires libraqm.
+
+ .. versionadded:: 4.2.0
+
+ :param features: A list of OpenType font features to be used during text
+ layout. This is usually used to turn on optional
+ font features that are not enabled by default,
+ for example 'dlig' or 'ss01', but can be also
+ used to turn off default font features for
+ example '-liga' to disable ligatures or '-kern'
+ to disable kerning. To get all supported
+ features, see
+ https://learn.microsoft.com/en-us/typography/opentype/spec/featurelist
+ Requires libraqm.
+
+ .. versionadded:: 4.2.0
+
+ :param language: Language of the text. Different languages may use
+ different glyph shapes or ligatures. This parameter tells
+ the font which language the text is in, and to apply the
+ correct substitutions as appropriate, if available.
+                         It should be a `BCP 47 language code
+                         <https://www.w3.org/International/articles/language-tags/>`_
+ Requires libraqm.
+
+ .. versionadded:: 6.0.0
+
+ :param stroke_width: The width of the text stroke.
+
+ .. versionadded:: 6.2.0
+
+ :param anchor: The text anchor alignment. Determines the relative location of
+ the anchor to the text. The default alignment is top left.
+ See :ref:`text-anchors` for valid values.
+
+ .. versionadded:: 8.0.0
+
+ :param ink: Foreground ink for rendering in RGBA mode.
+
+ .. versionadded:: 8.0.0
+
+ :param start: Tuple of horizontal and vertical offset, as text may render
+ differently when starting at fractional coordinates.
+
+ .. versionadded:: 9.4.0
+
+ :return: A tuple of an internal PIL storage memory instance as defined by the
+ :py:mod:`PIL.Image.core` interface module, and the text offset, the
+ gap between the starting coordinate and the first marking
+ """
+ _string_length_check(text)
+ if start is None:
+ start = (0, 0)
+ im = None
+ size = None
+
+ def fill(mode, im_size):
+ nonlocal im, size
+
+ size = im_size
+ if Image.MAX_IMAGE_PIXELS is not None:
+ pixels = max(1, size[0]) * max(1, size[1])
+ if pixels > 2 * Image.MAX_IMAGE_PIXELS:
+ return
+
+ im = Image.core.fill(mode, size)
+ return im
+
+ offset = self.font.render(
+ text,
+ fill,
+ mode,
+ direction,
+ features,
+ language,
+ stroke_width,
+ anchor,
+ ink,
+ start[0],
+ start[1],
+ )
+ Image._decompression_bomb_check(size)
+ return im, offset
+
+ def font_variant(
+ self, font=None, size=None, index=None, encoding=None, layout_engine=None
+ ):
+ """
+ Create a copy of this FreeTypeFont object,
+ using any specified arguments to override the settings.
+
+ Parameters are identical to the parameters used to initialize this
+ object.
+
+ :return: A FreeTypeFont object.
+ """
+ if font is None:
+ try:
+ font = BytesIO(self.font_bytes)
+ except AttributeError:
+ font = self.path
+ return FreeTypeFont(
+ font=font,
+ size=self.size if size is None else size,
+ index=self.index if index is None else index,
+ encoding=self.encoding if encoding is None else encoding,
+ layout_engine=layout_engine or self.layout_engine,
+ )
+
+ def get_variation_names(self):
+ """
+ :returns: A list of the named styles in a variation font.
+ :exception OSError: If the font is not a variation font.
+ """
+ try:
+ names = self.font.getvarnames()
+ except AttributeError as e:
+ msg = "FreeType 2.9.1 or greater is required"
+ raise NotImplementedError(msg) from e
+ return [name.replace(b"\x00", b"") for name in names]
+
+ def set_variation_by_name(self, name):
+ """
+ :param name: The name of the style.
+ :exception OSError: If the font is not a variation font.
+ """
+ names = self.get_variation_names()
+ if not isinstance(name, bytes):
+ name = name.encode()
+ index = names.index(name) + 1
+
+ if index == getattr(self, "_last_variation_index", None):
+ # When the same name is set twice in a row,
+ # there is an 'unknown freetype error'
+ # https://savannah.nongnu.org/bugs/?56186
+ return
+ self._last_variation_index = index
+
+ self.font.setvarname(index)
+
+ def get_variation_axes(self):
+ """
+ :returns: A list of the axes in a variation font.
+ :exception OSError: If the font is not a variation font.
+ """
+ try:
+ axes = self.font.getvaraxes()
+ except AttributeError as e:
+ msg = "FreeType 2.9.1 or greater is required"
+ raise NotImplementedError(msg) from e
+ for axis in axes:
+ axis["name"] = axis["name"].replace(b"\x00", b"")
+ return axes
+
+ def set_variation_by_axes(self, axes):
+ """
+ :param axes: A list of values for each axis.
+ :exception OSError: If the font is not a variation font.
+ """
+ try:
+ self.font.setvaraxes(axes)
+ except AttributeError as e:
+ msg = "FreeType 2.9.1 or greater is required"
+ raise NotImplementedError(msg) from e
+
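+# A minimal variation-font sketch ("MyVariable.ttf" and the "Bold" style name
+# are assumptions; these calls raise OSError for non-variation fonts):
+#
+#   >>> from PIL import ImageFont
+#   >>> font = ImageFont.truetype("MyVariable.ttf", 24)
+#   >>> font.get_variation_names()          # e.g. [b'Regular', b'Bold', ...]
+#   >>> font.set_variation_by_name("Bold")
+#   >>> font.set_variation_by_axes([700])   # e.g. a weight-axis value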
+
+class TransposedFont:
+ """Wrapper for writing rotated or mirrored text"""
+
+ def __init__(self, font, orientation=None):
+ """
+ Wrapper that creates a transposed font from any existing font
+ object.
+
+ :param font: A font object.
+ :param orientation: An optional orientation. If given, this should
+ be one of Image.Transpose.FLIP_LEFT_RIGHT, Image.Transpose.FLIP_TOP_BOTTOM,
+ Image.Transpose.ROTATE_90, Image.Transpose.ROTATE_180, or
+ Image.Transpose.ROTATE_270.
+ """
+ self.font = font
+ self.orientation = orientation # any 'transpose' argument, or None
+
+ def getmask(self, text, mode="", *args, **kwargs):
+ im = self.font.getmask(text, mode, *args, **kwargs)
+ if self.orientation is not None:
+ return im.transpose(self.orientation)
+ return im
+
+ def getbbox(self, text, *args, **kwargs):
+ # TransposedFont doesn't support getmask2, move top-left point to (0, 0)
+ # this has no effect on ImageFont and simulates anchor="lt" for FreeTypeFont
+ left, top, right, bottom = self.font.getbbox(text, *args, **kwargs)
+ width = right - left
+ height = bottom - top
+ if self.orientation in (Image.Transpose.ROTATE_90, Image.Transpose.ROTATE_270):
+ return 0, 0, height, width
+ return 0, 0, width, height
+
+ def getlength(self, text, *args, **kwargs):
+ if self.orientation in (Image.Transpose.ROTATE_90, Image.Transpose.ROTATE_270):
+ msg = "text length is undefined for text rotated by 90 or 270 degrees"
+ raise ValueError(msg)
+ _string_length_check(text)
+ return self.font.getlength(text, *args, **kwargs)
+
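+# A minimal usage sketch for rotated text ("arial.ttf" is an assumed font
+# path; note getlength() is undefined for 90/270-degree rotations):
+#
+#   >>> from PIL import Image, ImageFont
+#   >>> base = ImageFont.truetype("arial.ttf", 24)
+#   >>> vertical = ImageFont.TransposedFont(base, Image.Transpose.ROTATE_90)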
+
+def load(filename):
+ """
+ Load a font file. This function loads a font object from the given
+ bitmap font file, and returns the corresponding font object.
+
+ :param filename: Name of font file.
+ :return: A font object.
+ :exception OSError: If the file could not be read.
+ """
+ f = ImageFont()
+ f._load_pilfont(filename)
+ return f
+
+
+def truetype(font=None, size=10, index=0, encoding="", layout_engine=None):
+ """
+    Load a TrueType or OpenType font from a file or file-like object,
+    and create a font object for a font of the given size.
+
+ Pillow uses FreeType to open font files. On Windows, be aware that FreeType
+ will keep the file open as long as the FreeTypeFont object exists. Windows
+ limits the number of files that can be open in C at once to 512, so if many
+ fonts are opened simultaneously and that limit is approached, an
+ ``OSError`` may be thrown, reporting that FreeType "cannot open resource".
+ A workaround would be to copy the file(s) into memory, and open that instead.
+
+ This function requires the _imagingft service.
+
+ :param font: A filename or file-like object containing a TrueType font.
+ If the file is not found in this filename, the loader may also
+ search in other directories, such as the :file:`fonts/`
+ directory on Windows or :file:`/Library/Fonts/`,
+ :file:`/System/Library/Fonts/` and :file:`~/Library/Fonts/` on
+ macOS.
+
+ :param size: The requested size, in pixels.
+ :param index: Which font face to load (default is first available face).
+ :param encoding: Which font encoding to use (default is Unicode). Possible
+ encodings include (see the FreeType documentation for more
+ information):
+
+ * "unic" (Unicode)
+ * "symb" (Microsoft Symbol)
+ * "ADOB" (Adobe Standard)
+ * "ADBE" (Adobe Expert)
+ * "ADBC" (Adobe Custom)
+ * "armn" (Apple Roman)
+ * "sjis" (Shift JIS)
+ * "gb " (PRC)
+ * "big5"
+ * "wans" (Extended Wansung)
+ * "joha" (Johab)
+ * "lat1" (Latin-1)
+
+ This specifies the character set to use. It does not alter the
+ encoding of any text provided in subsequent operations.
+ :param layout_engine: Which layout engine to use, if available:
+ :data:`.ImageFont.Layout.BASIC` or :data:`.ImageFont.Layout.RAQM`.
+ If it is available, Raqm layout will be used by default.
+ Otherwise, basic layout will be used.
+
+ Raqm layout is recommended for all non-English text. If Raqm layout
+ is not required, basic layout will have better performance.
+
+ You can check support for Raqm layout using
+ :py:func:`PIL.features.check_feature` with ``feature="raqm"``.
+
+ .. versionadded:: 4.2.0
+ :return: A font object.
+ :exception OSError: If the file could not be read.
+ """
+
+ def freetype(font):
+ return FreeTypeFont(font, size, index, encoding, layout_engine)
+
+ try:
+ return freetype(font)
+ except OSError:
+ if not is_path(font):
+ raise
+ ttf_filename = os.path.basename(font)
+
+ dirs = []
+ if sys.platform == "win32":
+ # check the windows font repository
+ # NOTE: must use uppercase WINDIR, to work around bugs in
+ # 1.5.2's os.environ.get()
+ windir = os.environ.get("WINDIR")
+ if windir:
+ dirs.append(os.path.join(windir, "fonts"))
+ elif sys.platform in ("linux", "linux2"):
+ lindirs = os.environ.get("XDG_DATA_DIRS")
+ if not lindirs:
+ # According to the freedesktop spec, XDG_DATA_DIRS should
+ # default to /usr/share
+ lindirs = "/usr/share"
+ dirs += [os.path.join(lindir, "fonts") for lindir in lindirs.split(":")]
+ elif sys.platform == "darwin":
+ dirs += [
+ "/Library/Fonts",
+ "/System/Library/Fonts",
+ os.path.expanduser("~/Library/Fonts"),
+ ]
+
+ ext = os.path.splitext(ttf_filename)[1]
+ first_font_with_a_different_extension = None
+ for directory in dirs:
+ for walkroot, walkdir, walkfilenames in os.walk(directory):
+ for walkfilename in walkfilenames:
+ if ext and walkfilename == ttf_filename:
+ return freetype(os.path.join(walkroot, walkfilename))
+ elif not ext and os.path.splitext(walkfilename)[0] == ttf_filename:
+ fontpath = os.path.join(walkroot, walkfilename)
+ if os.path.splitext(fontpath)[1] == ".ttf":
+ return freetype(fontpath)
+ if not ext and first_font_with_a_different_extension is None:
+ first_font_with_a_different_extension = fontpath
+ if first_font_with_a_different_extension:
+ return freetype(first_font_with_a_different_extension)
+ raise
+
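+# A minimal usage sketch ("DejaVuSans.ttf" is an assumed font name; the
+# loader will also search the system font directories described above):
+#
+#   >>> from PIL import ImageFont
+#   >>> font = ImageFont.truetype("DejaVuSans.ttf", 20)
+#   >>> font.getbbox("Hello")    # (left, top, right, bottom)
+#   >>> font.getlength("Hello")  # advance width, 1/64 px precision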
+
+def load_path(filename):
+ """
+ Load font file. Same as :py:func:`~PIL.ImageFont.load`, but searches for a
+ bitmap font along the Python path.
+
+ :param filename: Name of font file.
+ :return: A font object.
+ :exception OSError: If the file could not be read.
+ """
+ for directory in sys.path:
+ if is_directory(directory):
+ if not isinstance(filename, str):
+ filename = filename.decode("utf-8")
+ try:
+ return load(os.path.join(directory, filename))
+ except OSError:
+ pass
+ msg = "cannot find font file"
+ raise OSError(msg)
+
+
+def load_default(size=None):
+ """If FreeType support is available, load a version of Aileron Regular,
+ https://dotcolon.net/font/aileron, with a more limited character set.
+
+ Otherwise, load a "better than nothing" font.
+
+ .. versionadded:: 1.1.4
+
+ :param size: The font size of Aileron Regular.
+
+ .. versionadded:: 10.1.0
+
+ :return: A font object.
+ """
+ if core.__class__.__name__ == "module" or size is not None:
+ f = truetype(
+ BytesIO(
+ base64.b64decode(
+ b"""
+AAEAAAAPAIAAAwBwRkZUTYwDlUAAADFoAAAAHEdERUYAqADnAAAo8AAAACRHUE9ThhmITwAAKfgAA
+AduR1NVQnHxefoAACkUAAAA4k9TLzJovoHLAAABeAAAAGBjbWFw5lFQMQAAA6gAAAGqZ2FzcP//AA
+MAACjoAAAACGdseWYmRXoPAAAGQAAAHfhoZWFkE18ayQAAAPwAAAA2aGhlYQboArEAAAE0AAAAJGh
+tdHjjERZ8AAAB2AAAAdBsb2NhuOexrgAABVQAAADqbWF4cAC7AEYAAAFYAAAAIG5hbWUr+h5lAAAk
+OAAAA6Jwb3N0D3oPTQAAJ9wAAAEKAAEAAAABGhxJDqIhXw889QALA+gAAAAA0Bqf2QAAAADhCh2h/
+2r/LgOxAyAAAAAIAAIAAAAAAAAAAQAAA8r/GgAAA7j/av9qA7EAAQAAAAAAAAAAAAAAAAAAAHQAAQ
+AAAHQAQwAFAAAAAAACAAAAAQABAAAAQAAAAAAAAAADAfoBkAAFAAgCigJYAAAASwKKAlgAAAFeADI
+BPgAAAAAFAAAAAAAAAAAAAAcAAAAAAAAAAAAAAABVS1dOAEAAIPsCAwL/GgDIA8oA5iAAAJMAAAAA
+AhICsgAAACAAAwH0AAAAAAAAAU0AAADYAAAA8gA5AVMAVgJEAEYCRAA1AuQAKQKOAEAAsAArATsAZ
+AE7AB4CMABVAkQAUADc/+EBEgAgANwAJQEv//sCRAApAkQAggJEADwCRAAtAkQAIQJEADkCRAArAk
+QAMgJEACwCRAAxANwAJQDc/+ECRABnAkQAUAJEAEQB8wAjA1QANgJ/AB0CcwBkArsALwLFAGQCSwB
+kAjcAZALGAC8C2gBkAQgAZAIgADcCYQBkAj8AZANiAGQCzgBkAuEALwJWAGQC3QAvAmsAZAJJADQC
+ZAAiAqoAXgJuACADuAAaAnEAGQJFABMCTwAuATMAYgEv//sBJwAiAkQAUAH0ADIBLAApAhMAJAJjA
+EoCEQAeAmcAHgIlAB4BIgAVAmcAHgJRAEoA7gA+AOn/8wIKAEoA9wBGA1cASgJRAEoCSgAeAmMASg
+JnAB4BSgBKAcsAGAE5ABQCUABCAgIAAQMRAAEB4v/6AgEAAQHOABQBLwBAAPoAYAEvACECRABNA0Y
+AJAItAHgBKgAcAkQAUAEsAHQAygAgAi0AOQD3ADYA9wAWAaEANgGhABYCbAAlAYMAeAGDADkA6/9q
+AhsAFAIKABUB/QAVAAAAAwAAAAMAAAAcAAEAAAAAAKQAAwABAAAAHAAEAIgAAAAeABAAAwAOAH4Aq
+QCrALEAtAC3ALsgGSAdICYgOiBEISL7Av//AAAAIACpAKsAsAC0ALcAuyAYIBwgJiA5IEQhIvsB//
+//4/+5/7j/tP+y/7D/reBR4E/gR+A14CzfTwVxAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+AAAAAAAEGAAABAAAAAAAAAAECAAAAAgAAAAAAAAAAAAAAAAAAAAEAAAMEBQYHCAkKCwwNDg8QERIT
+FBUWFxgZGhscHR4fICEiIyQlJicoKSorLC0uLzAxMjM0NTY3ODk6Ozw9Pj9AQUJDREVGR0hJSktMT
+U5PUFFSU1RVVldYWVpbXF1eX2BhAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAGQAAA
+AAAAAAYnFmAAAAAABlAAAAAAAAAAAAAAAAAAAAAAAAAAAAY2htAAAAAAAAAABrbGlqAAAAAHAAbm9
+ycwBnAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAmACYAJgAmAD4AUgCCAMoBCgFO
+AVwBcgGIAaYBvAHKAdYB6AH2AgwCIAJKAogCpgLWAw4DIgNkA5wDugPUA+gD/AQQBEYEogS8BPoFJ
+gVSBWoFgAWwBcoF1gX6BhQGJAZMBmgGiga0BuIHGgdUB2YHkAeiB8AH3AfyCAoIHAgqCDoITghcCG
+oIogjSCPoJKglYCXwJwgnqCgIKKApACl4Klgq8CtwLDAs8C1YLjAuyC9oL7gwMDCYMSAxgDKAMrAz
+qDQoNTA1mDYQNoA2uDcAN2g3oDfYODA4iDkoOXA5sDnoOnA7EDvwAAAAFAAAAAAH0ArwAAwAGAAkA
+DAAPAAAxESERAxMhExcRASELARETAfT6qv6syKr+jgFUqsiqArz9RAGLAP/+1P8B/v3VAP8BLP4CA
+P8AAgA5//IAuQKyAAMACwAANyMDMwIyFhQGIiY0oE4MZk84JCQ4JLQB/v3AJDgkJDgAAgBWAeUBPA
+LfAAMABwAAEyMnMxcjJzOmRgpagkYKWgHl+vr6AAAAAAIARgAAAf4CsgAbAB8AAAEHMxUjByM3Iwc
+jNyM1MzcjNTM3MwczNzMHMxUrAQczAZgdZXEvOi9bLzovWmYdZXEvOi9bLzovWp9bHlsBn4w429vb
+2ziMONvb29s4jAAAAAMANf+mAg4DDAAfACYALAAAJRQGBxUjNS4BJzMeARcRLgE0Njc1MxUeARcjJ
+icVHgEBFBYXNQ4BExU+ATU0Ag5xWDpgcgRcBz41Xl9oVTpVYwpcC1ttXP6cLTQuM5szOrVRZwlOTQ
+ZqVzZECAEAGlukZAlOTQdrUG8O7iNlAQgxNhDlCDj+8/YGOjReAAAAAAUAKf/yArsCvAAHAAsAFQA
+dACcAABIyFhQGIiY0EyMBMwQiBhUUFjI2NTQSMhYUBiImNDYiBhUUFjI2NTR5iFBQiFCVVwHAV/5c
+OiMjOiPmiFBQiFCxOiMjOiMCvFaSVlaS/ZoCsjIzMC80NC8w/uNWklZWkhozMC80NC8wAAAAAgBA/
+/ICbgLAACIALgAAARUjEQYjIiY1NDY3LgE1NDYzMhcVJiMiBhUUFhcWOwE1MxUFFBYzMjc1IyIHDg
+ECbmBcYYOOVkg7R4hsQjY4Q0RNRD4SLDxW/pJUXzksPCkUUk0BgUb+zBVUZ0BkDw5RO1huCkULQzp
+COAMBcHDHRz0J/AIHRQAAAAEAKwHlAIUC3wADAAATIycze0YKWgHl+gAAAAABAGT/sAEXAwwACQAA
+EzMGEBcjLgE0Nt06dXU6OUBAAwzG/jDGVePs4wAAAAEAHv+wANEDDAAJAAATMx4BFAYHIzYQHjo5Q
+EA5OnUDDFXj7ONVxgHQAAAAAQBVAFIB2wHbAA4AAAE3FwcXBycHJzcnNxcnMwEtmxOfcTJjYzJxnx
+ObCj4BKD07KYolmZkliik7PbMAAQBQAFUB9AIlAAsAAAEjFSM1IzUzNTMVMwH0tTq1tTq1AR/Kyjj
+OzgAAAAAB/+H/iACMAGQABAAANwcjNzOMWlFOXVrS3AAAAQAgAP8A8gE3AAMAABMjNTPy0tIA/zgA
+AQAl//IApQByAAcAADYyFhQGIiY0STgkJDgkciQ4JCQ4AAAAAf/7/+IBNALQAAMAABcjEzM5Pvs+H
+gLuAAAAAAIAKf/yAhsCwAADAAcAABIgECA2IBAgKQHy/g5gATL+zgLA/TJEAkYAAAAAAQCCAAABlg
+KyAAgAAAERIxEHNTc2MwGWVr6SIygCsv1OAldxW1sWAAEAPAAAAg4CwAAZAAA3IRUhNRM+ATU0JiM
+iDwEjNz4BMzIWFRQGB7kBUv4x+kI2QTt+EAFWAQp8aGVtSl5GRjEA/0RVLzlLmAoKa3FsUkNxXQAA
+AAEALf/yAhYCwAAqAAABHgEVFAYjIi8BMxceATMyNjU0KwE1MzI2NTQmIyIGDwEjNz4BMzIWFRQGA
+YxBSZJo2RUBVgEHV0JBUaQREUBUQzc5TQcBVgEKfGhfcEMBbxJbQl1x0AoKRkZHPn9GSD80QUVCCg
+pfbGBPOlgAAAACACEAAAIkArIACgAPAAAlIxUjNSE1ATMRMyMRBg8BAiRXVv6qAVZWV60dHLCurq4
+rAdn+QgFLMibzAAABADn/8gIZArIAHQAAATIWFRQGIyIvATMXFjMyNjU0JiMiByMTIRUhBzc2ATNv
+d5Fl1RQBVgIad0VSTkVhL1IwAYj+vh8rMAHHgGdtgcUKCoFXTU5bYgGRRvAuHQAAAAACACv/8gITA
+sAAFwAjAAABMhYVFAYjIhE0NjMyFh8BIycmIyIDNzYTMjY1NCYjIgYVFBYBLmp7imr0l3RZdAgBXA
+IYZ5wKJzU6QVNJSz5SUAHSgWltiQFGxcNlVQoKdv7sPiz+ZF1LTmJbU0lhAAAAAQAyAAACGgKyAAY
+AAAEVASMBITUCGv6oXAFL/oECsij9dgJsRgAAAAMALP/xAhgCwAAWACAALAAAAR4BFRQGIyImNTQ2
+Ny4BNTQ2MhYVFAYmIgYVFBYyNjU0AzI2NTQmIyIGFRQWAZQ5S5BmbIpPOjA7ecp5P2F8Q0J8RIVJS
+0pLTEtOAW0TXTxpZ2ZqPF0SE1A3VWVlVTdQ/UU0N0RENzT9/ko+Ok1NOj1LAAIAMf/yAhkCwAAXAC
+MAAAEyERQGIyImLwEzFxYzMhMHBiMiJjU0NhMyNjU0JiMiBhUUFgEl9Jd0WXQIAVwCGGecCic1SWp
+7imo+UlBAQVNJAsD+usXDZVUKCnYBFD4sgWltif5kW1NJYV1LTmIAAAACACX/8gClAiAABwAPAAAS
+MhYUBiImNBIyFhQGIiY0STgkJDgkJDgkJDgkAiAkOCQkOP52JDgkJDgAAAAC/+H/iAClAiAABwAMA
+AASMhYUBiImNBMHIzczSTgkJDgkaFpSTl4CICQ4JCQ4/mba5gAAAQBnAB4B+AH0AAYAAAENARUlNS
+UB+P6qAVb+bwGRAbCmpkbJRMkAAAIAUAC7AfQBuwADAAcAAAEhNSERITUhAfT+XAGk/lwBpAGDOP8
+AOAABAEQAHgHVAfQABgAAARUFNS0BNQHV/m8BVv6qAStEyUSmpkYAAAAAAgAj//IB1ALAABgAIAAA
+ATIWFRQHDgEHIz4BNz4BNTQmIyIGByM+ARIyFhQGIiY0AQRibmktIAJWBSEqNig+NTlHBFoDezQ4J
+CQ4JALAZ1BjaS03JS1DMD5LLDQ/SUVgcv2yJDgkJDgAAAAAAgA2/5gDFgKYADYAQgAAAQMGFRQzMj
+Y1NCYjIg4CFRQWMzI2NxcGIyImNTQ+AjMyFhUUBiMiJwcGIyImNTQ2MzIfATcHNzYmIyIGFRQzMjY
+Cej8EJjJJlnBAfGQ+oHtAhjUYg5OPx0h2k06Os3xRWQsVLjY5VHtdPBwJETcJDyUoOkZEJz8B0f74
+EQ8kZl6EkTFZjVOLlyknMVm1pmCiaTq4lX6CSCknTVRmmR8wPdYnQzxuSWVGAAIAHQAAAncCsgAHA
+AoAACUjByMTMxMjATMDAcj+UVz4dO5d/sjPZPT0ArL9TgE6ATQAAAADAGQAAAJMArIAEAAbACcAAA
+EeARUUBgcGKwERMzIXFhUUJRUzMjc2NTQnJiMTPgE1NCcmKwEVMzIBvkdHZkwiNt7LOSGq/oeFHBt
+hahIlSTM+cB8Yj5UWAW8QT0VYYgwFArIEF5Fv1eMED2NfDAL93AU+N24PBP0AAAAAAQAv//ICjwLA
+ABsAAAEyFh8BIycmIyIGFRQWMzI/ATMHDgEjIiY1NDYBdX+PCwFWAiKiaHx5ZaIiAlYBCpWBk6a0A
+sCAagoKpqN/gaOmCgplhcicn8sAAAIAZAAAAp8CsgAMABkAAAEeARUUBgcGKwERMzITPgE1NCYnJi
+sBETMyAY59lJp8IzXN0jUVWmdjWRs5d3I4Aq4QqJWUug8EArL9mQ+PeHGHDgX92gAAAAABAGQAAAI
+vArIACwAAJRUhESEVIRUhFSEVAi/+NQHB/pUBTf6zRkYCskbwRvAAAAABAGQAAAIlArIACQAAExUh
+FSERIxEhFboBQ/69VgHBAmzwRv7KArJGAAAAAAEAL//yAo8CwAAfAAABMxEjNQcGIyImNTQ2MzIWH
+wEjJyYjIgYVFBYzMjY1IwGP90wfPnWTprSSf48LAVYCIqJofHllVG+hAU3+s3hARsicn8uAagoKpq
+N/gaN1XAAAAAEAZAAAAowCsgALAAABESMRIREjETMRIRECjFb+hFZWAXwCsv1OAS7+0gKy/sQBPAA
+AAAABAGQAAAC6ArIAAwAAMyMRM7pWVgKyAAABADf/8gHoArIAEwAAAREUBw4BIyImLwEzFxYzMjc2
+NREB6AIFcGpgbQIBVgIHfXQKAQKy/lYxIltob2EpKYyEFD0BpwAAAAABAGQAAAJ0ArIACwAACQEjA
+wcVIxEzEQEzATsBJ3ntQlZWAVVlAWH+nwEnR+ACsv6RAW8AAQBkAAACLwKyAAUAACUVIREzEQIv/j
+VWRkYCsv2UAAABAGQAAAMUArIAFAAAAREjETQ3BgcDIwMmJxYVESMRMxsBAxRWAiMxemx8NxsCVo7
+MywKy/U4BY7ZLco7+nAFmoFxLtP6dArL9lwJpAAAAAAEAZAAAAoACsgANAAAhIwEWFREjETMBJjUR
+MwKAhP67A1aEAUUDVAJeeov+pwKy/aJ5jAFZAAAAAgAv//ICuwLAAAkAEwAAEiAWFRQGICY1NBIyN
+jU0JiIGFRTbATSsrP7MrNrYenrYegLAxaKhxsahov47nIeIm5uIhwACAGQAAAJHArIADgAYAAABHg
+EVFAYHBisBESMRMzITNjQnJisBETMyAZRUX2VOHzuAVtY7GlxcGDWIiDUCrgtnVlVpCgT+5gKy/rU
+V1BUF/vgAAAACAC//zAK9AsAAEgAcAAAlFhcHJiMiBwYjIiY1NDYgFhUUJRQWMjY1NCYiBgI9PUMx
+UDcfKh8omqysATSs/dR62Hp62HpICTg7NgkHxqGixcWitbWHnJyHiJubAAIAZAAAAlgCsgAXACMAA
+CUWFyMmJyYnJisBESMRMzIXHgEVFAYHFiUzMjc+ATU0JyYrAQIqDCJfGQwNWhAhglbiOx9QXEY1Tv
+6bhDATMj1lGSyMtYgtOXR0BwH+1wKyBApbU0BSESRAAgVAOGoQBAABADT/8gIoAsAAJQAAATIWFyM
+uASMiBhUUFhceARUUBiMiJiczHgEzMjY1NCYnLgE1NDYBOmd2ClwGS0E6SUNRdW+HZnKKC1wPWkQ9
+Uk1cZGuEAsBwXUJHNjQ3OhIbZVZZbm5kREo+NT5DFRdYUFdrAAAAAAEAIgAAAmQCsgAHAAABIxEjE
+SM1IQJk9lb2AkICbP2UAmxGAAEAXv/yAmQCsgAXAAABERQHDgEiJicmNREzERQXHgEyNjc2NRECZA
+IIgfCBCAJWAgZYmlgGAgKy/k0qFFxzc1wUKgGz/lUrEkRQUEQSKwGrAAAAAAEAIAAAAnoCsgAGAAA
+hIwMzGwEzAYJ07l3N1FwCsv2PAnEAAAEAGgAAA7ECsgAMAAABAyMLASMDMxsBMxsBA7HAcZyicrZi
+kaB0nJkCsv1OAlP9rQKy/ZsCW/2kAmYAAAEAGQAAAm8CsgALAAAhCwEjEwMzGwEzAxMCCsrEY/bkY
+re+Y/D6AST+3AFcAVb+5gEa/q3+oQAAAQATAAACUQKyAAgAAAERIxEDMxsBMwFdVvRjwLphARD+8A
+EQAaL+sQFPAAABAC4AAAI5ArIACQAAJRUhNQEhNSEVAQI5/fUBof57Aen+YUZGQgIqRkX92QAAAAA
+BAGL/sAEFAwwABwAAARUjETMVIxEBBWlpowMMOP0UOANcAAAB//v/4gE0AtAAAwAABSMDMwE0Pvs+
+HgLuAAAAAQAi/7AAxQMMAAcAABcjNTMRIzUzxaNpaaNQOALsOAABAFAA1wH0AmgABgAAJQsBIxMzE
+wGwjY1GsESw1wFZ/qcBkf5vAAAAAQAy/6oBwv/iAAMAAAUhNSEBwv5wAZBWOAAAAAEAKQJEALYCsg
+ADAAATIycztjhVUAJEbgAAAAACACT/8gHQAiAAHQAlAAAhJwcGIyImNTQ2OwE1NCcmIyIHIz4BMzI
+XFh0BFBcnMjY9ASYVFAF6CR0wVUtgkJoiAgdgaQlaBm1Zrg4DCuQ9R+5MOSFQR1tbDiwUUXBUXowf
+J8c9SjRORzYSgVwAAAAAAgBK//ICRQLfABEAHgAAATIWFRQGIyImLwEVIxEzETc2EzI2NTQmIyIGH
+QEUFgFUcYCVbiNJEyNWVigySElcU01JXmECIJd4i5QTEDRJAt/+3jkq/hRuZV55ZWsdX14AAQAe//
+IB9wIgABgAAAEyFhcjJiMiBhUUFjMyNjczDgEjIiY1NDYBF152DFocbEJXU0A1Rw1aE3pbaoKQAiB
+oWH5qZm1tPDlaXYuLgZcAAAACAB7/8gIZAt8AEQAeAAABESM1BwYjIiY1NDYzMhYfAREDMjY9ATQm
+IyIGFRQWAhlWKDJacYCVbiNJEyOnSV5hQUlcUwLf/SFVOSqXeIuUExA0ARb9VWVrHV9ebmVeeQACA
+B7/8gH9AiAAFQAbAAABFAchHgEzMjY3Mw4BIyImNTQ2MzIWJyIGByEmAf0C/oAGUkA1SwlaD4FXbI
+WObmt45UBVBwEqDQEYFhNjWD84W16Oh3+akU9aU60AAAEAFQAAARoC8gAWAAATBh0BMxUjESMRIzU
+zNTQ3PgEzMhcVJqcDbW1WOTkDB0k8Hx5oAngVITRC/jQBzEIsJRs5PwVHEwAAAAIAHv8uAhkCIAAi
+AC8AAAERFAcOASMiLwEzFx4BMzI2NzY9AQcGIyImNTQ2MzIWHwE1AzI2PQE0JiMiBhUUFgIZAQSEd
+NwRAVcBBU5DTlUDASgyWnGAlW4jSRMjp0leYUFJXFMCEv5wSh1zeq8KCTI8VU0ZIQk5Kpd4i5QTED
+RJ/iJlax1fXm5lXnkAAQBKAAACCgLkABcAAAEWFREjETQnLgEHDgEdASMRMxE3NjMyFgIIAlYCBDs
+6RVRWViE5UVViAYUbQP7WASQxGzI7AQJyf+kC5P7TPSxUAAACAD4AAACsAsAABwALAAASMhYUBiIm
+NBMjETNeLiAgLiBiVlYCwCAuICAu/WACEgAC//P/LgCnAsAABwAVAAASMhYUBiImNBcRFAcGIyInN
+RY3NjURWS4gIC4gYgMLcRwNSgYCAsAgLiAgLo79wCUbZAJGBzMOHgJEAAAAAQBKAAACCALfAAsAAC
+EnBxUjETMREzMHEwGTwTJWVvdu9/rgN6kC3/4oAQv6/ugAAQBG//wA3gLfAA8AABMRFBceATcVBiM
+iJicmNRGcAQIcIxkkKi4CAQLf/bkhERoSBD4EJC8SNAJKAAAAAQBKAAADEAIgACQAAAEWFREjETQn
+JiMiFREjETQnJiMiFREjETMVNzYzMhYXNzYzMhYDCwVWBAxedFYEDF50VlYiJko7ThAvJkpEVAGfI
+jn+vAEcQyRZ1v76ARxDJFnW/voCEk08HzYtRB9HAAAAAAEASgAAAgoCIAAWAAABFhURIxE0JyYjIg
+YdASMRMxU3NjMyFgIIAlYCCXBEVVZWITlRVWIBhRtA/tYBJDEbbHR/6QISWz0sVAAAAAACAB7/8gI
+sAiAABwARAAASIBYUBiAmNBIyNjU0JiIGFRSlAQCHh/8Ah7ieWlqeWgIgn/Cfn/D+s3ZfYHV1YF8A
+AgBK/zwCRQIgABEAHgAAATIWFRQGIyImLwERIxEzFTc2EzI2NTQmIyIGHQEUFgFUcYCVbiNJEyNWV
+igySElcU01JXmECIJd4i5QTEDT+8wLWVTkq/hRuZV55ZWsdX14AAgAe/zwCGQIgABEAHgAAAREjEQ
+cGIyImNTQ2MzIWHwE1AzI2PQE0JiMiBhUUFgIZVigyWnGAlW4jSRMjp0leYUFJXFMCEv0qARk5Kpd
+4i5QTEDRJ/iJlax1fXm5lXnkAAQBKAAABPgIeAA0AAAEyFxUmBhURIxEzFTc2ARoWDkdXVlYwIwIe
+B0EFVlf+0gISU0cYAAEAGP/yAa0CIAAjAAATMhYXIyYjIgYVFBYXHgEVFAYjIiYnMxYzMjY1NCYnL
+gE1NDbkV2MJWhNdKy04PF1XbVhWbgxaE2ktOjlEUllkAiBaS2MrJCUoEBlPQkhOVFZoKCUmLhIWSE
+BIUwAAAAEAFP/4ARQCiQAXAAATERQXHgE3FQYjIiYnJjURIzUzNTMVMxWxAQMmMx8qMjMEAUdHVmM
+BzP7PGw4mFgY/BSwxDjQBNUJ7e0IAAAABAEL/8gICAhIAFwAAAREjNQcGIyImJyY1ETMRFBceATMy
+Nj0BAgJWITlRT2EKBVYEBkA1RFECEv3uWj4qTToiOQE+/tIlJC43c4DpAAAAAAEAAQAAAfwCEgAGA
+AABAyMDMxsBAfzJaclfop8CEv3uAhL+LQHTAAABAAEAAAMLAhIADAAAAQMjCwEjAzMbATMbAQMLqW
+Z2dmapY3t0a3Z7AhL97gG+/kICEv5AAcD+QwG9AAAB//oAAAHWAhIACwAAARMjJwcjEwMzFzczARq
+8ZIuKY763ZoWFYwEO/vLV1QEMAQbNzQAAAQAB/y4B+wISABEAAAEDDgEjIic1FjMyNj8BAzMbAQH7
+2iFZQB8NDRIpNhQH02GenQIS/cFVUAJGASozEwIt/i4B0gABABQAAAGxAg4ACQAAJRUhNQEhNSEVA
+QGx/mMBNP7iAYL+zkREQgGIREX+ewAAAAABAED/sAEOAwwALAAAASMiBhUUFxYVFAYHHgEVFAcGFR
+QWOwEVIyImNTQ3NjU0JzU2NTQnJjU0NjsBAQ4MKiMLDS4pKS4NCyMqDAtERAwLUlILDERECwLUGBk
+WTlsgKzUFBTcrIFtOFhkYOC87GFVMIkUIOAhFIkxVGDsvAAAAAAEAYP84AJoDIAADAAAXIxEzmjo6
+yAPoAAEAIf+wAO8DDAAsAAATFQYVFBcWFRQGKwE1MzI2NTQnJjU0NjcuATU0NzY1NCYrATUzMhYVF
+AcGFRTvUgsMREQLDCojCw0uKSkuDQsjKgwLREQMCwF6OAhFIkxVGDsvOBgZFk5bICs1BQU3KyBbTh
+YZGDgvOxhVTCJFAAABAE0A3wH2AWQAEwAAATMUIyImJyYjIhUjNDMyFhcWMzIBvjhuGywtQR0xOG4
+bLC1BHTEBZIURGCNMhREYIwAAAwAk/94DIgLoAAcAEQApAAAAIBYQBiAmECQgBhUUFiA2NTQlMhYX
+IyYjIgYUFjMyNjczDgEjIiY1NDYBAQFE3d3+vN0CB/7wubkBELn+xVBnD1wSWDo+QTcqOQZcEmZWX
+HN2Aujg/rbg4AFKpr+Mjb6+jYxbWEldV5ZZNShLVn5na34AAgB4AFIB9AGeAAUACwAAAQcXIyc3Mw
+cXIyc3AUqJiUmJifOJiUmJiQGepqampqampqYAAAIAHAHSAQ4CwAAHAA8AABIyFhQGIiY0NiIGFBY
+yNjRgakREakSTNCEhNCECwEJqQkJqCiM4IyM4AAAAAAIAUAAAAfQCCwALAA8AAAEzFSMVIzUjNTM1
+MxMhNSEBP7W1OrW1OrX+XAGkAVs4tLQ4sP31OAAAAQB0AkQBAQKyAAMAABMjNzOsOD1QAkRuAAAAA
+AEAIADsAKoBdgAHAAASMhYUBiImNEg6KCg6KAF2KDooKDoAAAIAOQBSAbUBngAFAAsAACUHIzcnMw
+UHIzcnMwELiUmJiUkBM4lJiYlJ+KampqampqYAAAABADYB5QDhAt8ABAAAEzczByM2Xk1OXQHv8Po
+AAQAWAeUAwQLfAAQAABMHIzczwV5NTl0C1fD6AAIANgHlAYsC3wAEAAkAABM3MwcjPwEzByM2Xk1O
+XapeTU5dAe/w+grw+gAAAgAWAeUBawLfAAQACQAAEwcjNzMXByM3M8FeTU5dql5NTl0C1fD6CvD6A
+AADACX/8gI1AHIABwAPABcAADYyFhQGIiY0NjIWFAYiJjQ2MhYUBiImNEk4JCQ4JOw4JCQ4JOw4JC
+Q4JHIkOCQkOCQkOCQkOCQkOCQkOAAAAAEAeABSAUoBngAFAAABBxcjJzcBSomJSYmJAZ6mpqamAAA
+AAAEAOQBSAQsBngAFAAAlByM3JzMBC4lJiYlJ+KampgAAAf9qAAABgQKyAAMAACsBATM/VwHAVwKy
+AAAAAAIAFAHIAdwClAAHABQAABMVIxUjNSM1BRUjNwcjJxcjNTMXN9pKMkoByDICKzQqATJLKysCl
+CmjoykBy46KiY3Lm5sAAQAVAAABvALyABgAAAERIxEjESMRIzUzNTQ3NjMyFxUmBgcGHQEBvFbCVj
+k5AxHHHx5iVgcDAg798gHM/jQBzEIOJRuWBUcIJDAVIRYAAAABABX//AHkAvIAJQAAJR4BNxUGIyI
+mJyY1ESYjIgcGHQEzFSMRIxEjNTM1NDc2MzIXERQBowIcIxkkKi4CAR4nXgwDbW1WLy8DEbNdOmYa
+EQQ/BCQvEjQCFQZWFSEWQv40AcxCDiUblhP9uSEAAAAAAAAWAQ4AAQAAAAAAAAATACgAAQAAAAAAA
+QAHAEwAAQAAAAAAAgAHAGQAAQAAAAAAAwAaAKIAAQAAAAAABAAHAM0AAQAAAAAABQA8AU8AAQAAAA
+AABgAPAawAAQAAAAAACAALAdQAAQAAAAAACQALAfgAAQAAAAAACwAXAjQAAQAAAAAADAAXAnwAAwA
+BBAkAAAAmAAAAAwABBAkAAQAOADwAAwABBAkAAgAOAFQAAwABBAkAAwA0AGwAAwABBAkABAAOAL0A
+AwABBAkABQB4ANUAAwABBAkABgAeAYwAAwABBAkACAAWAbwAAwABBAkACQAWAeAAAwABBAkACwAuA
+gQAAwABBAkADAAuAkwATgBvACAAUgBpAGcAaAB0AHMAIABSAGUAcwBlAHIAdgBlAGQALgAATm8gUm
+lnaHRzIFJlc2VydmVkLgAAQQBpAGwAZQByAG8AbgAAQWlsZXJvbgAAUgBlAGcAdQBsAGEAcgAAUmV
+ndWxhcgAAMQAuADEAMAAyADsAVQBLAFcATgA7AEEAaQBsAGUAcgBvAG4ALQBSAGUAZwB1AGwAYQBy
+AAAxLjEwMjtVS1dOO0FpbGVyb24tUmVndWxhcgAAQQBpAGwAZQByAG8AbgAAQWlsZXJvbgAAVgBlA
+HIAcwBpAG8AbgAgADEALgAxADAAMgA7AFAAUwAgADAAMAAxAC4AMQAwADIAOwBoAG8AdABjAG8Abg
+B2ACAAMQAuADAALgA3ADAAOwBtAGEAawBlAG8AdABmAC4AbABpAGIAMgAuADUALgA1ADgAMwAyADk
+AAFZlcnNpb24gMS4xMDI7UFMgMDAxLjEwMjtob3Rjb252IDEuMC43MDttYWtlb3RmLmxpYjIuNS41
+ODMyOQAAQQBpAGwAZQByAG8AbgAtAFIAZQBnAHUAbABhAHIAAEFpbGVyb24tUmVndWxhcgAAUwBvA
+HIAYQAgAFMAYQBnAGEAbgBvAABTb3JhIFNhZ2FubwAAUwBvAHIAYQAgAFMAYQBnAGEAbgBvAABTb3
+JhIFNhZ2FubwAAaAB0AHQAcAA6AC8ALwB3AHcAdwAuAGQAbwB0AGMAbwBsAG8AbgAuAG4AZQB0AAB
+odHRwOi8vd3d3LmRvdGNvbG9uLm5ldAAAaAB0AHQAcAA6AC8ALwB3AHcAdwAuAGQAbwB0AGMAbwBs
+AG8AbgAuAG4AZQB0AABodHRwOi8vd3d3LmRvdGNvbG9uLm5ldAAAAAACAAAAAAAA/4MAMgAAAAAAA
+AAAAAAAAAAAAAAAAAAAAHQAAAABAAIAAwAEAAUABgAHAAgACQAKAAsADAANAA4ADwAQABEAEgATAB
+QAFQAWABcAGAAZABoAGwAcAB0AHgAfACAAIQAiACMAJAAlACYAJwAoACkAKgArACwALQAuAC8AMAA
+xADIAMwA0ADUANgA3ADgAOQA6ADsAPAA9AD4APwBAAEEAQgBDAEQARQBGAEcASABJAEoASwBMAE0A
+TgBPAFAAUQBSAFMAVABVAFYAVwBYAFkAWgBbAFwAXQBeAF8AYABhAIsAqQCDAJMAjQDDAKoAtgC3A
+LQAtQCrAL4AvwC8AIwAwADBAAAAAAAB//8AAgABAAAADAAAABwAAAACAAIAAwBxAAEAcgBzAAIABA
+AAAAIAAAABAAAACgBMAGYAAkRGTFQADmxhdG4AGgAEAAAAAP//AAEAAAAWAANDQVQgAB5NT0wgABZ
+ST00gABYAAP//AAEAAAAA//8AAgAAAAEAAmxpZ2EADmxvY2wAFAAAAAEAAQAAAAEAAAACAAYAEAAG
+AAAAAgASADQABAAAAAEATAADAAAAAgAQABYAAQAcAAAAAQABAE8AAQABAGcAAQABAE8AAwAAAAIAE
+AAWAAEAHAAAAAEAAQAvAAEAAQBnAAEAAQAvAAEAGgABAAgAAgAGAAwAcwACAE8AcgACAEwAAQABAE
+kAAAABAAAACgBGAGAAAkRGTFQADmxhdG4AHAAEAAAAAP//AAIAAAABABYAA0NBVCAAFk1PTCAAFlJ
+PTSAAFgAA//8AAgAAAAEAAmNwc3AADmtlcm4AFAAAAAEAAAAAAAEAAQACAAYADgABAAAAAQASAAIA
+AAACAB4ANgABAAoABQAFAAoAAgABACQAPQAAAAEAEgAEAAAAAQAMAAEAOP/nAAEAAQAkAAIGigAEA
+AAFJAXKABoAGQAA//gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+AAAAD/sv+4/+z/7v/MAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+AAAAAAAD/xAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/9T/6AAAAAD/8QAA
+ABD/vQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/7gAAAAAAAAAAAAAAAAAA//MAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABIAAAAAAAAAAP/5AAAAAAAAAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/gAAD/4AAAAAAAAAAAAAAAAAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA//L/9AAAAAAAAAAAAAAAAAAAAAAA
+AAAAAAAAAAAA/+gAAAAAAAkAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/zAAAAAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/mAAAAAAAAAAAAAAAAAAD
+/4gAA//AAAAAA//YAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/+AAAAAAAAP/OAAAAAAAAAAAAAAAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/zv/qAAAAAP/0AAAACAAAAAAAAAAAAAAAAAAAAAAAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/ZAAD/egAA/1kAAAAA/5D/rgAAAAAAAAAAAA
+AAAAAAAAAAAAAAAAD/9AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+AAAAAAAAAAAAAAAAAAAD/8AAA/7b/8P+wAAD/8P/E/98AAAAA/8P/+P/0//oAAAAAAAAAAAAA//gA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/+AAAAAAAAAAAAAAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/w//C/9MAAP/SAAD/9wAAAAAAAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAD/yAAA/+kAAAAA//QAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/9wAAAAD//QAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+AAAAAAAAAAAAAAAAAAAAAP/2AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+AAAAAAAAP/cAAAAAAAAAAAAAAAA/7YAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAP/8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/6AAAAAAAAAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQAkAFAAEAAAAAQACwAAABcA
+BgAAAAAAAAAIAA4AAAAAAAsAEgAAAAAAAAATABkAAwANAAAAAQAJAAAAAAAAAAAAAAAAAAAAGAAAA
+AAABwAAAAAAAAAAAAAAFQAFAAAAAAAYABgAAAAUAAAACgAAAAwAAgAPABEAFgAAAAAAAAAAAAAAAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAFAAEAEQBdAAYAAAAAAAAAAAAAAAAAAAAAAAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAQAAAAcAAAAAAAAABwAAAAAACAAAAAAAAAAAAAcAAAAHAAAAEwAJ
+ABUADgAPAAAACwAQAAAAAAAAAAAAAAAAAAUAGAACAAIAAgAAAAIAGAAXAAAAGAAAABYAFgACABYAA
+gAWAAAAEQADAAoAFAAMAA0ABAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAASAAAAEgAGAAEAHgAkAC
+YAJwApACoALQAuAC8AMgAzADcAOAA5ADoAPAA9AEUASABOAE8AUgBTAFUAVwBZAFoAWwBcAF0AcwA
+AAAAAAQAAAADa3tfFAAAAANAan9kAAAAA4QodoQ==
+"""
+ )
+ ),
+ 10 if size is None else size,
+ layout_engine=Layout.BASIC,
+ )
+ else:
+ f = ImageFont()
+ f._load_pilfont_data(
+ # courB08
+ BytesIO(
+ base64.b64decode(
+ b"""
+UElMZm9udAo7Ozs7OzsxMDsKREFUQQoAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAYAAAAA//8AAQAAAAAAAAABAAEA
+BgAAAAH/+gADAAAAAQAAAAMABgAGAAAAAf/6AAT//QADAAAABgADAAYAAAAA//kABQABAAYAAAAL
+AAgABgAAAAD/+AAFAAEACwAAABAACQAGAAAAAP/5AAUAAAAQAAAAFQAHAAYAAP////oABQAAABUA
+AAAbAAYABgAAAAH/+QAE//wAGwAAAB4AAwAGAAAAAf/5AAQAAQAeAAAAIQAIAAYAAAAB//kABAAB
+ACEAAAAkAAgABgAAAAD/+QAE//0AJAAAACgABAAGAAAAAP/6AAX//wAoAAAALQAFAAYAAAAB//8A
+BAACAC0AAAAwAAMABgAAAAD//AAF//0AMAAAADUAAQAGAAAAAf//AAMAAAA1AAAANwABAAYAAAAB
+//kABQABADcAAAA7AAgABgAAAAD/+QAFAAAAOwAAAEAABwAGAAAAAP/5AAYAAABAAAAARgAHAAYA
+AAAA//kABQAAAEYAAABLAAcABgAAAAD/+QAFAAAASwAAAFAABwAGAAAAAP/5AAYAAABQAAAAVgAH
+AAYAAAAA//kABQAAAFYAAABbAAcABgAAAAD/+QAFAAAAWwAAAGAABwAGAAAAAP/5AAUAAABgAAAA
+ZQAHAAYAAAAA//kABQAAAGUAAABqAAcABgAAAAD/+QAFAAAAagAAAG8ABwAGAAAAAf/8AAMAAABv
+AAAAcQAEAAYAAAAA//wAAwACAHEAAAB0AAYABgAAAAD/+gAE//8AdAAAAHgABQAGAAAAAP/7AAT/
+/gB4AAAAfAADAAYAAAAB//oABf//AHwAAACAAAUABgAAAAD/+gAFAAAAgAAAAIUABgAGAAAAAP/5
+AAYAAQCFAAAAiwAIAAYAAP////oABgAAAIsAAACSAAYABgAA////+gAFAAAAkgAAAJgABgAGAAAA
+AP/6AAUAAACYAAAAnQAGAAYAAP////oABQAAAJ0AAACjAAYABgAA////+gAFAAAAowAAAKkABgAG
+AAD////6AAUAAACpAAAArwAGAAYAAAAA//oABQAAAK8AAAC0AAYABgAA////+gAGAAAAtAAAALsA
+BgAGAAAAAP/6AAQAAAC7AAAAvwAGAAYAAP////oABQAAAL8AAADFAAYABgAA////+gAGAAAAxQAA
+AMwABgAGAAD////6AAUAAADMAAAA0gAGAAYAAP////oABQAAANIAAADYAAYABgAA////+gAGAAAA
+2AAAAN8ABgAGAAAAAP/6AAUAAADfAAAA5AAGAAYAAP////oABQAAAOQAAADqAAYABgAAAAD/+gAF
+AAEA6gAAAO8ABwAGAAD////6AAYAAADvAAAA9gAGAAYAAAAA//oABQAAAPYAAAD7AAYABgAA////
++gAFAAAA+wAAAQEABgAGAAD////6AAYAAAEBAAABCAAGAAYAAP////oABgAAAQgAAAEPAAYABgAA
+////+gAGAAABDwAAARYABgAGAAAAAP/6AAYAAAEWAAABHAAGAAYAAP////oABgAAARwAAAEjAAYA
+BgAAAAD/+gAFAAABIwAAASgABgAGAAAAAf/5AAQAAQEoAAABKwAIAAYAAAAA//kABAABASsAAAEv
+AAgABgAAAAH/+QAEAAEBLwAAATIACAAGAAAAAP/5AAX//AEyAAABNwADAAYAAAAAAAEABgACATcA
+AAE9AAEABgAAAAH/+QAE//wBPQAAAUAAAwAGAAAAAP/7AAYAAAFAAAABRgAFAAYAAP////kABQAA
+AUYAAAFMAAcABgAAAAD/+wAFAAABTAAAAVEABQAGAAAAAP/5AAYAAAFRAAABVwAHAAYAAAAA//sA
+BQAAAVcAAAFcAAUABgAAAAD/+QAFAAABXAAAAWEABwAGAAAAAP/7AAYAAgFhAAABZwAHAAYAAP//
+//kABQAAAWcAAAFtAAcABgAAAAD/+QAGAAABbQAAAXMABwAGAAAAAP/5AAQAAgFzAAABdwAJAAYA
+AP////kABgAAAXcAAAF+AAcABgAAAAD/+QAGAAABfgAAAYQABwAGAAD////7AAUAAAGEAAABigAF
+AAYAAP////sABQAAAYoAAAGQAAUABgAAAAD/+wAFAAABkAAAAZUABQAGAAD////7AAUAAgGVAAAB
+mwAHAAYAAAAA//sABgACAZsAAAGhAAcABgAAAAD/+wAGAAABoQAAAacABQAGAAAAAP/7AAYAAAGn
+AAABrQAFAAYAAAAA//kABgAAAa0AAAGzAAcABgAA////+wAGAAABswAAAboABQAGAAD////7AAUA
+AAG6AAABwAAFAAYAAP////sABgAAAcAAAAHHAAUABgAAAAD/+wAGAAABxwAAAc0ABQAGAAD////7
+AAYAAgHNAAAB1AAHAAYAAAAA//sABQAAAdQAAAHZAAUABgAAAAH/+QAFAAEB2QAAAd0ACAAGAAAA
+Av/6AAMAAQHdAAAB3gAHAAYAAAAA//kABAABAd4AAAHiAAgABgAAAAD/+wAF//0B4gAAAecAAgAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAYAAAAB
+//sAAwACAecAAAHpAAcABgAAAAD/+QAFAAEB6QAAAe4ACAAGAAAAAP/5AAYAAAHuAAAB9AAHAAYA
+AAAA//oABf//AfQAAAH5AAUABgAAAAD/+QAGAAAB+QAAAf8ABwAGAAAAAv/5AAMAAgH/AAACAAAJ
+AAYAAAAA//kABQABAgAAAAIFAAgABgAAAAH/+gAE//sCBQAAAggAAQAGAAAAAP/5AAYAAAIIAAAC
+DgAHAAYAAAAB//kABf/+Ag4AAAISAAUABgAA////+wAGAAACEgAAAhkABQAGAAAAAP/7AAX//gIZ
+AAACHgADAAYAAAAA//wABf/9Ah4AAAIjAAEABgAAAAD/+QAHAAACIwAAAioABwAGAAAAAP/6AAT/
++wIqAAACLgABAAYAAAAA//kABP/8Ai4AAAIyAAMABgAAAAD/+gAFAAACMgAAAjcABgAGAAAAAf/5
+AAT//QI3AAACOgAEAAYAAAAB//kABP/9AjoAAAI9AAQABgAAAAL/+QAE//sCPQAAAj8AAgAGAAD/
+///7AAYAAgI/AAACRgAHAAYAAAAA//kABgABAkYAAAJMAAgABgAAAAH//AAD//0CTAAAAk4AAQAG
+AAAAAf//AAQAAgJOAAACUQADAAYAAAAB//kABP/9AlEAAAJUAAQABgAAAAH/+QAF//4CVAAAAlgA
+BQAGAAD////7AAYAAAJYAAACXwAFAAYAAP////kABgAAAl8AAAJmAAcABgAA////+QAGAAACZgAA
+Am0ABwAGAAD////5AAYAAAJtAAACdAAHAAYAAAAA//sABQACAnQAAAJ5AAcABgAA////9wAGAAAC
+eQAAAoAACQAGAAD////3AAYAAAKAAAAChwAJAAYAAP////cABgAAAocAAAKOAAkABgAA////9wAG
+AAACjgAAApUACQAGAAD////4AAYAAAKVAAACnAAIAAYAAP////cABgAAApwAAAKjAAkABgAA////
++gAGAAACowAAAqoABgAGAAAAAP/6AAUAAgKqAAACrwAIAAYAAP////cABQAAAq8AAAK1AAkABgAA
+////9wAFAAACtQAAArsACQAGAAD////3AAUAAAK7AAACwQAJAAYAAP////gABQAAAsEAAALHAAgA
+BgAAAAD/9wAEAAACxwAAAssACQAGAAAAAP/3AAQAAALLAAACzwAJAAYAAAAA//cABAAAAs8AAALT
+AAkABgAAAAD/+AAEAAAC0wAAAtcACAAGAAD////6AAUAAALXAAAC3QAGAAYAAP////cABgAAAt0A
+AALkAAkABgAAAAD/9wAFAAAC5AAAAukACQAGAAAAAP/3AAUAAALpAAAC7gAJAAYAAAAA//cABQAA
+Au4AAALzAAkABgAAAAD/9wAFAAAC8wAAAvgACQAGAAAAAP/4AAUAAAL4AAAC/QAIAAYAAAAA//oA
+Bf//Av0AAAMCAAUABgAA////+gAGAAADAgAAAwkABgAGAAD////3AAYAAAMJAAADEAAJAAYAAP//
+//cABgAAAxAAAAMXAAkABgAA////9wAGAAADFwAAAx4ACQAGAAD////4AAYAAAAAAAoABwASAAYA
+AP////cABgAAAAcACgAOABMABgAA////+gAFAAAADgAKABQAEAAGAAD////6AAYAAAAUAAoAGwAQ
+AAYAAAAA//gABgAAABsACgAhABIABgAAAAD/+AAGAAAAIQAKACcAEgAGAAAAAP/4AAYAAAAnAAoA
+LQASAAYAAAAA//gABgAAAC0ACgAzABIABgAAAAD/+QAGAAAAMwAKADkAEQAGAAAAAP/3AAYAAAA5
+AAoAPwATAAYAAP////sABQAAAD8ACgBFAA8ABgAAAAD/+wAFAAIARQAKAEoAEQAGAAAAAP/4AAUA
+AABKAAoATwASAAYAAAAA//gABQAAAE8ACgBUABIABgAAAAD/+AAFAAAAVAAKAFkAEgAGAAAAAP/5
+AAUAAABZAAoAXgARAAYAAAAA//gABgAAAF4ACgBkABIABgAAAAD/+AAGAAAAZAAKAGoAEgAGAAAA
+AP/4AAYAAABqAAoAcAASAAYAAAAA//kABgAAAHAACgB2ABEABgAAAAD/+AAFAAAAdgAKAHsAEgAG
+AAD////4AAYAAAB7AAoAggASAAYAAAAA//gABQAAAIIACgCHABIABgAAAAD/+AAFAAAAhwAKAIwA
+EgAGAAAAAP/4AAUAAACMAAoAkQASAAYAAAAA//gABQAAAJEACgCWABIABgAAAAD/+QAFAAAAlgAK
+AJsAEQAGAAAAAP/6AAX//wCbAAoAoAAPAAYAAAAA//oABQABAKAACgClABEABgAA////+AAGAAAA
+pQAKAKwAEgAGAAD////4AAYAAACsAAoAswASAAYAAP////gABgAAALMACgC6ABIABgAA////+QAG
+AAAAugAKAMEAEQAGAAD////4AAYAAgDBAAoAyAAUAAYAAP////kABQACAMgACgDOABMABgAA////
++QAGAAIAzgAKANUAEw==
+"""
+ )
+ ),
+ Image.open(
+ BytesIO(
+ base64.b64decode(
+ b"""
+iVBORw0KGgoAAAANSUhEUgAAAx4AAAAUAQAAAAArMtZoAAAEwElEQVR4nABlAJr/AHVE4czCI/4u
+Mc4b7vuds/xzjz5/3/7u/n9vMe7vnfH/9++vPn/xyf5zhxzjt8GHw8+2d83u8x27199/nxuQ6Od9
+M43/5z2I+9n9ZtmDBwMQECDRQw/eQIQohJXxpBCNVE6QCCAAAAD//wBlAJr/AgALyj1t/wINwq0g
+LeNZUworuN1cjTPIzrTX6ofHWeo3v336qPzfEwRmBnHTtf95/fglZK5N0PDgfRTslpGBvz7LFc4F
+IUXBWQGjQ5MGCx34EDFPwXiY4YbYxavpnhHFrk14CDAAAAD//wBlAJr/AgKqRooH2gAgPeggvUAA
+Bu2WfgPoAwzRAABAAAAAAACQgLz/3Uv4Gv+gX7BJgDeeGP6AAAD1NMDzKHD7ANWr3loYbxsAD791
+NAADfcoIDyP44K/jv4Y63/Z+t98Ovt+ub4T48LAAAAD//wBlAJr/AuplMlADJAAAAGuAphWpqhMx
+in0A/fRvAYBABPgBwBUgABBQ/sYAyv9g0bCHgOLoGAAAAAAAREAAwI7nr0ArYpow7aX8//9LaP/9
+SjdavWA8ePHeBIKB//81/83ndznOaXx379wAAAD//wBlAJr/AqDxW+D3AABAAbUh/QMnbQag/gAY
+AYDAAACgtgD/gOqAAAB5IA/8AAAk+n9w0AAA8AAAmFRJuPo27ciC0cD5oeW4E7KA/wD3ECMAn2tt
+y8PgwH8AfAxFzC0JzeAMtratAsC/ffwAAAD//wBlAJr/BGKAyCAA4AAAAvgeYTAwHd1kmQF5chkG
+ABoMIHcL5xVpTfQbUqzlAAAErwAQBgAAEOClA5D9il08AEh/tUzdCBsXkbgACED+woQg8Si9VeqY
+lODCn7lmF6NhnAEYgAAA/NMIAAAAAAD//2JgjLZgVGBg5Pv/Tvpc8hwGBjYGJADjHDrAwPzAjv/H
+/Wf3PzCwtzcwHmBgYGcwbZz8wHaCAQMDOwMDQ8MCBgYOC3W7mp+f0w+wHOYxO3OG+e376hsMZjk3
+AAAAAP//YmCMY2A4wMAIN5e5gQETPD6AZisDAwMDgzSDAAPjByiHcQMDAwMDg1nOze1lByRu5/47
+c4859311AYNZzg0AAAAA//9iYGDBYihOIIMuwIjGL39/fwffA8b//xv/P2BPtzzHwCBjUQAAAAD/
+/yLFBrIBAAAA//9i1HhcwdhizX7u8NZNzyLbvT97bfrMf/QHI8evOwcSqGUJAAAA//9iYBB81iSw
+pEE170Qrg5MIYydHqwdDQRMrAwcVrQAAAAD//2J4x7j9AAMDn8Q/BgYLBoaiAwwMjPdvMDBYM1Tv
+oJodAAAAAP//Yqo/83+dxePWlxl3npsel9lvLfPcqlE9725C+acfVLMEAAAA//9i+s9gwCoaaGMR
+evta/58PTEWzr21hufPjA8N+qlnBwAAAAAD//2JiWLci5v1+HmFXDqcnULE/MxgYGBj+f6CaJQAA
+AAD//2Ji2FrkY3iYpYC5qDeGgeEMAwPDvwQBBoYvcTwOVLMEAAAA//9isDBgkP///0EOg9z35v//
+Gc/eeW7BwPj5+QGZhANUswMAAAD//2JgqGBgYGBgqEMXlvhMPUsAAAAA//8iYDd1AAAAAP//AwDR
+w7IkEbzhVQAAAABJRU5ErkJggg==
+"""
+ )
+ )
+ ),
+ )
+ return f
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/ImageGrab.py b/Backend/venv/lib/python3.12/site-packages/PIL/ImageGrab.py
new file mode 100644
index 00000000..bcfffc3d
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/PIL/ImageGrab.py
@@ -0,0 +1,177 @@
+#
+# The Python Imaging Library
+# $Id$
+#
+# screen grabber
+#
+# History:
+# 2001-04-26 fl created
+# 2001-09-17 fl use builtin driver, if present
+# 2002-11-19 fl added grabclipboard support
+#
+# Copyright (c) 2001-2002 by Secret Labs AB
+# Copyright (c) 2001-2002 by Fredrik Lundh
+#
+# See the README file for information on usage and redistribution.
+#
+
+import io
+import os
+import shutil
+import subprocess
+import sys
+import tempfile
+
+from . import Image
+
+
+def grab(bbox=None, include_layered_windows=False, all_screens=False, xdisplay=None):
+ if xdisplay is None:
+ if sys.platform == "darwin":
+ fh, filepath = tempfile.mkstemp(".png")
+ os.close(fh)
+ args = ["screencapture"]
+ if bbox:
+ left, top, right, bottom = bbox
+ args += ["-R", f"{left},{top},{right-left},{bottom-top}"]
+ subprocess.call(args + ["-x", filepath])
+ im = Image.open(filepath)
+ im.load()
+ os.unlink(filepath)
+ if bbox:
+ im_resized = im.resize((right - left, bottom - top))
+ im.close()
+ return im_resized
+ return im
+ elif sys.platform == "win32":
+ offset, size, data = Image.core.grabscreen_win32(
+ include_layered_windows, all_screens
+ )
+ im = Image.frombytes(
+ "RGB",
+ size,
+ data,
+ # RGB, 32-bit line padding, origin lower left corner
+ "raw",
+ "BGR",
+ (size[0] * 3 + 3) & -4,
+ -1,
+ )
+ if bbox:
+ x0, y0 = offset
+ left, top, right, bottom = bbox
+ im = im.crop((left - x0, top - y0, right - x0, bottom - y0))
+ return im
+ try:
+ if not Image.core.HAVE_XCB:
+ msg = "Pillow was built without XCB support"
+ raise OSError(msg)
+ size, data = Image.core.grabscreen_x11(xdisplay)
+ except OSError:
+ if (
+ xdisplay is None
+ and sys.platform not in ("darwin", "win32")
+ and shutil.which("gnome-screenshot")
+ ):
+ fh, filepath = tempfile.mkstemp(".png")
+ os.close(fh)
+ subprocess.call(["gnome-screenshot", "-f", filepath])
+ im = Image.open(filepath)
+ im.load()
+ os.unlink(filepath)
+ if bbox:
+ im_cropped = im.crop(bbox)
+ im.close()
+ return im_cropped
+ return im
+ else:
+ raise
+ else:
+ im = Image.frombytes("RGB", size, data, "raw", "BGRX", size[0] * 4, 1)
+ if bbox:
+ im = im.crop(bbox)
+ return im
+
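A minimal usage sketch for ``grab`` (illustrative only; the region and output
path are hypothetical):

    from PIL import ImageGrab

    # Capture the full primary screen, then a specific region.
    full = ImageGrab.grab()
    region = ImageGrab.grab(bbox=(0, 0, 640, 480))  # left, top, right, bottom
    region.save("region.png")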
+
+def grabclipboard():
+ if sys.platform == "darwin":
+ fh, filepath = tempfile.mkstemp(".png")
+ os.close(fh)
+ commands = [
+ 'set theFile to (open for access POSIX file "'
+ + filepath
+ + '" with write permission)',
+ "try",
+ " write (the clipboard as «class PNGf») to theFile",
+ "end try",
+ "close access theFile",
+ ]
+ script = ["osascript"]
+ for command in commands:
+ script += ["-e", command]
+ subprocess.call(script)
+
+ im = None
+ if os.stat(filepath).st_size != 0:
+ im = Image.open(filepath)
+ im.load()
+ os.unlink(filepath)
+ return im
+ elif sys.platform == "win32":
+ fmt, data = Image.core.grabclipboard_win32()
+ if fmt == "file": # CF_HDROP
+ import struct
+
+ o = struct.unpack_from("I", data)[0]
+ if data[16] != 0:
+ files = data[o:].decode("utf-16le").split("\0")
+ else:
+ files = data[o:].decode("mbcs").split("\0")
+ return files[: files.index("")]
+ if isinstance(data, bytes):
+ data = io.BytesIO(data)
+ if fmt == "png":
+ from . import PngImagePlugin
+
+ return PngImagePlugin.PngImageFile(data)
+ elif fmt == "DIB":
+ from . import BmpImagePlugin
+
+ return BmpImagePlugin.DibImageFile(data)
+ return None
+ else:
+ if os.getenv("WAYLAND_DISPLAY"):
+ session_type = "wayland"
+ elif os.getenv("DISPLAY"):
+ session_type = "x11"
+ else: # Session type check failed
+ session_type = None
+
+ if shutil.which("wl-paste") and session_type in ("wayland", None):
+ output = subprocess.check_output(["wl-paste", "-l"]).decode()
+ mimetypes = output.splitlines()
+ if "image/png" in mimetypes:
+ mimetype = "image/png"
+ elif mimetypes:
+ mimetype = mimetypes[0]
+ else:
+ mimetype = None
+
+ args = ["wl-paste"]
+ if mimetype:
+ args.extend(["-t", mimetype])
+ elif shutil.which("xclip") and session_type in ("x11", None):
+ args = ["xclip", "-selection", "clipboard", "-t", "image/png", "-o"]
+ else:
+ msg = "wl-paste or xclip is required for ImageGrab.grabclipboard() on Linux"
+ raise NotImplementedError(msg)
+
+ p = subprocess.run(args, capture_output=True)
+ err = p.stderr
+ if err:
+ msg = f"{args[0]} error: {err.strip().decode()}"
+ raise ChildProcessError(msg)
+ data = io.BytesIO(p.stdout)
+ im = Image.open(data)
+ im.load()
+ return im
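``grabclipboard`` can return an image, a list of file names (Windows
``CF_HDROP``), or ``None``, so callers should check the type. A hedged sketch
(behaviour varies by platform and clipboard contents):

    from PIL import Image, ImageGrab

    clip = ImageGrab.grabclipboard()
    if isinstance(clip, Image.Image):
        clip.save("clipboard.png")
    elif isinstance(clip, list):  # file names copied to the clipboard
        print("files on clipboard:", clip)
    else:
        print("no image on clipboard")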
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/ImageMath.py b/Backend/venv/lib/python3.12/site-packages/PIL/ImageMath.py
new file mode 100644
index 00000000..eb6bbe6c
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/PIL/ImageMath.py
@@ -0,0 +1,263 @@
+#
+# The Python Imaging Library
+# $Id$
+#
+# a simple math add-on for the Python Imaging Library
+#
+# History:
+# 1999-02-15 fl Original PIL Plus release
+# 2005-05-05 fl Simplified and cleaned up for PIL 1.1.6
+# 2005-09-12 fl Fixed int() and float() for Python 2.4.1
+#
+# Copyright (c) 1999-2005 by Secret Labs AB
+# Copyright (c) 2005 by Fredrik Lundh
+#
+# See the README file for information on usage and redistribution.
+#
+
+import builtins
+
+from . import Image, _imagingmath
+
+
+def _isconstant(v):
+ return isinstance(v, (int, float))
+
+
+class _Operand:
+ """Wraps an image operand, providing standard operators"""
+
+ def __init__(self, im):
+ self.im = im
+
+ def __fixup(self, im1):
+ # convert image to suitable mode
+ if isinstance(im1, _Operand):
+ # argument was an image.
+ if im1.im.mode in ("1", "L"):
+ return im1.im.convert("I")
+ elif im1.im.mode in ("I", "F"):
+ return im1.im
+ else:
+ msg = f"unsupported mode: {im1.im.mode}"
+ raise ValueError(msg)
+ else:
+ # argument was a constant
+ if _isconstant(im1) and self.im.mode in ("1", "L", "I"):
+ return Image.new("I", self.im.size, im1)
+ else:
+ return Image.new("F", self.im.size, im1)
+
+ def apply(self, op, im1, im2=None, mode=None):
+ im1 = self.__fixup(im1)
+ if im2 is None:
+ # unary operation
+ out = Image.new(mode or im1.mode, im1.size, None)
+ im1.load()
+ try:
+ op = getattr(_imagingmath, op + "_" + im1.mode)
+ except AttributeError as e:
+ msg = f"bad operand type for '{op}'"
+ raise TypeError(msg) from e
+ _imagingmath.unop(op, out.im.id, im1.im.id)
+ else:
+ # binary operation
+ im2 = self.__fixup(im2)
+ if im1.mode != im2.mode:
+ # convert both arguments to floating point
+ if im1.mode != "F":
+ im1 = im1.convert("F")
+ if im2.mode != "F":
+ im2 = im2.convert("F")
+ if im1.size != im2.size:
+ # crop both arguments to a common size
+ size = (min(im1.size[0], im2.size[0]), min(im1.size[1], im2.size[1]))
+ if im1.size != size:
+ im1 = im1.crop((0, 0) + size)
+ if im2.size != size:
+ im2 = im2.crop((0, 0) + size)
+ out = Image.new(mode or im1.mode, im1.size, None)
+ im1.load()
+ im2.load()
+ try:
+ op = getattr(_imagingmath, op + "_" + im1.mode)
+ except AttributeError as e:
+ msg = f"bad operand type for '{op}'"
+ raise TypeError(msg) from e
+ _imagingmath.binop(op, out.im.id, im1.im.id, im2.im.id)
+ return _Operand(out)
+
+ # unary operators
+ def __bool__(self):
+ # an image is "true" if it contains at least one non-zero pixel
+ return self.im.getbbox() is not None
+
+ def __abs__(self):
+ return self.apply("abs", self)
+
+ def __pos__(self):
+ return self
+
+ def __neg__(self):
+ return self.apply("neg", self)
+
+ # binary operators
+ def __add__(self, other):
+ return self.apply("add", self, other)
+
+ def __radd__(self, other):
+ return self.apply("add", other, self)
+
+ def __sub__(self, other):
+ return self.apply("sub", self, other)
+
+ def __rsub__(self, other):
+ return self.apply("sub", other, self)
+
+ def __mul__(self, other):
+ return self.apply("mul", self, other)
+
+ def __rmul__(self, other):
+ return self.apply("mul", other, self)
+
+ def __truediv__(self, other):
+ return self.apply("div", self, other)
+
+ def __rtruediv__(self, other):
+ return self.apply("div", other, self)
+
+ def __mod__(self, other):
+ return self.apply("mod", self, other)
+
+ def __rmod__(self, other):
+ return self.apply("mod", other, self)
+
+ def __pow__(self, other):
+ return self.apply("pow", self, other)
+
+ def __rpow__(self, other):
+ return self.apply("pow", other, self)
+
+ # bitwise
+ def __invert__(self):
+ return self.apply("invert", self)
+
+ def __and__(self, other):
+ return self.apply("and", self, other)
+
+ def __rand__(self, other):
+ return self.apply("and", other, self)
+
+ def __or__(self, other):
+ return self.apply("or", self, other)
+
+ def __ror__(self, other):
+ return self.apply("or", other, self)
+
+ def __xor__(self, other):
+ return self.apply("xor", self, other)
+
+ def __rxor__(self, other):
+ return self.apply("xor", other, self)
+
+ def __lshift__(self, other):
+ return self.apply("lshift", self, other)
+
+ def __rshift__(self, other):
+ return self.apply("rshift", self, other)
+
+ # logical
+ def __eq__(self, other):
+ return self.apply("eq", self, other)
+
+ def __ne__(self, other):
+ return self.apply("ne", self, other)
+
+ def __lt__(self, other):
+ return self.apply("lt", self, other)
+
+ def __le__(self, other):
+ return self.apply("le", self, other)
+
+ def __gt__(self, other):
+ return self.apply("gt", self, other)
+
+ def __ge__(self, other):
+ return self.apply("ge", self, other)
+
+
+# conversions
+def imagemath_int(self):
+ return _Operand(self.im.convert("I"))
+
+
+def imagemath_float(self):
+ return _Operand(self.im.convert("F"))
+
+
+# logical
+def imagemath_equal(self, other):
+ return self.apply("eq", self, other, mode="I")
+
+
+def imagemath_notequal(self, other):
+ return self.apply("ne", self, other, mode="I")
+
+
+def imagemath_min(self, other):
+ return self.apply("min", self, other)
+
+
+def imagemath_max(self, other):
+ return self.apply("max", self, other)
+
+
+def imagemath_convert(self, mode):
+ return _Operand(self.im.convert(mode))
+
+
+ops = {}
+for k, v in list(globals().items()):
+ if k[:10] == "imagemath_":
+ ops[k[10:]] = v
+
+
+def eval(expression, _dict={}, **kw):
+ """
+ Evaluates an image expression.
+
+ :param expression: A string containing a Python-style expression.
+ :param options: Values to add to the evaluation context. You
+ can either use a dictionary, or one or more keyword
+ arguments.
+ :return: The evaluated expression. This is usually an image object, but can
+ also be an integer, a floating point value, or a pixel tuple,
+ depending on the expression.
+ """
+
+ # build execution namespace
+ args = ops.copy()
+ args.update(_dict)
+ args.update(kw)
+ for k, v in list(args.items()):
+ if hasattr(v, "im"):
+ args[k] = _Operand(v)
+
+ compiled_code = compile(expression, "<string>", "eval")
+
+ def scan(code):
+ for const in code.co_consts:
+ if type(const) is type(compiled_code):
+ scan(const)
+
+ for name in code.co_names:
+ if name not in args and name != "abs":
+ msg = f"'{name}' not allowed"
+ raise ValueError(msg)
+
+ scan(compiled_code)
+ out = builtins.eval(expression, {"__builtins": {"abs": abs}}, args)
+ try:
+ return out.im
+ except AttributeError:
+ return out
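Names passed as keyword arguments become variables inside the expression, and
the ``imagemath_*`` helpers registered in ``ops`` (``convert``, ``min``,
``max``, and so on) are callable by name. A minimal sketch, assuming two
grayscale input files:

    from PIL import Image, ImageMath

    a = Image.open("a.png").convert("L")  # hypothetical inputs
    b = Image.open("b.png").convert("L")

    # Pixelwise maximum, converted back to 8-bit grayscale.
    out = ImageMath.eval("convert(max(a, b), 'L')", a=a, b=b)
    out.save("max.png")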
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/ImageMode.py b/Backend/venv/lib/python3.12/site-packages/PIL/ImageMode.py
new file mode 100644
index 00000000..a0b33514
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/PIL/ImageMode.py
@@ -0,0 +1,90 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# standard mode descriptors
+#
+# History:
+# 2006-03-20 fl Added
+#
+# Copyright (c) 2006 by Secret Labs AB.
+# Copyright (c) 2006 by Fredrik Lundh.
+#
+# See the README file for information on usage and redistribution.
+#
+
+import sys
+
+# mode descriptor cache
+_modes = None
+
+
+class ModeDescriptor:
+ """Wrapper for mode strings."""
+
+ def __init__(self, mode, bands, basemode, basetype, typestr):
+ self.mode = mode
+ self.bands = bands
+ self.basemode = basemode
+ self.basetype = basetype
+ self.typestr = typestr
+
+ def __str__(self):
+ return self.mode
+
+
+def getmode(mode):
+ """Gets a mode descriptor for the given mode."""
+ global _modes
+ if not _modes:
+ # initialize mode cache
+ modes = {}
+ endian = "<" if sys.byteorder == "little" else ">"
+ for m, (basemode, basetype, bands, typestr) in {
+ # core modes
+ # Bits need to be extended to bytes
+ "1": ("L", "L", ("1",), "|b1"),
+ "L": ("L", "L", ("L",), "|u1"),
+ "I": ("L", "I", ("I",), endian + "i4"),
+ "F": ("L", "F", ("F",), endian + "f4"),
+ "P": ("P", "L", ("P",), "|u1"),
+ "RGB": ("RGB", "L", ("R", "G", "B"), "|u1"),
+ "RGBX": ("RGB", "L", ("R", "G", "B", "X"), "|u1"),
+ "RGBA": ("RGB", "L", ("R", "G", "B", "A"), "|u1"),
+ "CMYK": ("RGB", "L", ("C", "M", "Y", "K"), "|u1"),
+ "YCbCr": ("RGB", "L", ("Y", "Cb", "Cr"), "|u1"),
+ # UNDONE - unsigned |u1i1i1
+ "LAB": ("RGB", "L", ("L", "A", "B"), "|u1"),
+ "HSV": ("RGB", "L", ("H", "S", "V"), "|u1"),
+ # extra experimental modes
+ "RGBa": ("RGB", "L", ("R", "G", "B", "a"), "|u1"),
+ "BGR;15": ("RGB", "L", ("B", "G", "R"), "|u1"),
+ "BGR;16": ("RGB", "L", ("B", "G", "R"), "|u1"),
+ "BGR;24": ("RGB", "L", ("B", "G", "R"), "|u1"),
+ "LA": ("L", "L", ("L", "A"), "|u1"),
+ "La": ("L", "L", ("L", "a"), "|u1"),
+ "PA": ("RGB", "L", ("P", "A"), "|u1"),
+ }.items():
+ modes[m] = ModeDescriptor(m, bands, basemode, basetype, typestr)
+ # mapping modes
+ for i16mode, typestr in {
+ # I;16 == I;16L, and I;32 == I;32L
+ "I;16": "u2",
+ "I;16BS": ">i2",
+ "I;16N": endian + "u2",
+ "I;16NS": endian + "i2",
+ "I;32": "u4",
+ "I;32L": "i4",
+ "I;32LS": "
+
+import re
+
+from . import Image, _imagingmorph
+
+LUT_SIZE = 1 << 9
+
+# fmt: off
+ROTATION_MATRIX = [
+ 6, 3, 0,
+ 7, 4, 1,
+ 8, 5, 2,
+]
+MIRROR_MATRIX = [
+ 2, 1, 0,
+ 5, 4, 3,
+ 8, 7, 6,
+]
+# fmt: on
+
+
+class LutBuilder:
+ """A class for building a MorphLut from a descriptive language
+
+ The input patterns are a list of string sequences like these::
+
+ 4:(...
+ .1.
+ 111)->1
+
+ (whitespace, including line breaks, is ignored). The option 4
+ describes a series of symmetry operations (in this case a
+ 4-rotation); the pattern itself is described by:
+
+ - . or X - Ignore
+ - 1 - Pixel is on
+ - 0 - Pixel is off
+
+ The result of the operation is described after the "->" string.
+
+ If no pattern matches, the default is to return the current
+ pixel value unchanged.
+
+ Operations:
+
+ - 4 - 4 way rotation
+ - N - Negate
+ - 1 - Dummy op for no other operation (an op must always be given)
+ - M - Mirroring
+
+ Example::
+
+ lb = LutBuilder(patterns=["4:(... .1. 111)->1"])
+ lut = lb.build_lut()
+
+ """
+
+ def __init__(self, patterns=None, op_name=None):
+ if patterns is not None:
+ self.patterns = patterns
+ else:
+ self.patterns = []
+ self.lut = None
+ if op_name is not None:
+ known_patterns = {
+ "corner": ["1:(... ... ...)->0", "4:(00. 01. ...)->1"],
+ "dilation4": ["4:(... .0. .1.)->1"],
+ "dilation8": ["4:(... .0. .1.)->1", "4:(... .0. ..1)->1"],
+ "erosion4": ["4:(... .1. .0.)->0"],
+ "erosion8": ["4:(... .1. .0.)->0", "4:(... .1. ..0)->0"],
+ "edge": [
+ "1:(... ... ...)->0",
+ "4:(.0. .1. ...)->1",
+ "4:(01. .1. ...)->1",
+ ],
+ }
+ if op_name not in known_patterns:
+ msg = "Unknown pattern " + op_name + "!"
+ raise Exception(msg)
+
+ self.patterns = known_patterns[op_name]
+
+ def add_patterns(self, patterns):
+ self.patterns += patterns
+
+ def build_default_lut(self):
+ symbols = [0, 1]
+ m = 1 << 4 # pos of current pixel
+ self.lut = bytearray(symbols[(i & m) > 0] for i in range(LUT_SIZE))
+
+ def get_lut(self):
+ return self.lut
+
+ def _string_permute(self, pattern, permutation):
+ """string_permute takes a pattern and a permutation and returns the
+ string permuted according to the permutation list.
+ """
+ assert len(permutation) == 9
+ return "".join(pattern[p] for p in permutation)
+
+ def _pattern_permute(self, basic_pattern, options, basic_result):
+ """pattern_permute takes a basic pattern and its result and clones
+ the pattern according to the modifications described in the $options
+ parameter. It returns a list of all cloned patterns."""
+ patterns = [(basic_pattern, basic_result)]
+
+ # rotations
+ if "4" in options:
+ res = patterns[-1][1]
+ for i in range(4):
+ patterns.append(
+ (self._string_permute(patterns[-1][0], ROTATION_MATRIX), res)
+ )
+ # mirror
+ if "M" in options:
+ n = len(patterns)
+ for pattern, res in patterns[:n]:
+ patterns.append((self._string_permute(pattern, MIRROR_MATRIX), res))
+
+ # negate
+ if "N" in options:
+ n = len(patterns)
+ for pattern, res in patterns[:n]:
+ # Swap 0 and 1
+ pattern = pattern.replace("0", "Z").replace("1", "0").replace("Z", "1")
+ res = 1 - int(res)
+ patterns.append((pattern, res))
+
+ return patterns
+
+ def build_lut(self):
+ """Compile all patterns into a morphology lut.
+
+ TBD :Build based on (file) morphlut:modify_lut
+ """
+ self.build_default_lut()
+ patterns = []
+
+ # Parse and create symmetries of the patterns strings
+ for p in self.patterns:
+ m = re.search(r"(\w*):?\s*\((.+?)\)\s*->\s*(\d)", p.replace("\n", ""))
+ if not m:
+ msg = 'Syntax error in pattern "' + p + '"'
+ raise Exception(msg)
+ options = m.group(1)
+ pattern = m.group(2)
+ result = int(m.group(3))
+
+ # Get rid of spaces
+ pattern = pattern.replace(" ", "").replace("\n", "")
+
+ patterns += self._pattern_permute(pattern, options, result)
+
+ # compile the patterns into regular expressions for speed
+ for i, pattern in enumerate(patterns):
+ p = pattern[0].replace(".", "X").replace("X", "[01]")
+ p = re.compile(p)
+ patterns[i] = (p, pattern[1])
+
+ # Step through table and find patterns that match.
+ # Note that all the patterns are searched. The last one
+ # caught overrides
+ for i in range(LUT_SIZE):
+ # Build the bit pattern
+ bitpattern = bin(i)[2:]
+ bitpattern = ("0" * (9 - len(bitpattern)) + bitpattern)[::-1]
+
+ for p, r in patterns:
+ if p.match(bitpattern):
+ self.lut[i] = [0, 1][r]
+
+ return self.lut
+
+
+class MorphOp:
+ """A class for binary morphological operators"""
+
+ def __init__(self, lut=None, op_name=None, patterns=None):
+ """Create a binary morphological operator"""
+ self.lut = lut
+ if op_name is not None:
+ self.lut = LutBuilder(op_name=op_name).build_lut()
+ elif patterns is not None:
+ self.lut = LutBuilder(patterns=patterns).build_lut()
+
+ def apply(self, image):
+ """Run a single morphological operation on an image
+
+ Returns a tuple of the number of changed pixels and the
+ morphed image"""
+ if self.lut is None:
+ msg = "No operator loaded"
+ raise Exception(msg)
+
+ if image.mode != "L":
+ msg = "Image mode must be L"
+ raise ValueError(msg)
+ outimage = Image.new(image.mode, image.size, None)
+ count = _imagingmorph.apply(bytes(self.lut), image.im.id, outimage.im.id)
+ return count, outimage
+
+ def match(self, image):
+ """Get a list of coordinates matching the morphological operation on
+ an image.
+
+ Returns a list of tuples of (x,y) coordinates
+ of all matching pixels. See :ref:`coordinate-system`."""
+ if self.lut is None:
+ msg = "No operator loaded"
+ raise Exception(msg)
+
+ if image.mode != "L":
+ msg = "Image mode must be L"
+ raise ValueError(msg)
+ return _imagingmorph.match(bytes(self.lut), image.im.id)
+
+ def get_on_pixels(self, image):
+ """Get a list of all turned on pixels in a binary image
+
+ Returns a list of tuples of (x,y) coordinates
+ of all matching pixels. See :ref:`coordinate-system`."""
+
+ if image.mode != "L":
+ msg = "Image mode must be L"
+ raise ValueError(msg)
+ return _imagingmorph.get_on_pixels(image.im.id)
+
+ def load_lut(self, filename):
+ """Load an operator from an mrl file"""
+ with open(filename, "rb") as f:
+ self.lut = bytearray(f.read())
+
+ if len(self.lut) != LUT_SIZE:
+ self.lut = None
+ msg = "Wrong size operator file!"
+ raise Exception(msg)
+
+ def save_lut(self, filename):
+ """Save an operator to an mrl file"""
+ if self.lut is None:
+ msg = "No operator loaded"
+ raise Exception(msg)
+ with open(filename, "wb") as f:
+ f.write(self.lut)
+
+ def set_lut(self, lut):
+ """Set the lut from an external source"""
+ self.lut = lut
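A short sketch of the classes above, assuming a black-and-white mask stored
as a mode "L" image (file names hypothetical):

    from PIL import Image
    from PIL.ImageMorph import MorphOp

    im = Image.open("mask.png").convert("L")

    # Built-in pattern set: 8-connected erosion.
    op = MorphOp(op_name="erosion8")
    changed, eroded = op.apply(im)  # (number of changed pixels, new image)
    print(f"{changed} pixels changed")
    eroded.save("eroded.png")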
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/ImageOps.py b/Backend/venv/lib/python3.12/site-packages/PIL/ImageOps.py
new file mode 100644
index 00000000..42f2152b
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/PIL/ImageOps.py
@@ -0,0 +1,658 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# standard image operations
+#
+# History:
+# 2001-10-20 fl Created
+# 2001-10-23 fl Added autocontrast operator
+# 2001-12-18 fl Added Kevin's fit operator
+# 2004-03-14 fl Fixed potential division by zero in equalize
+# 2005-05-05 fl Fixed equalize for low number of values
+#
+# Copyright (c) 2001-2004 by Secret Labs AB
+# Copyright (c) 2001-2004 by Fredrik Lundh
+#
+# See the README file for information on usage and redistribution.
+#
+
+import functools
+import operator
+import re
+
+from . import ExifTags, Image, ImagePalette
+
+#
+# helpers
+
+
+def _border(border):
+ if isinstance(border, tuple):
+ if len(border) == 2:
+ left, top = right, bottom = border
+ elif len(border) == 4:
+ left, top, right, bottom = border
+ else:
+ left = top = right = bottom = border
+ return left, top, right, bottom
+
+
+def _color(color, mode):
+ if isinstance(color, str):
+ from . import ImageColor
+
+ color = ImageColor.getcolor(color, mode)
+ return color
+
+
+def _lut(image, lut):
+ if image.mode == "P":
+ # FIXME: apply to lookup table, not image data
+ msg = "mode P support coming soon"
+ raise NotImplementedError(msg)
+ elif image.mode in ("L", "RGB"):
+ if image.mode == "RGB" and len(lut) == 256:
+ lut = lut + lut + lut
+ return image.point(lut)
+ else:
+ msg = "not supported for this image mode"
+ raise OSError(msg)
+
+
+#
+# actions
+
+
+def autocontrast(image, cutoff=0, ignore=None, mask=None, preserve_tone=False):
+ """
+ Maximize (normalize) image contrast. This function calculates a
+ histogram of the input image (or mask region), removes ``cutoff`` percent of the
+ lightest and darkest pixels from the histogram, and remaps the image
+ so that the darkest pixel becomes black (0), and the lightest
+ becomes white (255).
+
+ :param image: The image to process.
+ :param cutoff: The percent to cut off from the histogram on the low and
+ high ends. Either a tuple of (low, high), or a single
+ number for both.
+ :param ignore: The background pixel value (use None for no background).
+ :param mask: Histogram used in contrast operation is computed using pixels
+ within the mask. If no mask is given the entire image is used
+ for histogram computation.
+ :param preserve_tone: Preserve image tone in Photoshop-like style autocontrast.
+
+ .. versionadded:: 8.2.0
+
+ :return: An image.
+ """
+ if preserve_tone:
+ histogram = image.convert("L").histogram(mask)
+ else:
+ histogram = image.histogram(mask)
+
+ lut = []
+ for layer in range(0, len(histogram), 256):
+ h = histogram[layer : layer + 256]
+ if ignore is not None:
+ # get rid of outliers
+ try:
+ h[ignore] = 0
+ except TypeError:
+ # assume sequence
+ for ix in ignore:
+ h[ix] = 0
+ if cutoff:
+ # cut off pixels from both ends of the histogram
+ if not isinstance(cutoff, tuple):
+ cutoff = (cutoff, cutoff)
+ # get number of pixels
+ n = 0
+ for ix in range(256):
+ n = n + h[ix]
+ # remove cutoff% pixels from the low end
+ cut = n * cutoff[0] // 100
+ for lo in range(256):
+ if cut > h[lo]:
+ cut = cut - h[lo]
+ h[lo] = 0
+ else:
+ h[lo] -= cut
+ cut = 0
+ if cut <= 0:
+ break
+ # remove cutoff% samples from the high end
+ cut = n * cutoff[1] // 100
+ for hi in range(255, -1, -1):
+ if cut > h[hi]:
+ cut = cut - h[hi]
+ h[hi] = 0
+ else:
+ h[hi] -= cut
+ cut = 0
+ if cut <= 0:
+ break
+ # find lowest/highest samples after preprocessing
+ for lo in range(256):
+ if h[lo]:
+ break
+ for hi in range(255, -1, -1):
+ if h[hi]:
+ break
+ if hi <= lo:
+ # don't bother
+ lut.extend(list(range(256)))
+ else:
+ scale = 255.0 / (hi - lo)
+ offset = -lo * scale
+ for ix in range(256):
+ ix = int(ix * scale + offset)
+ if ix < 0:
+ ix = 0
+ elif ix > 255:
+ ix = 255
+ lut.append(ix)
+ return _lut(image, lut)
+
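For example (a hedged sketch; the input file is hypothetical), clipping
2 percent from both ends of the histogram before remapping:

    from PIL import Image, ImageOps

    im = Image.open("photo.jpg")
    out = ImageOps.autocontrast(im, cutoff=2)
    out.save("contrast.jpg")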
+
+def colorize(image, black, white, mid=None, blackpoint=0, whitepoint=255, midpoint=127):
+ """
+ Colorize grayscale image.
+ This function calculates a color wedge which maps all black pixels in
+ the source image to the first color and all white pixels to the
+ second color. The ``black`` and ``white`` arguments should be RGB
+ tuples or color names; specifying ``mid`` as well switches to
+ three-color mapping. The mapping position of each color can be set
+ with the corresponding point parameter (e.g. ``blackpoint``), an
+ integer value [0, 255] giving the input level at which that color is
+ reached. These parameters must be in logical order, such that
+ ``blackpoint <= midpoint <= whitepoint`` (if ``mid`` is specified).
+
+ :param image: The image to colorize.
+ :param black: The color to use for black input pixels.
+ :param white: The color to use for white input pixels.
+ :param mid: The color to use for midtone input pixels.
+ :param blackpoint: an int value [0, 255] for the black mapping.
+ :param whitepoint: an int value [0, 255] for the white mapping.
+ :param midpoint: an int value [0, 255] for the midtone mapping.
+ :return: An image.
+ """
+
+ # Initial asserts
+ assert image.mode == "L"
+ if mid is None:
+ assert 0 <= blackpoint <= whitepoint <= 255
+ else:
+ assert 0 <= blackpoint <= midpoint <= whitepoint <= 255
+
+ # Define colors from arguments
+ black = _color(black, "RGB")
+ white = _color(white, "RGB")
+ if mid is not None:
+ mid = _color(mid, "RGB")
+
+ # Empty lists for the mapping
+ red = []
+ green = []
+ blue = []
+
+ # Create the low-end values
+ for i in range(0, blackpoint):
+ red.append(black[0])
+ green.append(black[1])
+ blue.append(black[2])
+
+ # Create the mapping (2-color)
+ if mid is None:
+ range_map = range(0, whitepoint - blackpoint)
+
+ for i in range_map:
+ red.append(black[0] + i * (white[0] - black[0]) // len(range_map))
+ green.append(black[1] + i * (white[1] - black[1]) // len(range_map))
+ blue.append(black[2] + i * (white[2] - black[2]) // len(range_map))
+
+ # Create the mapping (3-color)
+ else:
+ range_map1 = range(0, midpoint - blackpoint)
+ range_map2 = range(0, whitepoint - midpoint)
+
+ for i in range_map1:
+ red.append(black[0] + i * (mid[0] - black[0]) // len(range_map1))
+ green.append(black[1] + i * (mid[1] - black[1]) // len(range_map1))
+ blue.append(black[2] + i * (mid[2] - black[2]) // len(range_map1))
+ for i in range_map2:
+ red.append(mid[0] + i * (white[0] - mid[0]) // len(range_map2))
+ green.append(mid[1] + i * (white[1] - mid[1]) // len(range_map2))
+ blue.append(mid[2] + i * (white[2] - mid[2]) // len(range_map2))
+
+ # Create the high-end values
+ for i in range(0, 256 - whitepoint):
+ red.append(white[0])
+ green.append(white[1])
+ blue.append(white[2])
+
+ # Return converted image
+ image = image.convert("RGB")
+ return _lut(image, red + green + blue)
+
+
+def contain(image, size, method=Image.Resampling.BICUBIC):
+ """
+ Returns a resized version of the image, set to the maximum width and height
+ within the requested size, while maintaining the original aspect ratio.
+
+ :param image: The image to resize.
+ :param size: The requested output size in pixels, given as a
+ (width, height) tuple.
+ :param method: Resampling method to use. Default is
+ :py:attr:`~PIL.Image.Resampling.BICUBIC`.
+ See :ref:`concept-filters`.
+ :return: An image.
+ """
+
+ im_ratio = image.width / image.height
+ dest_ratio = size[0] / size[1]
+
+ if im_ratio != dest_ratio:
+ if im_ratio > dest_ratio:
+ new_height = round(image.height / image.width * size[0])
+ if new_height != size[1]:
+ size = (size[0], new_height)
+ else:
+ new_width = round(image.width / image.height * size[1])
+ if new_width != size[0]:
+ size = (new_width, size[1])
+ return image.resize(size, resample=method)
+
+
+def cover(image, size, method=Image.Resampling.BICUBIC):
+ """
+ Returns a resized version of the image, so that the requested size is
+ covered, while maintaining the original aspect ratio.
+
+ :param image: The image to resize.
+ :param size: The requested output size in pixels, given as a
+ (width, height) tuple.
+ :param method: Resampling method to use. Default is
+ :py:attr:`~PIL.Image.Resampling.BICUBIC`.
+ See :ref:`concept-filters`.
+ :return: An image.
+ """
+
+ im_ratio = image.width / image.height
+ dest_ratio = size[0] / size[1]
+
+ if im_ratio != dest_ratio:
+ if im_ratio < dest_ratio:
+ new_height = round(image.height / image.width * size[0])
+ if new_height != size[1]:
+ size = (size[0], new_height)
+ else:
+ new_width = round(image.width / image.height * size[1])
+ if new_width != size[0]:
+ size = (new_width, size[1])
+ return image.resize(size, resample=method)
+
+
+def pad(image, size, method=Image.Resampling.BICUBIC, color=None, centering=(0.5, 0.5)):
+ """
+ Returns a resized and padded version of the image, expanded to fill the
+ requested aspect ratio and size.
+
+ :param image: The image to resize and crop.
+ :param size: The requested output size in pixels, given as a
+ (width, height) tuple.
+ :param method: Resampling method to use. Default is
+ :py:attr:`~PIL.Image.Resampling.BICUBIC`.
+ See :ref:`concept-filters`.
+ :param color: The background color of the padded image.
+ :param centering: Control the position of the original image within the
+ padded version.
+
+ (0.5, 0.5) will keep the image centered
+ (0, 0) will keep the image aligned to the top left
+ (1, 1) will keep the image aligned to the bottom
+ right
+ :return: An image.
+ """
+
+ resized = contain(image, size, method)
+ if resized.size == size:
+ out = resized
+ else:
+ out = Image.new(image.mode, size, color)
+ if resized.palette:
+ out.putpalette(resized.getpalette())
+ if resized.width != size[0]:
+ x = round((size[0] - resized.width) * max(0, min(centering[0], 1)))
+ out.paste(resized, (x, 0))
+ else:
+ y = round((size[1] - resized.height) * max(0, min(centering[1], 1)))
+ out.paste(resized, (0, y))
+ return out
+
+
+def crop(image, border=0):
+ """
+ Remove border from image. The same amount of pixels are removed
+ from all four sides. This function works on all image modes.
+
+ .. seealso:: :py:meth:`~PIL.Image.Image.crop`
+
+ :param image: The image to crop.
+ :param border: The number of pixels to remove.
+ :return: An image.
+ """
+ left, top, right, bottom = _border(border)
+ return image.crop((left, top, image.size[0] - right, image.size[1] - bottom))
+
+
+def scale(image, factor, resample=Image.Resampling.BICUBIC):
+ """
+ Returns a version of the image rescaled by the given factor.
+ A factor greater than 1 expands the image; a factor between 0 and 1
+ contracts it.
+
+ :param image: The image to rescale.
+ :param factor: The expansion factor, as a float.
+ :param resample: Resampling method to use. Default is
+ :py:attr:`~PIL.Image.Resampling.BICUBIC`.
+ See :ref:`concept-filters`.
+ :returns: An :py:class:`~PIL.Image.Image` object.
+ """
+ if factor == 1:
+ return image.copy()
+ elif factor <= 0:
+ msg = "the factor must be greater than 0"
+ raise ValueError(msg)
+ else:
+ size = (round(factor * image.width), round(factor * image.height))
+ return image.resize(size, resample)
+
+
+def deform(image, deformer, resample=Image.Resampling.BILINEAR):
+ """
+ Deform the image.
+
+ :param image: The image to deform.
+ :param deformer: A deformer object. Any object that implements a
+ ``getmesh`` method can be used.
+ :param resample: An optional resampling filter. Same values possible as
+ in the PIL.Image.transform function.
+ :return: An image.
+ """
+ return image.transform(
+ image.size, Image.Transform.MESH, deformer.getmesh(image), resample
+ )
+
+
+def equalize(image, mask=None):
+ """
+ Equalize the image histogram. This function applies a non-linear
+ mapping to the input image, in order to create a uniform
+ distribution of grayscale values in the output image.
+
+ :param image: The image to equalize.
+ :param mask: An optional mask. If given, only the pixels selected by
+ the mask are included in the analysis.
+ :return: An image.
+ """
+ if image.mode == "P":
+ image = image.convert("RGB")
+ h = image.histogram(mask)
+ lut = []
+ for b in range(0, len(h), 256):
+ histo = [_f for _f in h[b : b + 256] if _f]
+ if len(histo) <= 1:
+ lut.extend(list(range(256)))
+ else:
+ step = (functools.reduce(operator.add, histo) - histo[-1]) // 255
+ if not step:
+ lut.extend(list(range(256)))
+ else:
+ n = step // 2
+ for i in range(256):
+ lut.append(n // step)
+ n = n + h[i + b]
+ return _lut(image, lut)
+
+
+def expand(image, border=0, fill=0):
+ """
+ Add a border to the image.
+
+ :param image: The image to expand.
+ :param border: Border width, in pixels.
+ :param fill: Pixel fill value (a color value). Default is 0 (black).
+ :return: An image.
+ """
+ left, top, right, bottom = _border(border)
+ width = left + image.size[0] + right
+ height = top + image.size[1] + bottom
+ color = _color(fill, image.mode)
+ if image.palette:
+ palette = ImagePalette.ImagePalette(palette=image.getpalette())
+ if isinstance(color, tuple):
+ color = palette.getcolor(color)
+ else:
+ palette = None
+ out = Image.new(image.mode, (width, height), color)
+ if palette:
+ out.putpalette(palette.palette)
+ out.paste(image, (left, top))
+ return out
+
+
+def fit(image, size, method=Image.Resampling.BICUBIC, bleed=0.0, centering=(0.5, 0.5)):
+ """
+ Returns a resized and cropped version of the image, cropped to the
+ requested aspect ratio and size.
+
+ This function was contributed by Kevin Cazabon.
+
+ :param image: The image to resize and crop.
+ :param size: The requested output size in pixels, given as a
+ (width, height) tuple.
+ :param method: Resampling method to use. Default is
+ :py:attr:`~PIL.Image.Resampling.BICUBIC`.
+ See :ref:`concept-filters`.
+ :param bleed: Remove a border around the outside of the image from all
+ four edges. The value is a decimal percentage (use 0.01 for
+ one percent). The default value is 0 (no border).
+ Cannot be greater than or equal to 0.5.
+ :param centering: Control the cropping position. Use (0.5, 0.5) for
+ center cropping (e.g. if cropping the width, take 50% off
+ of the left side, and therefore 50% off the right side).
+ (0.0, 0.0) will crop from the top left corner (i.e. if
+ cropping the width, take all of the crop off of the right
+ side, and if cropping the height, take all of it off the
+ bottom). (1.0, 0.0) will crop from the bottom left
+ corner, etc. (i.e. if cropping the width, take all of the
+ crop off the left side, and if cropping the height take
+ none from the top, and therefore all off the bottom).
+ :return: An image.
+ """
+
+ # by Kevin Cazabon, Feb 17/2000
+ # kevin@cazabon.com
+ # https://www.cazabon.com
+
+ # ensure centering is mutable
+ centering = list(centering)
+
+ if not 0.0 <= centering[0] <= 1.0:
+ centering[0] = 0.5
+ if not 0.0 <= centering[1] <= 1.0:
+ centering[1] = 0.5
+
+ if not 0.0 <= bleed < 0.5:
+ bleed = 0.0
+
+ # calculate the area to use for resizing and cropping, subtracting
+ # the 'bleed' around the edges
+
+ # number of pixels to trim off on Top and Bottom, Left and Right
+ bleed_pixels = (bleed * image.size[0], bleed * image.size[1])
+
+ live_size = (
+ image.size[0] - bleed_pixels[0] * 2,
+ image.size[1] - bleed_pixels[1] * 2,
+ )
+
+ # calculate the aspect ratio of the live_size
+ live_size_ratio = live_size[0] / live_size[1]
+
+ # calculate the aspect ratio of the output image
+ output_ratio = size[0] / size[1]
+
+ # figure out if the sides or top/bottom will be cropped off
+ if live_size_ratio == output_ratio:
+ # live_size is already the needed ratio
+ crop_width = live_size[0]
+ crop_height = live_size[1]
+ elif live_size_ratio >= output_ratio:
+ # live_size is wider than what's needed, crop the sides
+ crop_width = output_ratio * live_size[1]
+ crop_height = live_size[1]
+ else:
+ # live_size is taller than what's needed, crop the top and bottom
+ crop_width = live_size[0]
+ crop_height = live_size[0] / output_ratio
+
+ # make the crop
+ crop_left = bleed_pixels[0] + (live_size[0] - crop_width) * centering[0]
+ crop_top = bleed_pixels[1] + (live_size[1] - crop_height) * centering[1]
+
+ crop = (crop_left, crop_top, crop_left + crop_width, crop_top + crop_height)
+
+ # resize the image and return it
+ return image.resize(size, method, box=crop)
+
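As a sketch, producing a 128x128 center-cropped thumbnail (input path
hypothetical):

    from PIL import Image, ImageOps

    im = Image.open("photo.jpg")
    thumb = ImageOps.fit(im, (128, 128), centering=(0.5, 0.5))
    thumb.save("thumb.jpg")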
+
+def flip(image):
+ """
+ Flip the image vertically (top to bottom).
+
+ :param image: The image to flip.
+ :return: An image.
+ """
+ return image.transpose(Image.Transpose.FLIP_TOP_BOTTOM)
+
+
+def grayscale(image):
+ """
+ Convert the image to grayscale.
+
+ :param image: The image to convert.
+ :return: An image.
+ """
+ return image.convert("L")
+
+
+def invert(image):
+ """
+ Invert (negate) the image.
+
+ :param image: The image to invert.
+ :return: An image.
+ """
+ lut = []
+ for i in range(256):
+ lut.append(255 - i)
+ return image.point(lut) if image.mode == "1" else _lut(image, lut)
+
+
+def mirror(image):
+ """
+ Flip image horizontally (left to right).
+
+ :param image: The image to mirror.
+ :return: An image.
+ """
+ return image.transpose(Image.Transpose.FLIP_LEFT_RIGHT)
+
+
+def posterize(image, bits):
+ """
+ Reduce the number of bits for each color channel.
+
+ :param image: The image to posterize.
+ :param bits: The number of bits to keep for each channel (1-8).
+ :return: An image.
+ """
+ lut = []
+ mask = ~(2 ** (8 - bits) - 1)
+ for i in range(256):
+ lut.append(i & mask)
+ return _lut(image, lut)
+
+
+def solarize(image, threshold=128):
+ """
+ Invert all pixel values above a threshold.
+
+ :param image: The image to solarize.
+ :param threshold: All pixels above this greyscale level are inverted.
+ :return: An image.
+ """
+ lut = []
+ for i in range(256):
+ if i < threshold:
+ lut.append(i)
+ else:
+ lut.append(255 - i)
+ return _lut(image, lut)
+
+
+def exif_transpose(image, *, in_place=False):
+ """
+ If an image has an EXIF Orientation tag, other than 1, transpose the image
+ accordingly, and remove the orientation data.
+
+ :param image: The image to transpose.
+ :param in_place: Boolean. Keyword-only argument.
+ If ``True``, the original image is modified in-place, and ``None`` is returned.
+ If ``False`` (default), a new :py:class:`~PIL.Image.Image` object is returned
+ with the transposition applied. If there is no transposition, a copy of the
+ image will be returned.
+ """
+ image.load()
+ image_exif = image.getexif()
+ orientation = image_exif.get(ExifTags.Base.Orientation)
+ method = {
+ 2: Image.Transpose.FLIP_LEFT_RIGHT,
+ 3: Image.Transpose.ROTATE_180,
+ 4: Image.Transpose.FLIP_TOP_BOTTOM,
+ 5: Image.Transpose.TRANSPOSE,
+ 6: Image.Transpose.ROTATE_270,
+ 7: Image.Transpose.TRANSVERSE,
+ 8: Image.Transpose.ROTATE_90,
+ }.get(orientation)
+ if method is not None:
+ transposed_image = image.transpose(method)
+ if in_place:
+ image.im = transposed_image.im
+ image.pyaccess = None
+ image._size = transposed_image._size
+ exif_image = image if in_place else transposed_image
+
+ exif = exif_image.getexif()
+ if ExifTags.Base.Orientation in exif:
+ del exif[ExifTags.Base.Orientation]
+ if "exif" in exif_image.info:
+ exif_image.info["exif"] = exif.tobytes()
+ elif "Raw profile type exif" in exif_image.info:
+ exif_image.info["Raw profile type exif"] = exif.tobytes().hex()
+ elif "XML:com.adobe.xmp" in exif_image.info:
+ for pattern in (
+ r'tiff:Orientation="([0-9])"',
+ r"([0-9])",
+ ):
+ exif_image.info["XML:com.adobe.xmp"] = re.sub(
+ pattern, "", exif_image.info["XML:com.adobe.xmp"]
+ )
+ if not in_place:
+ return transposed_image
+ elif not in_place:
+ return image.copy()
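A sketch of ``exif_transpose`` for normalising camera orientation before
further processing (file name hypothetical):

    from PIL import Image, ImageOps

    im = Image.open("camera.jpg")
    upright = ImageOps.exif_transpose(im)  # copy if no orientation tag is set
    upright.save("upright.jpg")

    # Or transpose in place and keep working with the same object:
    ImageOps.exif_transpose(im, in_place=True)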
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/ImagePalette.py b/Backend/venv/lib/python3.12/site-packages/PIL/ImagePalette.py
new file mode 100644
index 00000000..f0c09470
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/PIL/ImagePalette.py
@@ -0,0 +1,266 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# image palette object
+#
+# History:
+# 1996-03-11 fl Rewritten.
+# 1997-01-03 fl Up and running.
+# 1997-08-23 fl Added load hack
+# 2001-04-16 fl Fixed randint shadow bug in random()
+#
+# Copyright (c) 1997-2001 by Secret Labs AB
+# Copyright (c) 1996-1997 by Fredrik Lundh
+#
+# See the README file for information on usage and redistribution.
+#
+
+import array
+
+from . import GimpGradientFile, GimpPaletteFile, ImageColor, PaletteFile
+
+
+class ImagePalette:
+ """
+ Color palette for palette mapped images
+
+ :param mode: The mode to use for the palette. See:
+ :ref:`concept-modes`. Defaults to "RGB"
+ :param palette: An optional palette. If given, it must be a bytearray,
+ an array or a list of ints between 0-255. The list must consist of
+ all channels for one color followed by the next color (e.g. RGBRGBRGB).
+ Defaults to an empty palette.
+ """
+
+ def __init__(self, mode="RGB", palette=None):
+ self.mode = mode
+ self.rawmode = None # if set, palette contains raw data
+ self.palette = palette or bytearray()
+ self.dirty = None
+
+ @property
+ def palette(self):
+ return self._palette
+
+ @palette.setter
+ def palette(self, palette):
+ self._colors = None
+ self._palette = palette
+
+ @property
+ def colors(self):
+ if self._colors is None:
+ mode_len = len(self.mode)
+ self._colors = {}
+ for i in range(0, len(self.palette), mode_len):
+ color = tuple(self.palette[i : i + mode_len])
+ if color in self._colors:
+ continue
+ self._colors[color] = i // mode_len
+ return self._colors
+
+ @colors.setter
+ def colors(self, colors):
+ self._colors = colors
+
+ def copy(self):
+ new = ImagePalette()
+
+ new.mode = self.mode
+ new.rawmode = self.rawmode
+ if self.palette is not None:
+ new.palette = self.palette[:]
+ new.dirty = self.dirty
+
+ return new
+
+ def getdata(self):
+ """
+ Get palette contents in format suitable for the low-level
+ ``im.putpalette`` primitive.
+
+ .. warning:: This method is experimental.
+ """
+ if self.rawmode:
+ return self.rawmode, self.palette
+ return self.mode, self.tobytes()
+
+ def tobytes(self):
+ """Convert palette to bytes.
+
+ .. warning:: This method is experimental.
+ """
+ if self.rawmode:
+ msg = "palette contains raw palette data"
+ raise ValueError(msg)
+ if isinstance(self.palette, bytes):
+ return self.palette
+ arr = array.array("B", self.palette)
+ return arr.tobytes()
+
+ # Declare tostring as an alias for tobytes
+ tostring = tobytes
+
+ def getcolor(self, color, image=None):
+ """Given an rgb tuple, allocate palette entry.
+
+ .. warning:: This method is experimental.
+ """
+ if self.rawmode:
+ msg = "palette contains raw palette data"
+ raise ValueError(msg)
+ if isinstance(color, tuple):
+ if self.mode == "RGB":
+ if len(color) == 4:
+ if color[3] != 255:
+ msg = "cannot add non-opaque RGBA color to RGB palette"
+ raise ValueError(msg)
+ color = color[:3]
+ elif self.mode == "RGBA":
+ if len(color) == 3:
+ color += (255,)
+ try:
+ return self.colors[color]
+ except KeyError as e:
+ # allocate new color slot
+ if not isinstance(self.palette, bytearray):
+ self._palette = bytearray(self.palette)
+ index = len(self.palette) // 3
+ special_colors = ()
+ if image:
+ special_colors = (
+ image.info.get("background"),
+ image.info.get("transparency"),
+ )
+ while index in special_colors:
+ index += 1
+ if index >= 256:
+ if image:
+ # Search for an unused index
+ for i, count in reversed(list(enumerate(image.histogram()))):
+ if count == 0 and i not in special_colors:
+ index = i
+ break
+ if index >= 256:
+ msg = "cannot allocate more than 256 colors"
+ raise ValueError(msg) from e
+ self.colors[color] = index
+ if index * 3 < len(self.palette):
+ self._palette = (
+ self.palette[: index * 3]
+ + bytes(color)
+ + self.palette[index * 3 + 3 :]
+ )
+ else:
+ self._palette += bytes(color)
+ self.dirty = 1
+ return index
+ else:
+ msg = f"unknown color specifier: {repr(color)}"
+ raise ValueError(msg)
+
+ def save(self, fp):
+ """Save palette to text file.
+
+ .. warning:: This method is experimental.
+ """
+ if self.rawmode:
+ msg = "palette contains raw palette data"
+ raise ValueError(msg)
+ if isinstance(fp, str):
+ fp = open(fp, "w")
+ fp.write("# Palette\n")
+ fp.write(f"# Mode: {self.mode}\n")
+ for i in range(256):
+ fp.write(f"{i}")
+ for j in range(i * len(self.mode), (i + 1) * len(self.mode)):
+ try:
+ fp.write(f" {self.palette[j]}")
+ except IndexError:
+ fp.write(" 0")
+ fp.write("\n")
+ fp.close()
+
+
+# --------------------------------------------------------------------
+# Internal
+
+
+def raw(rawmode, data):
+ palette = ImagePalette()
+ palette.rawmode = rawmode
+ palette.palette = data
+ palette.dirty = 1
+ return palette
+
+
+# --------------------------------------------------------------------
+# Factories
+
+
+def make_linear_lut(black, white):
+ lut = []
+ if black == 0:
+ for i in range(256):
+ lut.append(white * i // 255)
+ else:
+ raise NotImplementedError # FIXME
+ return lut
+
+
+def make_gamma_lut(exp):
+ lut = []
+ for i in range(256):
+ lut.append(int(((i / 255.0) ** exp) * 255.0 + 0.5))
+ return lut
+
+
+def negative(mode="RGB"):
+ palette = list(range(256 * len(mode)))
+ palette.reverse()
+ return ImagePalette(mode, [i // len(mode) for i in palette])
+
+
+def random(mode="RGB"):
+ from random import randint
+
+ palette = []
+ for i in range(256 * len(mode)):
+ palette.append(randint(0, 255))
+ return ImagePalette(mode, palette)
+
+
+def sepia(white="#fff0c0"):
+ bands = [make_linear_lut(0, band) for band in ImageColor.getrgb(white)]
+ return ImagePalette("RGB", [bands[i % 3][i // 3] for i in range(256 * 3)])
+
+
+def wedge(mode="RGB"):
+ palette = list(range(256 * len(mode)))
+ return ImagePalette(mode, [i // len(mode) for i in palette])
+
+
+def load(filename):
+ # FIXME: supports GIMP gradients only
+
+ with open(filename, "rb") as fp:
+ for paletteHandler in [
+ GimpPaletteFile.GimpPaletteFile,
+ GimpGradientFile.GimpGradientFile,
+ PaletteFile.PaletteFile,
+ ]:
+ try:
+ fp.seek(0)
+ lut = paletteHandler(fp).getpalette()
+ if lut:
+ break
+ except (SyntaxError, ValueError):
+ # import traceback
+ # traceback.print_exc()
+ pass
+ else:
+ msg = "cannot load palette"
+ raise OSError(msg)
+
+ return lut # data, rawmode
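A sketch of the palette factories and ``getcolor`` above:

    from PIL import Image, ImagePalette

    # Attach a generated sepia palette to a new "P" image.
    im = Image.new("P", (64, 64))
    im.putpalette(ImagePalette.sepia().palette)

    # getcolor allocates (or reuses) a palette slot for an RGB tuple.
    pal = ImagePalette.ImagePalette("RGB")
    index = pal.getcolor((255, 0, 0))
    print("red is palette index", index)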
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/ImagePath.py b/Backend/venv/lib/python3.12/site-packages/PIL/ImagePath.py
new file mode 100644
index 00000000..3d3538c9
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/PIL/ImagePath.py
@@ -0,0 +1,19 @@
+#
+# The Python Imaging Library
+# $Id$
+#
+# path interface
+#
+# History:
+# 1996-11-04 fl Created
+# 2002-04-14 fl Added documentation stub class
+#
+# Copyright (c) Secret Labs AB 1997.
+# Copyright (c) Fredrik Lundh 1996.
+#
+# See the README file for information on usage and redistribution.
+#
+
+from . import Image
+
+Path = Image.core.path
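``ImagePath.Path`` compacts a sequence of (x, y) tuples, or a flat coordinate
list, into a C-level object that ``ImageDraw`` and transform code can
consume. A small sketch:

    from PIL import ImagePath

    p = ImagePath.Path([(0, 0), (10, 0), (10, 10), (0, 10)])
    print(p.getbbox())  # (0.0, 0.0, 10.0, 10.0)
    print(p.tolist())   # back to a list of tuples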
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/ImageQt.py b/Backend/venv/lib/python3.12/site-packages/PIL/ImageQt.py
new file mode 100644
index 00000000..9b724545
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/PIL/ImageQt.py
@@ -0,0 +1,216 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# a simple Qt image interface.
+#
+# history:
+# 2006-06-03 fl: created
+# 2006-06-04 fl: inherit from QImage instead of wrapping it
+# 2006-06-05 fl: removed toimage helper; move string support to ImageQt
+# 2013-11-13 fl: add support for Qt5 (aurelien.ballier@cyclonit.com)
+#
+# Copyright (c) 2006 by Secret Labs AB
+# Copyright (c) 2006 by Fredrik Lundh
+#
+# See the README file for information on usage and redistribution.
+#
+
+import sys
+from io import BytesIO
+
+from . import Image
+from ._util import is_path
+
+qt_versions = [
+ ["6", "PyQt6"],
+ ["side6", "PySide6"],
+]
+
+# If a version has already been imported, attempt it first
+qt_versions.sort(key=lambda qt_version: qt_version[1] in sys.modules, reverse=True)
+for qt_version, qt_module in qt_versions:
+ try:
+ if qt_module == "PyQt6":
+ from PyQt6.QtCore import QBuffer, QIODevice
+ from PyQt6.QtGui import QImage, QPixmap, qRgba
+ elif qt_module == "PySide6":
+ from PySide6.QtCore import QBuffer, QIODevice
+ from PySide6.QtGui import QImage, QPixmap, qRgba
+ except (ImportError, RuntimeError):
+ continue
+ qt_is_installed = True
+ break
+else:
+ qt_is_installed = False
+ qt_version = None
+
+
+def rgb(r, g, b, a=255):
+ """(Internal) Turns an RGB color into a Qt compatible color integer."""
+ # use qRgb to pack the colors, and then turn the resulting long
+ # into a negative integer with the same bitpattern.
+ return qRgba(r, g, b, a) & 0xFFFFFFFF
+
+
+def fromqimage(im):
+ """
+ :param im: QImage or PIL ImageQt object
+ """
+ buffer = QBuffer()
+ if qt_version == "6":
+ try:
+ qt_openmode = QIODevice.OpenModeFlag
+ except AttributeError:
+ qt_openmode = QIODevice.OpenMode
+ else:
+ qt_openmode = QIODevice
+ buffer.open(qt_openmode.ReadWrite)
+ # preserve alpha channel with png
+ # otherwise ppm is more friendly with Image.open
+ if im.hasAlphaChannel():
+ im.save(buffer, "png")
+ else:
+ im.save(buffer, "ppm")
+
+ b = BytesIO()
+ b.write(buffer.data())
+ buffer.close()
+ b.seek(0)
+
+ return Image.open(b)
+
+
+def fromqpixmap(im):
+ return fromqimage(im)
+ # buffer = QBuffer()
+ # buffer.open(QIODevice.ReadWrite)
+ # # im.save(buffer)
+ # # What if png doesn't support some image features like animation?
+ # im.save(buffer, 'ppm')
+ # bytes_io = BytesIO()
+ # bytes_io.write(buffer.data())
+ # buffer.close()
+ # bytes_io.seek(0)
+ # return Image.open(bytes_io)
+
+
+def align8to32(bytes, width, mode):
+ """
+ Pads each scanline of data so that each row is 32-bit aligned.
+ """
+
+ bits_per_pixel = {"1": 1, "L": 8, "P": 8, "I;16": 16}[mode]
+
+ # calculate bytes per line and the extra padding if needed
+ bits_per_line = bits_per_pixel * width
+ full_bytes_per_line, remaining_bits_per_line = divmod(bits_per_line, 8)
+ bytes_per_line = full_bytes_per_line + (1 if remaining_bits_per_line else 0)
+
+ extra_padding = -bytes_per_line % 4
+
+ # already 32 bit aligned by luck
+ if not extra_padding:
+ return bytes
+
+ new_data = []
+ for i in range(len(bytes) // bytes_per_line):
+ new_data.append(
+ bytes[i * bytes_per_line : (i + 1) * bytes_per_line]
+ + b"\x00" * extra_padding
+ )
+
+ return b"".join(new_data)
+
+
+def _toqclass_helper(im):
+ data = None
+ colortable = None
+ exclusive_fp = False
+
+ # handle filename, if given instead of image name
+ if hasattr(im, "toUtf8"):
+ # FIXME - is this really the best way to do this?
+ im = str(im.toUtf8(), "utf-8")
+ if is_path(im):
+ im = Image.open(im)
+ exclusive_fp = True
+
+ qt_format = QImage.Format if qt_version == "6" else QImage
+ if im.mode == "1":
+ format = qt_format.Format_Mono
+ elif im.mode == "L":
+ format = qt_format.Format_Indexed8
+ colortable = []
+ for i in range(256):
+ colortable.append(rgb(i, i, i))
+ elif im.mode == "P":
+ format = qt_format.Format_Indexed8
+ colortable = []
+ palette = im.getpalette()
+ for i in range(0, len(palette), 3):
+ colortable.append(rgb(*palette[i : i + 3]))
+ elif im.mode == "RGB":
+ # Populate the 4th channel with 255
+ im = im.convert("RGBA")
+
+ data = im.tobytes("raw", "BGRA")
+ format = qt_format.Format_RGB32
+ elif im.mode == "RGBA":
+ data = im.tobytes("raw", "BGRA")
+ format = qt_format.Format_ARGB32
+ elif im.mode == "I;16" and hasattr(qt_format, "Format_Grayscale16"): # Qt 5.13+
+ im = im.point(lambda i: i * 256)
+
+ format = qt_format.Format_Grayscale16
+ else:
+ if exclusive_fp:
+ im.close()
+ msg = f"unsupported image mode {repr(im.mode)}"
+ raise ValueError(msg)
+
+ size = im.size
+ __data = data or align8to32(im.tobytes(), size[0], im.mode)
+ if exclusive_fp:
+ im.close()
+ return {"data": __data, "size": size, "format": format, "colortable": colortable}
+
+
+if qt_is_installed:
+
+ class ImageQt(QImage):
+ def __init__(self, im):
+ """
+ A PIL image wrapper for Qt. This is a subclass of Qt's QImage
+ class.
+
+ :param im: A PIL Image object, or a file name (given either as
+ Python string or a PyQt string object).
+ """
+ im_data = _toqclass_helper(im)
+ # must keep a reference, or Qt will crash!
+ # All QImage constructors that take data operate on an existing
+ # buffer, so this buffer has to hang on for the life of the image.
+ # Fixes https://github.com/python-pillow/Pillow/issues/1370
+ self.__data = im_data["data"]
+ super().__init__(
+ self.__data,
+ im_data["size"][0],
+ im_data["size"][1],
+ im_data["format"],
+ )
+ if im_data["colortable"]:
+ self.setColorTable(im_data["colortable"])
+
+
+def toqimage(im):
+ return ImageQt(im)
+
+
+def toqpixmap(im):
+ # # This doesn't work. For now using a dumb approach.
+ # im_data = _toqclass_helper(im)
+ # result = QPixmap(im_data["size"][0], im_data["size"][1])
+ # result.loadFromData(im_data["data"])
+ qimage = toqimage(im)
+ return QPixmap.fromImage(qimage)
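A sketch of handing a PIL image to Qt (requires PyQt6 or PySide6, and a
running ``QGuiApplication`` before any ``QPixmap`` is created; the input file
is hypothetical):

    from PIL import Image, ImageQt

    im = Image.open("photo.jpg")
    qim = ImageQt.ImageQt(im)        # QImage subclass wrapping the PIL data
    pixmap = ImageQt.toqpixmap(im)   # for QLabel.setPixmap and friends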
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/ImageSequence.py b/Backend/venv/lib/python3.12/site-packages/PIL/ImageSequence.py
new file mode 100644
index 00000000..c4bb6334
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/PIL/ImageSequence.py
@@ -0,0 +1,76 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# sequence support classes
+#
+# history:
+# 1997-02-20 fl Created
+#
+# Copyright (c) 1997 by Secret Labs AB.
+# Copyright (c) 1997 by Fredrik Lundh.
+#
+# See the README file for information on usage and redistribution.
+#
+
+##
+
+
+class Iterator:
+ """
+ This class implements an iterator object that can be used to loop
+ over an image sequence.
+
+ You can use the ``[]`` operator to access elements by index. This operator
+ will raise an :py:exc:`IndexError` if you try to access a nonexistent
+ frame.
+
+ :param im: An image object.
+ """
+
+ def __init__(self, im):
+ if not hasattr(im, "seek"):
+ msg = "im must have seek method"
+ raise AttributeError(msg)
+ self.im = im
+ self.position = getattr(self.im, "_min_frame", 0)
+
+ def __getitem__(self, ix):
+ try:
+ self.im.seek(ix)
+ return self.im
+ except EOFError as e:
+ raise IndexError from e # end of sequence
+
+ def __iter__(self):
+ return self
+
+ def __next__(self):
+ try:
+ self.im.seek(self.position)
+ self.position += 1
+ return self.im
+ except EOFError as e:
+ raise StopIteration from e
+
+
+def all_frames(im, func=None):
+ """
+ Applies a given function to all frames in an image or a list of images.
+ The frames are returned as a list of separate images.
+
+ :param im: An image, or a list of images.
+ :param func: The function to apply to all of the image frames.
+ :returns: A list of images.
+ """
+ if not isinstance(im, list):
+ im = [im]
+
+ ims = []
+ for imSequence in im:
+ current = imSequence.tell()
+
+ ims += [im_frame.copy() for im_frame in Iterator(imSequence)]
+
+ imSequence.seek(current)
+ return [func(im) for im in ims] if func else ims
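+
+
+if __name__ == "__main__":
+    # A minimal sketch: loop over the frames of a multi-frame image.
+    # "animation.gif" is a hypothetical input path.
+    from PIL import Image
+
+    with Image.open("animation.gif") as im:
+        for frame in Iterator(im):
+            print(frame.tell(), frame.size)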
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/ImageShow.py b/Backend/venv/lib/python3.12/site-packages/PIL/ImageShow.py
new file mode 100644
index 00000000..8b1c3f8b
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/PIL/ImageShow.py
@@ -0,0 +1,323 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# im.show() drivers
+#
+# History:
+# 2008-04-06 fl Created
+#
+# Copyright (c) Secret Labs AB 2008.
+#
+# See the README file for information on usage and redistribution.
+#
+import os
+import shutil
+import subprocess
+import sys
+from shlex import quote
+
+from . import Image
+
+_viewers = []
+
+
+def register(viewer, order=1):
+ """
+ The :py:func:`register` function is used to register additional viewers::
+
+ from PIL import ImageShow
+ ImageShow.register(MyViewer()) # MyViewer will be used as a last resort
+ ImageShow.register(MySecondViewer(), 0) # MySecondViewer will be prioritised
+ ImageShow.register(ImageShow.XVViewer(), 0) # XVViewer will be prioritised
+
+ :param viewer: The viewer to be registered.
+ :param order:
+ Zero or a negative integer to prepend this viewer to the list,
+ a positive integer to append it.
+ """
+ try:
+ if issubclass(viewer, Viewer):
+ viewer = viewer()
+ except TypeError:
+ pass # raised if viewer wasn't a class
+ if order > 0:
+ _viewers.append(viewer)
+ else:
+ _viewers.insert(0, viewer)
+
+
+def show(image, title=None, **options):
+ r"""
+ Display a given image.
+
+ :param image: An image object.
+ :param title: Optional title. Not all viewers can display the title.
+ :param \**options: Additional viewer options.
+ :returns: ``True`` if a suitable viewer was found, ``False`` otherwise.
+ """
+ for viewer in _viewers:
+ if viewer.show(image, title=title, **options):
+ return True
+ return False
+
+
+class Viewer:
+ """Base class for viewers."""
+
+ # main api
+
+ def show(self, image, **options):
+ """
+ The main function for displaying an image.
+ Converts the given image to the target format and displays it.
+ """
+
+ if not (
+ image.mode in ("1", "RGBA")
+ or (self.format == "PNG" and image.mode in ("I;16", "LA"))
+ ):
+ base = Image.getmodebase(image.mode)
+ if image.mode != base:
+ image = image.convert(base)
+
+ return self.show_image(image, **options)
+
+ # hook methods
+
+ format = None
+ """The format to convert the image into."""
+ options = {}
+ """Additional options used to convert the image."""
+
+ def get_format(self, image):
+ """Return format name, or ``None`` to save as PGM/PPM."""
+ return self.format
+
+ def get_command(self, file, **options):
+ """
+ Returns the command used to display the file.
+ Not implemented in the base class.
+ """
+ raise NotImplementedError
+
+ def save_image(self, image):
+ """Save to temporary file and return filename."""
+ return image._dump(format=self.get_format(image), **self.options)
+
+ def show_image(self, image, **options):
+ """Display the given image."""
+ return self.show_file(self.save_image(image), **options)
+
+ def show_file(self, path, **options):
+ """
+ Display given file.
+ """
+ os.system(self.get_command(path, **options)) # nosec
+ return 1
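+
+# A minimal custom-viewer sketch: a subclass only needs get_command (or
+# show_file); "my-viewer" is a hypothetical executable name.
+#
+#     class MyViewer(Viewer):
+#         format = "PNG"
+#
+#         def get_command(self, file, **options):
+#             return f"my-viewer {quote(file)}"
+#
+#     register(MyViewer)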
+
+
+# --------------------------------------------------------------------
+
+
+class WindowsViewer(Viewer):
+ """The default viewer on Windows is the default system application for PNG files."""
+
+ format = "PNG"
+ options = {"compress_level": 1, "save_all": True}
+
+ def get_command(self, file, **options):
+ return (
+ f'start "Pillow" /WAIT "{file}" '
+ "&& ping -n 4 127.0.0.1 >NUL "
+ f'&& del /f "{file}"'
+ )
+
+
+if sys.platform == "win32":
+ register(WindowsViewer)
+
+
+class MacViewer(Viewer):
+ """The default viewer on macOS using ``Preview.app``."""
+
+ format = "PNG"
+ options = {"compress_level": 1, "save_all": True}
+
+ def get_command(self, file, **options):
+ # on darwin open returns immediately resulting in the temp
+ # file removal while app is opening
+ command = "open -a Preview.app"
+ command = f"({command} {quote(file)}; sleep 20; rm -f {quote(file)})&"
+ return command
+
+ def show_file(self, path, **options):
+ """
+ Display given file.
+ """
+ subprocess.call(["open", "-a", "Preview.app", path])
+ executable = sys.executable or shutil.which("python3")
+ if executable:
+ subprocess.Popen(
+ [
+ executable,
+ "-c",
+ "import os, sys, time; time.sleep(20); os.remove(sys.argv[1])",
+ path,
+ ]
+ )
+ return 1
+
+
+if sys.platform == "darwin":
+ register(MacViewer)
+
+
+class UnixViewer(Viewer):
+ format = "PNG"
+ options = {"compress_level": 1, "save_all": True}
+
+    def get_command(self, file, **options):
+        command = self.get_command_ex(file, **options)[0]
+        # keep the shell command balanced; the subclasses below override
+        # show_file and invoke the executable directly instead
+        return f"{command} {quote(file)}"
+
+
+class XDGViewer(UnixViewer):
+ """
+ The freedesktop.org ``xdg-open`` command.
+ """
+
+ def get_command_ex(self, file, **options):
+ command = executable = "xdg-open"
+ return command, executable
+
+ def show_file(self, path, **options):
+ """
+ Display given file.
+ """
+ subprocess.Popen(["xdg-open", path])
+ return 1
+
+
+class DisplayViewer(UnixViewer):
+ """
+ The ImageMagick ``display`` command.
+ This viewer supports the ``title`` parameter.
+ """
+
+ def get_command_ex(self, file, title=None, **options):
+ command = executable = "display"
+ if title:
+ command += f" -title {quote(title)}"
+ return command, executable
+
+ def show_file(self, path, **options):
+ """
+ Display given file.
+ """
+ args = ["display"]
+ title = options.get("title")
+ if title:
+ args += ["-title", title]
+ args.append(path)
+
+ subprocess.Popen(args)
+ return 1
+
+
+class GmDisplayViewer(UnixViewer):
+ """The GraphicsMagick ``gm display`` command."""
+
+ def get_command_ex(self, file, **options):
+ executable = "gm"
+ command = "gm display"
+ return command, executable
+
+ def show_file(self, path, **options):
+ """
+ Display given file.
+ """
+ subprocess.Popen(["gm", "display", path])
+ return 1
+
+
+class EogViewer(UnixViewer):
+ """The GNOME Image Viewer ``eog`` command."""
+
+ def get_command_ex(self, file, **options):
+ executable = "eog"
+ command = "eog -n"
+ return command, executable
+
+ def show_file(self, path, **options):
+ """
+ Display given file.
+ """
+ subprocess.Popen(["eog", "-n", path])
+ return 1
+
+
+class XVViewer(UnixViewer):
+ """
+ The X Viewer ``xv`` command.
+ This viewer supports the ``title`` parameter.
+ """
+
+ def get_command_ex(self, file, title=None, **options):
+ # note: xv is pretty outdated. most modern systems have
+ # imagemagick's display command instead.
+ command = executable = "xv"
+ if title:
+ command += f" -name {quote(title)}"
+ return command, executable
+
+ def show_file(self, path, **options):
+ """
+ Display given file.
+ """
+ args = ["xv"]
+ title = options.get("title")
+ if title:
+ args += ["-name", title]
+ args.append(path)
+
+ subprocess.Popen(args)
+ return 1
+
+
+if sys.platform not in ("win32", "darwin"): # unixoids
+ if shutil.which("xdg-open"):
+ register(XDGViewer)
+ if shutil.which("display"):
+ register(DisplayViewer)
+ if shutil.which("gm"):
+ register(GmDisplayViewer)
+ if shutil.which("eog"):
+ register(EogViewer)
+ if shutil.which("xv"):
+ register(XVViewer)
+
+
+class IPythonViewer(Viewer):
+ """The viewer for IPython frontends."""
+
+ def show_image(self, image, **options):
+ ipython_display(image)
+ return 1
+
+
+try:
+ from IPython.display import display as ipython_display
+except ImportError:
+ pass
+else:
+ register(IPythonViewer)
+
+
+if __name__ == "__main__":
+ if len(sys.argv) < 2:
+ print("Syntax: python3 ImageShow.py imagefile [title]")
+ sys.exit()
+
+ with Image.open(sys.argv[1]) as im:
+ print(show(im, *sys.argv[2:]))
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/ImageStat.py b/Backend/venv/lib/python3.12/site-packages/PIL/ImageStat.py
new file mode 100644
index 00000000..b7ebddf0
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/PIL/ImageStat.py
@@ -0,0 +1,148 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# global image statistics
+#
+# History:
+# 1996-04-05 fl Created
+# 1997-05-21 fl Added mask; added rms, var, stddev attributes
+# 1997-08-05 fl Added median
+# 1998-07-05 hk Fixed integer overflow error
+#
+# Notes:
+# This class shows how to implement delayed evaluation of attributes.
+# To get a certain value, simply access the corresponding attribute.
+# The __getattr__ dispatcher takes care of the rest.
+#
+# Copyright (c) Secret Labs AB 1997.
+# Copyright (c) Fredrik Lundh 1996-97.
+#
+# See the README file for information on usage and redistribution.
+#
+
+import functools
+import math
+import operator
+
+
+class Stat:
+ def __init__(self, image_or_list, mask=None):
+ try:
+ if mask:
+ self.h = image_or_list.histogram(mask)
+ else:
+ self.h = image_or_list.histogram()
+ except AttributeError:
+ self.h = image_or_list # assume it to be a histogram list
+ if not isinstance(self.h, list):
+ msg = "first argument must be image or list"
+ raise TypeError(msg)
+ self.bands = list(range(len(self.h) // 256))
+
+ def __getattr__(self, id):
+ """Calculate missing attribute"""
+ if id[:4] == "_get":
+ raise AttributeError(id)
+ # calculate missing attribute
+ v = getattr(self, "_get" + id)()
+ setattr(self, id, v)
+ return v
+
+ def _getextrema(self):
+ """Get min/max values for each band in the image"""
+
+ def minmax(histogram):
+ n = 255
+ x = 0
+ for i in range(256):
+ if histogram[i]:
+ n = min(n, i)
+ x = max(x, i)
+ return n, x # returns (255, 0) if there's no data in the histogram
+
+ v = []
+ for i in range(0, len(self.h), 256):
+ v.append(minmax(self.h[i:]))
+ return v
+
+ def _getcount(self):
+ """Get total number of pixels in each layer"""
+
+ v = []
+ for i in range(0, len(self.h), 256):
+ v.append(functools.reduce(operator.add, self.h[i : i + 256]))
+ return v
+
+ def _getsum(self):
+ """Get sum of all pixels in each layer"""
+
+ v = []
+ for i in range(0, len(self.h), 256):
+ layer_sum = 0.0
+ for j in range(256):
+ layer_sum += j * self.h[i + j]
+ v.append(layer_sum)
+ return v
+
+ def _getsum2(self):
+ """Get squared sum of all pixels in each layer"""
+
+ v = []
+ for i in range(0, len(self.h), 256):
+ sum2 = 0.0
+ for j in range(256):
+ sum2 += (j**2) * float(self.h[i + j])
+ v.append(sum2)
+ return v
+
+ def _getmean(self):
+ """Get average pixel level for each layer"""
+
+ v = []
+ for i in self.bands:
+ v.append(self.sum[i] / self.count[i])
+ return v
+
+ def _getmedian(self):
+ """Get median pixel level for each layer"""
+
+ v = []
+ for i in self.bands:
+ s = 0
+ half = self.count[i] // 2
+ b = i * 256
+ for j in range(256):
+ s = s + self.h[b + j]
+ if s > half:
+ break
+ v.append(j)
+ return v
+
+ def _getrms(self):
+ """Get RMS for each layer"""
+
+ v = []
+ for i in self.bands:
+ v.append(math.sqrt(self.sum2[i] / self.count[i]))
+ return v
+
+ def _getvar(self):
+ """Get variance for each layer"""
+
+ v = []
+ for i in self.bands:
+ n = self.count[i]
+ v.append((self.sum2[i] - (self.sum[i] ** 2.0) / n) / n)
+ return v
+
+ def _getstddev(self):
+ """Get standard deviation for each layer"""
+
+ v = []
+ for i in self.bands:
+ v.append(math.sqrt(self.var[i]))
+ return v
+
+
+Global = Stat # compatibility
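+
+
+if __name__ == "__main__":
+    # A minimal sketch: each statistic is computed lazily on first
+    # attribute access, as the notes above describe.
+    from PIL import Image
+
+    st = Stat(Image.new("RGB", (8, 8), (10, 20, 30)))
+    print(st.mean)    # [10.0, 20.0, 30.0]
+    print(st.stddev)  # [0.0, 0.0, 0.0]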
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/ImageTk.py b/Backend/venv/lib/python3.12/site-packages/PIL/ImageTk.py
new file mode 100644
index 00000000..bf98eb2c
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/PIL/ImageTk.py
@@ -0,0 +1,283 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# a Tk display interface
+#
+# History:
+# 96-04-08 fl Created
+# 96-09-06 fl Added getimage method
+# 96-11-01 fl Rewritten, removed image attribute and crop method
+# 97-05-09 fl Use PyImagingPaste method instead of image type
+# 97-05-12 fl Minor tweaks to match the IFUNC95 interface
+# 97-05-17 fl Support the "pilbitmap" booster patch
+# 97-06-05 fl Added file= and data= argument to image constructors
+# 98-03-09 fl Added width and height methods to Image classes
+# 98-07-02 fl Use default mode for "P" images without palette attribute
+# 98-07-02 fl Explicitly destroy Tkinter image objects
+# 99-07-24 fl Support multiple Tk interpreters (from Greg Couch)
+# 99-07-26 fl Automatically hook into Tkinter (if possible)
+# 99-08-15 fl Hook uses _imagingtk instead of _imaging
+#
+# Copyright (c) 1997-1999 by Secret Labs AB
+# Copyright (c) 1996-1997 by Fredrik Lundh
+#
+# See the README file for information on usage and redistribution.
+#
+
+import tkinter
+from io import BytesIO
+
+from . import Image
+
+# --------------------------------------------------------------------
+# Check for Tkinter interface hooks
+
+_pilbitmap_ok = None
+
+
+def _pilbitmap_check():
+ global _pilbitmap_ok
+ if _pilbitmap_ok is None:
+ try:
+ im = Image.new("1", (1, 1))
+ tkinter.BitmapImage(data=f"PIL:{im.im.id}")
+ _pilbitmap_ok = 1
+ except tkinter.TclError:
+ _pilbitmap_ok = 0
+ return _pilbitmap_ok
+
+
+def _get_image_from_kw(kw):
+ source = None
+ if "file" in kw:
+ source = kw.pop("file")
+ elif "data" in kw:
+ source = BytesIO(kw.pop("data"))
+ if source:
+ return Image.open(source)
+
+
+def _pyimagingtkcall(command, photo, id):
+ tk = photo.tk
+ try:
+ tk.call(command, photo, id)
+ except tkinter.TclError:
+ # activate Tkinter hook
+ # may raise an error if it cannot attach to Tkinter
+ from . import _imagingtk
+
+ _imagingtk.tkinit(tk.interpaddr())
+ tk.call(command, photo, id)
+
+
+# --------------------------------------------------------------------
+# PhotoImage
+
+
+class PhotoImage:
+ """
+ A Tkinter-compatible photo image. This can be used
+ everywhere Tkinter expects an image object. If the image is an RGBA
+ image, pixels having alpha 0 are treated as transparent.
+
+ The constructor takes either a PIL image, or a mode and a size.
+ Alternatively, you can use the ``file`` or ``data`` options to initialize
+ the photo image object.
+
+ :param image: Either a PIL image, or a mode string. If a mode string is
+ used, a size must also be given.
+ :param size: If the first argument is a mode string, this defines the size
+ of the image.
+ :keyword file: A filename to load the image from (using
+ ``Image.open(file)``).
+ :keyword data: An 8-bit string containing image data (as loaded from an
+ image file).
+ """
+
+ def __init__(self, image=None, size=None, **kw):
+ # Tk compatibility: file or data
+ if image is None:
+ image = _get_image_from_kw(kw)
+
+ if hasattr(image, "mode") and hasattr(image, "size"):
+ # got an image instead of a mode
+ mode = image.mode
+ if mode == "P":
+ # palette mapped data
+ image.apply_transparency()
+ image.load()
+ try:
+ mode = image.palette.mode
+ except AttributeError:
+ mode = "RGB" # default
+ size = image.size
+ kw["width"], kw["height"] = size
+ else:
+ mode = image
+ image = None
+
+ if mode not in ["1", "L", "RGB", "RGBA"]:
+ mode = Image.getmodebase(mode)
+
+ self.__mode = mode
+ self.__size = size
+ self.__photo = tkinter.PhotoImage(**kw)
+ self.tk = self.__photo.tk
+ if image:
+ self.paste(image)
+
+ def __del__(self):
+ name = self.__photo.name
+ self.__photo.name = None
+ try:
+ self.__photo.tk.call("image", "delete", name)
+ except Exception:
+ pass # ignore internal errors
+
+ def __str__(self):
+ """
+ Get the Tkinter photo image identifier. This method is automatically
+ called by Tkinter whenever a PhotoImage object is passed to a Tkinter
+ method.
+
+ :return: A Tkinter photo image identifier (a string).
+ """
+ return str(self.__photo)
+
+ def width(self):
+ """
+ Get the width of the image.
+
+ :return: The width, in pixels.
+ """
+ return self.__size[0]
+
+ def height(self):
+ """
+ Get the height of the image.
+
+ :return: The height, in pixels.
+ """
+ return self.__size[1]
+
+ def paste(self, im):
+ """
+ Paste a PIL image into the photo image. Note that this can
+ be very slow if the photo image is displayed.
+
+ :param im: A PIL image. The size must match the target region. If the
+ mode does not match, the image is converted to the mode of
+ the bitmap image.
+ """
+ # convert to blittable
+ im.load()
+ image = im.im
+ if image.isblock() and im.mode == self.__mode:
+ block = image
+ else:
+ block = image.new_block(self.__mode, im.size)
+ image.convert2(block, image) # convert directly between buffers
+
+ _pyimagingtkcall("PyImagingPhoto", self.__photo, block.id)
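+
+# A minimal PhotoImage usage sketch (illustrative only):
+#
+#     root = tkinter.Tk()
+#     photo = PhotoImage(Image.new("RGB", (64, 64), "red"))
+#     tkinter.Label(root, image=photo).pack()
+#     root.mainloop()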
+
+
+# --------------------------------------------------------------------
+# BitmapImage
+
+
+class BitmapImage:
+ """
+ A Tkinter-compatible bitmap image. This can be used everywhere Tkinter
+ expects an image object.
+
+ The given image must have mode "1". Pixels having value 0 are treated as
+ transparent. Options, if any, are passed on to Tkinter. The most commonly
+ used option is ``foreground``, which is used to specify the color for the
+ non-transparent parts. See the Tkinter documentation for information on
+ how to specify colours.
+
+ :param image: A PIL image.
+ """
+
+ def __init__(self, image=None, **kw):
+ # Tk compatibility: file or data
+ if image is None:
+ image = _get_image_from_kw(kw)
+
+ self.__mode = image.mode
+ self.__size = image.size
+
+ if _pilbitmap_check():
+ # fast way (requires the pilbitmap booster patch)
+ image.load()
+ kw["data"] = f"PIL:{image.im.id}"
+ self.__im = image # must keep a reference
+ else:
+ # slow but safe way
+ kw["data"] = image.tobitmap()
+ self.__photo = tkinter.BitmapImage(**kw)
+
+ def __del__(self):
+ name = self.__photo.name
+ self.__photo.name = None
+ try:
+ self.__photo.tk.call("image", "delete", name)
+ except Exception:
+ pass # ignore internal errors
+
+ def width(self):
+ """
+ Get the width of the image.
+
+ :return: The width, in pixels.
+ """
+ return self.__size[0]
+
+ def height(self):
+ """
+ Get the height of the image.
+
+ :return: The height, in pixels.
+ """
+ return self.__size[1]
+
+ def __str__(self):
+ """
+ Get the Tkinter bitmap image identifier. This method is automatically
+ called by Tkinter whenever a BitmapImage object is passed to a Tkinter
+ method.
+
+ :return: A Tkinter bitmap image identifier (a string).
+ """
+ return str(self.__photo)
+
+
+def getimage(photo):
+ """Copies the contents of a PhotoImage to a PIL image memory."""
+ im = Image.new("RGBA", (photo.width(), photo.height()))
+ block = im.im
+
+ _pyimagingtkcall("PyImagingPhotoGet", photo, block.id)
+
+ return im
+
+
+def _show(image, title):
+ """Helper for the Image.show method."""
+
+ class UI(tkinter.Label):
+ def __init__(self, master, im):
+ if im.mode == "1":
+ self.image = BitmapImage(im, foreground="white", master=master)
+ else:
+ self.image = PhotoImage(im, master=master)
+ super().__init__(master, image=self.image, bg="black", bd=0)
+
+ if not tkinter._default_root:
+ msg = "tkinter not initialized"
+ raise OSError(msg)
+ top = tkinter.Toplevel()
+ if title:
+ top.title(title)
+ UI(top, image).pack()
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/ImageTransform.py b/Backend/venv/lib/python3.12/site-packages/PIL/ImageTransform.py
new file mode 100644
index 00000000..7881f0d2
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/PIL/ImageTransform.py
@@ -0,0 +1,102 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# transform wrappers
+#
+# History:
+# 2002-04-08 fl Created
+#
+# Copyright (c) 2002 by Secret Labs AB
+# Copyright (c) 2002 by Fredrik Lundh
+#
+# See the README file for information on usage and redistribution.
+#
+
+from . import Image
+
+
+class Transform(Image.ImageTransformHandler):
+ def __init__(self, data):
+ self.data = data
+
+ def getdata(self):
+ return self.method, self.data
+
+ def transform(self, size, image, **options):
+ # can be overridden
+ method, data = self.getdata()
+ return image.transform(size, method, data, **options)
+
+
+class AffineTransform(Transform):
+ """
+ Define an affine image transform.
+
+    This function takes a 6-tuple (a, b, c, d, e, f) which contains the first
+    two rows from an affine transform matrix. For each pixel (x, y) in the
+    output image, the new value is taken from a position (a x + b y + c,
+    d x + e y + f) in the input image, rounded to the nearest pixel.
+
+ This function can be used to scale, translate, rotate, and shear the
+ original image.
+
+ See :py:meth:`~PIL.Image.Image.transform`
+
+ :param matrix: A 6-tuple (a, b, c, d, e, f) containing the first two rows
+ from an affine transform matrix.
+ """
+
+ method = Image.Transform.AFFINE
+
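+# A usage sketch: shift image content 10 pixels right and 20 pixels down.
+# The matrix maps each *output* pixel back to an *input* position, so the
+# offsets appear negated.
+#
+#     out = im.transform(im.size, AffineTransform((1, 0, -10, 0, 1, -20)))
+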
+
+class ExtentTransform(Transform):
+ """
+ Define a transform to extract a subregion from an image.
+
+ Maps a rectangle (defined by two corners) from the image to a rectangle of
+ the given size. The resulting image will contain data sampled from between
+ the corners, such that (x0, y0) in the input image will end up at (0,0) in
+ the output image, and (x1, y1) at size.
+
+ This method can be used to crop, stretch, shrink, or mirror an arbitrary
+ rectangle in the current image. It is slightly slower than crop, but about
+ as fast as a corresponding resize operation.
+
+ See :py:meth:`~PIL.Image.Image.transform`
+
+ :param bbox: A 4-tuple (x0, y0, x1, y1) which specifies two points in the
+ input image's coordinate system. See :ref:`coordinate-system`.
+ """
+
+ method = Image.Transform.EXTENT
+
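+# A usage sketch: stretch the top-left quarter of an image to the full
+# output size, i.e. crop and zoom in one step.
+#
+#     w, h = im.size
+#     out = im.transform((w, h), ExtentTransform((0, 0, w // 2, h // 2)))
+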
+
+class QuadTransform(Transform):
+ """
+ Define a quad image transform.
+
+ Maps a quadrilateral (a region defined by four corners) from the image to a
+ rectangle of the given size.
+
+ See :py:meth:`~PIL.Image.Image.transform`
+
+    :param xy: An 8-tuple (x0, y0, x1, y1, x2, y2, x3, y3) which contains the
+ upper left, lower left, lower right, and upper right corner of the
+ source quadrilateral.
+ """
+
+ method = Image.Transform.QUAD
+
+
+class MeshTransform(Transform):
+ """
+ Define a mesh image transform. A mesh transform consists of one or more
+ individual quad transforms.
+
+ See :py:meth:`~PIL.Image.Image.transform`
+
+ :param data: A list of (bbox, quad) tuples.
+ """
+
+ method = Image.Transform.MESH
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/ImageWin.py b/Backend/venv/lib/python3.12/site-packages/PIL/ImageWin.py
new file mode 100644
index 00000000..ca9b14c8
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/PIL/ImageWin.py
@@ -0,0 +1,230 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# a Windows DIB display interface
+#
+# History:
+# 1996-05-20 fl Created
+# 1996-09-20 fl Fixed subregion exposure
+# 1997-09-21 fl Added draw primitive (for tzPrint)
+# 2003-05-21 fl Added experimental Window/ImageWindow classes
+# 2003-09-05 fl Added fromstring/tostring methods
+#
+# Copyright (c) Secret Labs AB 1997-2003.
+# Copyright (c) Fredrik Lundh 1996-2003.
+#
+# See the README file for information on usage and redistribution.
+#
+
+from . import Image
+
+
+class HDC:
+ """
+ Wraps an HDC integer. The resulting object can be passed to the
+ :py:meth:`~PIL.ImageWin.Dib.draw` and :py:meth:`~PIL.ImageWin.Dib.expose`
+ methods.
+ """
+
+ def __init__(self, dc):
+ self.dc = dc
+
+ def __int__(self):
+ return self.dc
+
+
+class HWND:
+ """
+ Wraps an HWND integer. The resulting object can be passed to the
+ :py:meth:`~PIL.ImageWin.Dib.draw` and :py:meth:`~PIL.ImageWin.Dib.expose`
+ methods, instead of a DC.
+ """
+
+ def __init__(self, wnd):
+ self.wnd = wnd
+
+ def __int__(self):
+ return self.wnd
+
+
+class Dib:
+ """
+ A Windows bitmap with the given mode and size. The mode can be one of "1",
+ "L", "P", or "RGB".
+
+ If the display requires a palette, this constructor creates a suitable
+ palette and associates it with the image. For an "L" image, 128 greylevels
+ are allocated. For an "RGB" image, a 6x6x6 colour cube is used, together
+ with 20 greylevels.
+
+    To make sure that palettes work properly under Windows, you must call the
+    ``query_palette`` method upon certain events from Windows.
+
+ :param image: Either a PIL image, or a mode string. If a mode string is
+ used, a size must also be given. The mode can be one of "1",
+ "L", "P", or "RGB".
+ :param size: If the first argument is a mode string, this
+ defines the size of the image.
+ """
+
+ def __init__(self, image, size=None):
+ if hasattr(image, "mode") and hasattr(image, "size"):
+ mode = image.mode
+ size = image.size
+ else:
+ mode = image
+ image = None
+ if mode not in ["1", "L", "P", "RGB"]:
+ mode = Image.getmodebase(mode)
+ self.image = Image.core.display(mode, size)
+ self.mode = mode
+ self.size = size
+ if image:
+ self.paste(image)
+
+ def expose(self, handle):
+ """
+ Copy the bitmap contents to a device context.
+
+ :param handle: Device context (HDC), cast to a Python integer, or an
+ HDC or HWND instance. In PythonWin, you can use
+ ``CDC.GetHandleAttrib()`` to get a suitable handle.
+ """
+ if isinstance(handle, HWND):
+ dc = self.image.getdc(handle)
+ try:
+ result = self.image.expose(dc)
+ finally:
+ self.image.releasedc(handle, dc)
+ else:
+ result = self.image.expose(handle)
+ return result
+
+ def draw(self, handle, dst, src=None):
+ """
+ Same as expose, but allows you to specify where to draw the image, and
+ what part of it to draw.
+
+ The destination and source areas are given as 4-tuple rectangles. If
+ the source is omitted, the entire image is copied. If the source and
+ the destination have different sizes, the image is resized as
+ necessary.
+ """
+ if not src:
+ src = (0, 0) + self.size
+ if isinstance(handle, HWND):
+ dc = self.image.getdc(handle)
+ try:
+ result = self.image.draw(dc, dst, src)
+ finally:
+ self.image.releasedc(handle, dc)
+ else:
+ result = self.image.draw(handle, dst, src)
+ return result
+
+ def query_palette(self, handle):
+ """
+ Installs the palette associated with the image in the given device
+ context.
+
+ This method should be called upon **QUERYNEWPALETTE** and
+ **PALETTECHANGED** events from Windows. If this method returns a
+ non-zero value, one or more display palette entries were changed, and
+ the image should be redrawn.
+
+ :param handle: Device context (HDC), cast to a Python integer, or an
+ HDC or HWND instance.
+ :return: A true value if one or more entries were changed (this
+ indicates that the image should be redrawn).
+ """
+ if isinstance(handle, HWND):
+ handle = self.image.getdc(handle)
+ try:
+ result = self.image.query_palette(handle)
+ finally:
+ self.image.releasedc(handle, handle)
+ else:
+ result = self.image.query_palette(handle)
+ return result
+
+ def paste(self, im, box=None):
+ """
+ Paste a PIL image into the bitmap image.
+
+ :param im: A PIL image. The size must match the target region.
+ If the mode does not match, the image is converted to the
+ mode of the bitmap image.
+ :param box: A 4-tuple defining the left, upper, right, and
+ lower pixel coordinate. See :ref:`coordinate-system`. If
+ None is given instead of a tuple, all of the image is
+ assumed.
+ """
+ im.load()
+ if self.mode != im.mode:
+ im = im.convert(self.mode)
+ if box:
+ self.image.paste(im.im, box)
+ else:
+ self.image.paste(im.im)
+
+ def frombytes(self, buffer):
+ """
+ Load display memory contents from byte data.
+
+ :param buffer: A buffer containing display data (usually
+ data returned from :py:func:`~PIL.ImageWin.Dib.tobytes`)
+ """
+ return self.image.frombytes(buffer)
+
+ def tobytes(self):
+ """
+ Copy display memory contents to bytes object.
+
+ :return: A bytes object containing display data.
+ """
+ return self.image.tobytes()
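+
+# A usage sketch (Windows only): render a PIL image to a device context
+# obtained from the surrounding GUI framework; "hdc" is a hypothetical
+# HDC integer.
+#
+#     dib = Dib(im)
+#     dib.expose(HDC(hdc))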
+
+
+class Window:
+ """Create a Window with the given title size."""
+
+ def __init__(self, title="PIL", width=None, height=None):
+ self.hwnd = Image.core.createwindow(
+ title, self.__dispatcher, width or 0, height or 0
+ )
+
+ def __dispatcher(self, action, *args):
+ return getattr(self, "ui_handle_" + action)(*args)
+
+ def ui_handle_clear(self, dc, x0, y0, x1, y1):
+ pass
+
+ def ui_handle_damage(self, x0, y0, x1, y1):
+ pass
+
+ def ui_handle_destroy(self):
+ pass
+
+ def ui_handle_repair(self, dc, x0, y0, x1, y1):
+ pass
+
+ def ui_handle_resize(self, width, height):
+ pass
+
+ def mainloop(self):
+ Image.core.eventloop()
+
+
+class ImageWindow(Window):
+ """Create an image window which displays the given image."""
+
+ def __init__(self, image, title="PIL"):
+ if not isinstance(image, Dib):
+ image = Dib(image)
+ self.image = image
+ width, height = image.size
+ super().__init__(title, width=width, height=height)
+
+ def ui_handle_repair(self, dc, x0, y0, x1, y1):
+ self.image.draw(dc, (x0, y0, x1, y1))
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/ImtImagePlugin.py b/Backend/venv/lib/python3.12/site-packages/PIL/ImtImagePlugin.py
new file mode 100644
index 00000000..d409fcd5
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/PIL/ImtImagePlugin.py
@@ -0,0 +1,101 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# IM Tools support for PIL
+#
+# history:
+# 1996-05-27 fl Created (read 8-bit images only)
+# 2001-02-17 fl Use 're' instead of 'regex' (Python 2.1) (0.2)
+#
+# Copyright (c) Secret Labs AB 1997-2001.
+# Copyright (c) Fredrik Lundh 1996-2001.
+#
+# See the README file for information on usage and redistribution.
+#
+
+
+import re
+
+from . import Image, ImageFile
+
+#
+# --------------------------------------------------------------------
+
+field = re.compile(rb"([a-z]*) ([^ \r\n]*)")
+
+
+##
+# Image plugin for IM Tools images.
+
+
+class ImtImageFile(ImageFile.ImageFile):
+ format = "IMT"
+ format_description = "IM Tools"
+
+ def _open(self):
+        # Quick rejection: if there's no LF among the first
+ # 100 bytes, this is (probably) not a text header.
+
+ buffer = self.fp.read(100)
+ if b"\n" not in buffer:
+ msg = "not an IM file"
+ raise SyntaxError(msg)
+
+ xsize = ysize = 0
+
+ while True:
+ if buffer:
+ s = buffer[:1]
+ buffer = buffer[1:]
+ else:
+ s = self.fp.read(1)
+ if not s:
+ break
+
+ if s == b"\x0C":
+ # image data begins
+ self.tile = [
+ (
+ "raw",
+ (0, 0) + self.size,
+ self.fp.tell() - len(buffer),
+ (self.mode, 0, 1),
+ )
+ ]
+
+ break
+
+ else:
+ # read key/value pair
+ if b"\n" not in buffer:
+ buffer += self.fp.read(100)
+ lines = buffer.split(b"\n")
+ s += lines.pop(0)
+ buffer = b"\n".join(lines)
+ if len(s) == 1 or len(s) > 100:
+ break
+ if s[0] == ord(b"*"):
+ continue # comment
+
+ m = field.match(s)
+ if not m:
+ break
+ k, v = m.group(1, 2)
+ if k == b"width":
+ xsize = int(v)
+ self._size = xsize, ysize
+ elif k == b"height":
+ ysize = int(v)
+ self._size = xsize, ysize
+ elif k == b"pixel" and v == b"n8":
+ self._mode = "L"
+
+
+#
+# --------------------------------------------------------------------
+
+Image.register_open(ImtImageFile.format, ImtImageFile)
+
+#
+# no extension registered (".im" is simply too common)
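+
+# Illustrative header sketch: the parser above expects a text header of
+# key/value lines followed by a form feed (0x0C), e.g.
+#
+#     width 256
+#     height 256
+#     pixel n8
+#     \x0c<8-bit raster data>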
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/IptcImagePlugin.py b/Backend/venv/lib/python3.12/site-packages/PIL/IptcImagePlugin.py
new file mode 100644
index 00000000..316cd17c
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/PIL/IptcImagePlugin.py
@@ -0,0 +1,230 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# IPTC/NAA file handling
+#
+# history:
+# 1995-10-01 fl Created
+# 1998-03-09 fl Cleaned up and added to PIL
+# 2002-06-18 fl Added getiptcinfo helper
+#
+# Copyright (c) Secret Labs AB 1997-2002.
+# Copyright (c) Fredrik Lundh 1995.
+#
+# See the README file for information on usage and redistribution.
+#
+import os
+import tempfile
+
+from . import Image, ImageFile
+from ._binary import i8
+from ._binary import i16be as i16
+from ._binary import i32be as i32
+from ._binary import o8
+
+COMPRESSION = {1: "raw", 5: "jpeg"}
+
+PAD = o8(0) * 4
+
+
+#
+# Helpers
+
+
+def i(c):
+ return i32((PAD + c)[-4:])
+
+
+def dump(c):
+ for i in c:
+ print("%02x" % i8(i), end=" ")
+ print()
+
+
+##
+# Image plugin for IPTC/NAA datastreams. To read IPTC/NAA fields
+# from TIFF and JPEG files, use the getiptcinfo function.
+
+
+class IptcImageFile(ImageFile.ImageFile):
+ format = "IPTC"
+ format_description = "IPTC/NAA"
+
+ def getint(self, key):
+ return i(self.info[key])
+
+ def field(self):
+ #
+        # get an IPTC field header
+ s = self.fp.read(5)
+ if not s.strip(b"\x00"):
+ return None, 0
+
+ tag = s[1], s[2]
+
+ # syntax
+ if s[0] != 0x1C or tag[0] not in [1, 2, 3, 4, 5, 6, 7, 8, 9, 240]:
+ msg = "invalid IPTC/NAA file"
+ raise SyntaxError(msg)
+
+ # field size
+ size = s[3]
+ if size > 132:
+ msg = "illegal field length in IPTC/NAA file"
+ raise OSError(msg)
+ elif size == 128:
+ size = 0
+ elif size > 128:
+ size = i(self.fp.read(size - 128))
+ else:
+ size = i16(s, 3)
+
+ return tag, size
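+
+    # Illustrative note: a standard field header is five bytes - 0x1C, a
+    # record number, a dataset number, and a two-byte big-endian length;
+    # e.g. b"\x1c\x02\x78\x00\x40" describes dataset 2:120 carrying 64
+    # bytes of data.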
+
+ def _open(self):
+ # load descriptive fields
+ while True:
+ offset = self.fp.tell()
+ tag, size = self.field()
+ if not tag or tag == (8, 10):
+ break
+ if size:
+ tagdata = self.fp.read(size)
+ else:
+ tagdata = None
+ if tag in self.info:
+ if isinstance(self.info[tag], list):
+ self.info[tag].append(tagdata)
+ else:
+ self.info[tag] = [self.info[tag], tagdata]
+ else:
+ self.info[tag] = tagdata
+
+ # mode
+ layers = i8(self.info[(3, 60)][0])
+ component = i8(self.info[(3, 60)][1])
+ if (3, 65) in self.info:
+ id = i8(self.info[(3, 65)][0]) - 1
+ else:
+ id = 0
+ if layers == 1 and not component:
+ self._mode = "L"
+ elif layers == 3 and component:
+ self._mode = "RGB"[id]
+ elif layers == 4 and component:
+ self._mode = "CMYK"[id]
+
+ # size
+ self._size = self.getint((3, 20)), self.getint((3, 30))
+
+ # compression
+ try:
+ compression = COMPRESSION[self.getint((3, 120))]
+ except KeyError as e:
+ msg = "Unknown IPTC image compression"
+ raise OSError(msg) from e
+
+ # tile
+ if tag == (8, 10):
+ self.tile = [
+ ("iptc", (compression, offset), (0, 0, self.size[0], self.size[1]))
+ ]
+
+ def load(self):
+ if len(self.tile) != 1 or self.tile[0][0] != "iptc":
+ return ImageFile.ImageFile.load(self)
+
+ type, tile, box = self.tile[0]
+
+ encoding, offset = tile
+
+ self.fp.seek(offset)
+
+ # Copy image data to temporary file
+ o_fd, outfile = tempfile.mkstemp(text=False)
+        # open in binary mode: the field data written below is bytes
+        o = os.fdopen(o_fd, "wb")
+        if encoding == "raw":
+            # To simplify access to the extracted file,
+            # prepend a PPM header
+            o.write(b"P5\n%d %d\n255\n" % self.size)
+ while True:
+ type, size = self.field()
+ if type != (8, 10):
+ break
+ while size > 0:
+ s = self.fp.read(min(size, 8192))
+ if not s:
+ break
+ o.write(s)
+ size -= len(s)
+ o.close()
+
+ try:
+ with Image.open(outfile) as _im:
+ _im.load()
+ self.im = _im.im
+ finally:
+ try:
+ os.unlink(outfile)
+ except OSError:
+ pass
+
+
+Image.register_open(IptcImageFile.format, IptcImageFile)
+
+Image.register_extension(IptcImageFile.format, ".iim")
+
+
+def getiptcinfo(im):
+ """
+ Get IPTC information from TIFF, JPEG, or IPTC file.
+
+ :param im: An image containing IPTC data.
+ :returns: A dictionary containing IPTC information, or None if
+ no IPTC information block was found.
+ """
+ import io
+
+ from . import JpegImagePlugin, TiffImagePlugin
+
+ data = None
+
+ if isinstance(im, IptcImageFile):
+ # return info dictionary right away
+ return im.info
+
+ elif isinstance(im, JpegImagePlugin.JpegImageFile):
+ # extract the IPTC/NAA resource
+ photoshop = im.info.get("photoshop")
+ if photoshop:
+ data = photoshop.get(0x0404)
+
+ elif isinstance(im, TiffImagePlugin.TiffImageFile):
+ # get raw data from the IPTC/NAA tag (PhotoShop tags the data
+ # as 4-byte integers, so we cannot use the get method...)
+ try:
+ data = im.tag.tagdata[TiffImagePlugin.IPTC_NAA_CHUNK]
+ except (AttributeError, KeyError):
+ pass
+
+ if data is None:
+ return None # no properties
+
+ # create an IptcImagePlugin object without initializing it
+ class FakeImage:
+ pass
+
+ im = FakeImage()
+ im.__class__ = IptcImageFile
+
+ # parse the IPTC information chunk
+ im.info = {}
+ im.fp = io.BytesIO(data)
+
+ try:
+ im._open()
+ except (IndexError, KeyError):
+ pass # expected failure
+
+ return im.info
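+
+
+if __name__ == "__main__":
+    # A minimal sketch: dump the IPTC fields of a file given on the
+    # command line (run as "python3 -m PIL.IptcImagePlugin file.jpg").
+    import sys
+
+    with Image.open(sys.argv[1]) as im:
+        print(getiptcinfo(im))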
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/Jpeg2KImagePlugin.py b/Backend/venv/lib/python3.12/site-packages/PIL/Jpeg2KImagePlugin.py
new file mode 100644
index 00000000..963d6c1a
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/PIL/Jpeg2KImagePlugin.py
@@ -0,0 +1,399 @@
+#
+# The Python Imaging Library
+# $Id$
+#
+# JPEG2000 file handling
+#
+# History:
+# 2014-03-12 ajh Created
+# 2021-06-30 rogermb Extract dpi information from the 'resc' header box
+#
+# Copyright (c) 2014 Coriolis Systems Limited
+# Copyright (c) 2014 Alastair Houghton
+#
+# See the README file for information on usage and redistribution.
+#
+import io
+import os
+import struct
+
+from . import Image, ImageFile, _binary
+
+
+class BoxReader:
+ """
+ A small helper class to read fields stored in JPEG2000 header boxes
+ and to easily step into and read sub-boxes.
+ """
+
+ def __init__(self, fp, length=-1):
+ self.fp = fp
+ self.has_length = length >= 0
+ self.length = length
+ self.remaining_in_box = -1
+
+ def _can_read(self, num_bytes):
+ if self.has_length and self.fp.tell() + num_bytes > self.length:
+ # Outside box: ensure we don't read past the known file length
+ return False
+ if self.remaining_in_box >= 0:
+ # Inside box contents: ensure read does not go past box boundaries
+ return num_bytes <= self.remaining_in_box
+ else:
+ return True # No length known, just read
+
+ def _read_bytes(self, num_bytes):
+ if not self._can_read(num_bytes):
+ msg = "Not enough data in header"
+ raise SyntaxError(msg)
+
+ data = self.fp.read(num_bytes)
+ if len(data) < num_bytes:
+ msg = f"Expected to read {num_bytes} bytes but only got {len(data)}."
+ raise OSError(msg)
+
+ if self.remaining_in_box > 0:
+ self.remaining_in_box -= num_bytes
+ return data
+
+ def read_fields(self, field_format):
+ size = struct.calcsize(field_format)
+ data = self._read_bytes(size)
+ return struct.unpack(field_format, data)
+
+ def read_boxes(self):
+ size = self.remaining_in_box
+ data = self._read_bytes(size)
+ return BoxReader(io.BytesIO(data), size)
+
+ def has_next_box(self):
+ if self.has_length:
+ return self.fp.tell() + self.remaining_in_box < self.length
+ else:
+ return True
+
+ def next_box_type(self):
+ # Skip the rest of the box if it has not been read
+ if self.remaining_in_box > 0:
+ self.fp.seek(self.remaining_in_box, os.SEEK_CUR)
+ self.remaining_in_box = -1
+
+ # Read the length and type of the next box
+ lbox, tbox = self.read_fields(">I4s")
+ if lbox == 1:
+ lbox = self.read_fields(">Q")[0]
+ hlen = 16
+ else:
+ hlen = 8
+
+ if lbox < hlen or not self._can_read(lbox - hlen):
+ msg = "Invalid header length"
+ raise SyntaxError(msg)
+
+ self.remaining_in_box = lbox - hlen
+ return tbox
+
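+# A usage sketch: walk the top-level boxes of a JP2 file, passing the
+# file length so iteration stops cleanly; "file.jp2" is a hypothetical
+# path.
+#
+#     with open("file.jp2", "rb") as fp:
+#         reader = BoxReader(fp, os.path.getsize("file.jp2"))
+#         while reader.has_next_box():
+#             print(reader.next_box_type())
+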
+
+def _parse_codestream(fp):
+ """Parse the JPEG 2000 codestream to extract the size and component
+ count from the SIZ marker segment, returning a PIL (size, mode) tuple."""
+
+ hdr = fp.read(2)
+ lsiz = _binary.i16be(hdr)
+ siz = hdr + fp.read(lsiz - 2)
+ lsiz, rsiz, xsiz, ysiz, xosiz, yosiz, _, _, _, _, csiz = struct.unpack_from(
+ ">HHIIIIIIIIH", siz
+ )
+ ssiz = [None] * csiz
+ xrsiz = [None] * csiz
+ yrsiz = [None] * csiz
+ for i in range(csiz):
+ ssiz[i], xrsiz[i], yrsiz[i] = struct.unpack_from(">BBB", siz, 36 + 3 * i)
+
+ size = (xsiz - xosiz, ysiz - yosiz)
+ if csiz == 1:
+ if (yrsiz[0] & 0x7F) > 8:
+ mode = "I;16"
+ else:
+ mode = "L"
+ elif csiz == 2:
+ mode = "LA"
+ elif csiz == 3:
+ mode = "RGB"
+ elif csiz == 4:
+ mode = "RGBA"
+ else:
+ mode = None
+
+ return size, mode
+
+
+def _res_to_dpi(num, denom, exp):
+ """Convert JPEG2000's (numerator, denominator, exponent-base-10) resolution,
+ calculated as (num / denom) * 10^exp and stored in dots per meter,
+ to floating-point dots per inch."""
+ if denom != 0:
+ return (254 * num * (10**exp)) / (10000 * denom)
+
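+# Worked example: a 'resc' box storing 2835 dots per metre as
+# (num=2835, denom=1, exp=0) gives 254 * 2835 / 10000 ~= 72.0 dpi,
+# since one inch is 0.0254 metres.
+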
+
+def _parse_jp2_header(fp):
+ """Parse the JP2 header box to extract size, component count,
+ color space information, and optionally DPI information,
+ returning a (size, mode, mimetype, dpi) tuple."""
+
+ # Find the JP2 header box
+ reader = BoxReader(fp)
+ header = None
+ mimetype = None
+ while reader.has_next_box():
+ tbox = reader.next_box_type()
+
+ if tbox == b"jp2h":
+ header = reader.read_boxes()
+ break
+ elif tbox == b"ftyp":
+ if reader.read_fields(">4s")[0] == b"jpx ":
+ mimetype = "image/jpx"
+
+ size = None
+ mode = None
+ bpc = None
+ nc = None
+ dpi = None # 2-tuple of DPI info, or None
+
+ while header.has_next_box():
+ tbox = header.next_box_type()
+
+ if tbox == b"ihdr":
+ height, width, nc, bpc = header.read_fields(">IIHB")
+ size = (width, height)
+ if nc == 1 and (bpc & 0x7F) > 8:
+ mode = "I;16"
+ elif nc == 1:
+ mode = "L"
+ elif nc == 2:
+ mode = "LA"
+ elif nc == 3:
+ mode = "RGB"
+ elif nc == 4:
+ mode = "RGBA"
+ elif tbox == b"res ":
+ res = header.read_boxes()
+ while res.has_next_box():
+ tres = res.next_box_type()
+ if tres == b"resc":
+ vrcn, vrcd, hrcn, hrcd, vrce, hrce = res.read_fields(">HHHHBB")
+ hres = _res_to_dpi(hrcn, hrcd, hrce)
+ vres = _res_to_dpi(vrcn, vrcd, vrce)
+ if hres is not None and vres is not None:
+ dpi = (hres, vres)
+ break
+
+ if size is None or mode is None:
+ msg = "Malformed JP2 header"
+ raise SyntaxError(msg)
+
+ return size, mode, mimetype, dpi
+
+
+##
+# Image plugin for JPEG2000 images.
+
+
+class Jpeg2KImageFile(ImageFile.ImageFile):
+ format = "JPEG2000"
+ format_description = "JPEG 2000 (ISO 15444)"
+
+ def _open(self):
+ sig = self.fp.read(4)
+ if sig == b"\xff\x4f\xff\x51":
+ self.codec = "j2k"
+ self._size, self._mode = _parse_codestream(self.fp)
+ else:
+ sig = sig + self.fp.read(8)
+
+ if sig == b"\x00\x00\x00\x0cjP \x0d\x0a\x87\x0a":
+ self.codec = "jp2"
+ header = _parse_jp2_header(self.fp)
+ self._size, self._mode, self.custom_mimetype, dpi = header
+ if dpi is not None:
+ self.info["dpi"] = dpi
+ if self.fp.read(12).endswith(b"jp2c\xff\x4f\xff\x51"):
+ self._parse_comment()
+ else:
+ msg = "not a JPEG 2000 file"
+ raise SyntaxError(msg)
+
+ if self.size is None or self.mode is None:
+ msg = "unable to determine size/mode"
+ raise SyntaxError(msg)
+
+ self._reduce = 0
+ self.layers = 0
+
+ fd = -1
+ length = -1
+
+ try:
+ fd = self.fp.fileno()
+ length = os.fstat(fd).st_size
+ except Exception:
+ fd = -1
+ try:
+ pos = self.fp.tell()
+ self.fp.seek(0, io.SEEK_END)
+ length = self.fp.tell()
+ self.fp.seek(pos)
+ except Exception:
+ length = -1
+
+ self.tile = [
+ (
+ "jpeg2k",
+ (0, 0) + self.size,
+ 0,
+ (self.codec, self._reduce, self.layers, fd, length),
+ )
+ ]
+
+ def _parse_comment(self):
+ hdr = self.fp.read(2)
+ length = _binary.i16be(hdr)
+ self.fp.seek(length - 2, os.SEEK_CUR)
+
+ while True:
+ marker = self.fp.read(2)
+ if not marker:
+ break
+ typ = marker[1]
+ if typ in (0x90, 0xD9):
+ # Start of tile or end of codestream
+ break
+ hdr = self.fp.read(2)
+ length = _binary.i16be(hdr)
+ if typ == 0x64:
+ # Comment
+ self.info["comment"] = self.fp.read(length - 2)[2:]
+ break
+ else:
+ self.fp.seek(length - 2, os.SEEK_CUR)
+
+ @property
+ def reduce(self):
+ # https://github.com/python-pillow/Pillow/issues/4343 found that the
+ # new Image 'reduce' method was shadowed by this plugin's 'reduce'
+ # property. This attempts to allow for both scenarios
+ return self._reduce or super().reduce
+
+ @reduce.setter
+ def reduce(self, value):
+ self._reduce = value
+
+ def load(self):
+ if self.tile and self._reduce:
+ power = 1 << self._reduce
+ adjust = power >> 1
+ self._size = (
+ int((self.size[0] + adjust) / power),
+ int((self.size[1] + adjust) / power),
+ )
+
+ # Update the reduce and layers settings
+ t = self.tile[0]
+ t3 = (t[3][0], self._reduce, self.layers, t[3][3], t[3][4])
+ self.tile = [(t[0], (0, 0) + self.size, t[2], t3)]
+
+ return ImageFile.ImageFile.load(self)
+
+
+def _accept(prefix):
+ return (
+ prefix[:4] == b"\xff\x4f\xff\x51"
+ or prefix[:12] == b"\x00\x00\x00\x0cjP \x0d\x0a\x87\x0a"
+ )
+
+
+# ------------------------------------------------------------
+# Save support
+
+
+def _save(im, fp, filename):
+ # Get the keyword arguments
+ info = im.encoderinfo
+
+ if filename.endswith(".j2k") or info.get("no_jp2", False):
+ kind = "j2k"
+ else:
+ kind = "jp2"
+
+ offset = info.get("offset", None)
+ tile_offset = info.get("tile_offset", None)
+ tile_size = info.get("tile_size", None)
+ quality_mode = info.get("quality_mode", "rates")
+ quality_layers = info.get("quality_layers", None)
+ if quality_layers is not None and not (
+ isinstance(quality_layers, (list, tuple))
+ and all(
+ [
+ isinstance(quality_layer, (int, float))
+ for quality_layer in quality_layers
+ ]
+ )
+ ):
+ msg = "quality_layers must be a sequence of numbers"
+ raise ValueError(msg)
+
+ num_resolutions = info.get("num_resolutions", 0)
+ cblk_size = info.get("codeblock_size", None)
+ precinct_size = info.get("precinct_size", None)
+ irreversible = info.get("irreversible", False)
+ progression = info.get("progression", "LRCP")
+ cinema_mode = info.get("cinema_mode", "no")
+ mct = info.get("mct", 0)
+ signed = info.get("signed", False)
+ comment = info.get("comment")
+ if isinstance(comment, str):
+ comment = comment.encode()
+ plt = info.get("plt", False)
+
+ fd = -1
+ if hasattr(fp, "fileno"):
+ try:
+ fd = fp.fileno()
+ except Exception:
+ fd = -1
+
+ im.encoderconfig = (
+ offset,
+ tile_offset,
+ tile_size,
+ quality_mode,
+ quality_layers,
+ num_resolutions,
+ cblk_size,
+ precinct_size,
+ irreversible,
+ progression,
+ cinema_mode,
+ mct,
+ signed,
+ fd,
+ comment,
+ plt,
+ )
+
+ ImageFile._save(im, fp, [("jpeg2k", (0, 0) + im.size, 0, kind)])
+
+
+# ------------------------------------------------------------
+# Registry stuff
+
+
+Image.register_open(Jpeg2KImageFile.format, Jpeg2KImageFile, _accept)
+Image.register_save(Jpeg2KImageFile.format, _save)
+
+Image.register_extensions(
+ Jpeg2KImageFile.format, [".jp2", ".j2k", ".jpc", ".jpf", ".jpx", ".j2c"]
+)
+
+Image.register_mime(Jpeg2KImageFile.format, "image/jp2")
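+
+
+if __name__ == "__main__":
+    # A minimal sketch: the encoder options read from im.encoderinfo in
+    # _save() arrive as Image.save() keyword arguments. "out.jp2" is a
+    # hypothetical output path; writing requires OpenJPEG support.
+    im = Image.new("RGB", (64, 64))
+    im.save("out.jp2", quality_mode="dB", quality_layers=[40], irreversible=True)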
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/JpegImagePlugin.py b/Backend/venv/lib/python3.12/site-packages/PIL/JpegImagePlugin.py
new file mode 100644
index 00000000..917bbf39
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/PIL/JpegImagePlugin.py
@@ -0,0 +1,861 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# JPEG (JFIF) file handling
+#
+# See "Digital Compression and Coding of Continuous-Tone Still Images,
+# Part 1, Requirements and Guidelines" (CCITT T.81 / ISO 10918-1)
+#
+# History:
+# 1995-09-09 fl Created
+# 1995-09-13 fl Added full parser
+# 1996-03-25 fl Added hack to use the IJG command line utilities
+# 1996-05-05 fl Workaround Photoshop 2.5 CMYK polarity bug
+# 1996-05-28 fl Added draft support, JFIF version (0.1)
+# 1996-12-30 fl Added encoder options, added progression property (0.2)
+# 1997-08-27 fl Save mode 1 images as BW (0.3)
+# 1998-07-12 fl Added YCbCr to draft and save methods (0.4)
+# 1998-10-19 fl Don't hang on files using 16-bit DQT's (0.4.1)
+# 2001-04-16 fl Extract DPI settings from JFIF files (0.4.2)
+# 2002-07-01 fl Skip pad bytes before markers; identify Exif files (0.4.3)
+# 2003-04-25 fl Added experimental EXIF decoder (0.5)
+# 2003-06-06 fl Added experimental EXIF GPSinfo decoder
+# 2003-09-13 fl Extract COM markers
+# 2009-09-06 fl Added icc_profile support (from Florian Hoech)
+# 2009-03-06 fl Changed CMYK handling; always use Adobe polarity (0.6)
+# 2009-03-08 fl Added subsampling support (from Justin Huff).
+#
+# Copyright (c) 1997-2003 by Secret Labs AB.
+# Copyright (c) 1995-1996 by Fredrik Lundh.
+#
+# See the README file for information on usage and redistribution.
+#
+import array
+import io
+import math
+import os
+import struct
+import subprocess
+import sys
+import tempfile
+import warnings
+
+from . import Image, ImageFile
+from ._binary import i16be as i16
+from ._binary import i32be as i32
+from ._binary import o8
+from ._binary import o16be as o16
+from .JpegPresets import presets
+
+#
+# Parser
+
+
+def Skip(self, marker):
+ n = i16(self.fp.read(2)) - 2
+ ImageFile._safe_read(self.fp, n)
+
+
+def APP(self, marker):
+ #
+ # Application marker. Store these in the APP dictionary.
+ # Also look for well-known application markers.
+
+ n = i16(self.fp.read(2)) - 2
+ s = ImageFile._safe_read(self.fp, n)
+
+ app = "APP%d" % (marker & 15)
+
+ self.app[app] = s # compatibility
+ self.applist.append((app, s))
+
+ if marker == 0xFFE0 and s[:4] == b"JFIF":
+ # extract JFIF information
+ self.info["jfif"] = version = i16(s, 5) # version
+ self.info["jfif_version"] = divmod(version, 256)
+ # extract JFIF properties
+ try:
+ jfif_unit = s[7]
+ jfif_density = i16(s, 8), i16(s, 10)
+ except Exception:
+ pass
+ else:
+ if jfif_unit == 1:
+ self.info["dpi"] = jfif_density
+ self.info["jfif_unit"] = jfif_unit
+ self.info["jfif_density"] = jfif_density
+ elif marker == 0xFFE1 and s[:5] == b"Exif\0":
+ if "exif" not in self.info:
+ # extract EXIF information (incomplete)
+ self.info["exif"] = s # FIXME: value will change
+ self._exif_offset = self.fp.tell() - n + 6
+ elif marker == 0xFFE2 and s[:5] == b"FPXR\0":
+ # extract FlashPix information (incomplete)
+ self.info["flashpix"] = s # FIXME: value will change
+ elif marker == 0xFFE2 and s[:12] == b"ICC_PROFILE\0":
+ # Since an ICC profile can be larger than the maximum size of
+ # a JPEG marker (64K), we need provisions to split it into
+ # multiple markers. The format defined by the ICC specifies
+ # one or more APP2 markers containing the following data:
+ # Identifying string ASCII "ICC_PROFILE\0" (12 bytes)
+ # Marker sequence number 1, 2, etc (1 byte)
+ # Number of markers Total of APP2's used (1 byte)
+ # Profile data (remainder of APP2 data)
+ # Decoders should use the marker sequence numbers to
+ # reassemble the profile, rather than assuming that the APP2
+ # markers appear in the correct sequence.
+ self.icclist.append(s)
+ elif marker == 0xFFED and s[:14] == b"Photoshop 3.0\x00":
+ # parse the image resource block
+ offset = 14
+ photoshop = self.info.setdefault("photoshop", {})
+ while s[offset : offset + 4] == b"8BIM":
+ try:
+ offset += 4
+ # resource code
+ code = i16(s, offset)
+ offset += 2
+ # resource name (usually empty)
+ name_len = s[offset]
+ # name = s[offset+1:offset+1+name_len]
+ offset += 1 + name_len
+ offset += offset & 1 # align
+ # resource data block
+ size = i32(s, offset)
+ offset += 4
+ data = s[offset : offset + size]
+ if code == 0x03ED: # ResolutionInfo
+ data = {
+ "XResolution": i32(data, 0) / 65536,
+ "DisplayedUnitsX": i16(data, 4),
+ "YResolution": i32(data, 8) / 65536,
+ "DisplayedUnitsY": i16(data, 12),
+ }
+ photoshop[code] = data
+ offset += size
+ offset += offset & 1 # align
+ except struct.error:
+ break # insufficient data
+
+ elif marker == 0xFFEE and s[:5] == b"Adobe":
+ self.info["adobe"] = i16(s, 5)
+ # extract Adobe custom properties
+ try:
+ adobe_transform = s[11]
+ except IndexError:
+ pass
+ else:
+ self.info["adobe_transform"] = adobe_transform
+ elif marker == 0xFFE2 and s[:4] == b"MPF\0":
+ # extract MPO information
+ self.info["mp"] = s[4:]
+ # offset is current location minus buffer size
+ # plus constant header size
+ self.info["mpoffset"] = self.fp.tell() - n + 4
+
+ # If DPI isn't in JPEG header, fetch from EXIF
+ if "dpi" not in self.info and "exif" in self.info:
+ try:
+ exif = self.getexif()
+ resolution_unit = exif[0x0128]
+ x_resolution = exif[0x011A]
+ try:
+ dpi = float(x_resolution[0]) / x_resolution[1]
+ except TypeError:
+ dpi = x_resolution
+ if math.isnan(dpi):
+ raise ValueError
+ if resolution_unit == 3: # cm
+ # 1 dpcm = 2.54 dpi
+ dpi *= 2.54
+ self.info["dpi"] = dpi, dpi
+ except (
+ struct.error,
+ KeyError,
+ SyntaxError,
+ TypeError,
+ ValueError,
+ ZeroDivisionError,
+ ):
+ # struct.error for truncated EXIF
+ # KeyError for dpi not included
+ # SyntaxError for invalid/unreadable EXIF
+ # ValueError or TypeError for dpi being an invalid float
+ # ZeroDivisionError for invalid dpi rational value
+ self.info["dpi"] = 72, 72
+
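+# An illustrative sketch: unpacking one APP2 ICC chunk in the layout
+# described in APP() above.
+#
+#     assert s[:12] == b"ICC_PROFILE\0"
+#     seq, total, fragment = s[12], s[13], s[14:]
+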
+
+def COM(self, marker):
+ #
+ # Comment marker. Store these in the APP dictionary.
+ n = i16(self.fp.read(2)) - 2
+ s = ImageFile._safe_read(self.fp, n)
+
+ self.info["comment"] = s
+ self.app["COM"] = s # compatibility
+ self.applist.append(("COM", s))
+
+
+def SOF(self, marker):
+ #
+ # Start of frame marker. Defines the size and mode of the
+ # image. JPEG is colour blind, so we use some simple
+ # heuristics to map the number of layers to an appropriate
+    # mode. Note that this could be made a bit smarter, by
+ # looking for JFIF and Adobe APP markers.
+
+ n = i16(self.fp.read(2)) - 2
+ s = ImageFile._safe_read(self.fp, n)
+ self._size = i16(s, 3), i16(s, 1)
+
+ self.bits = s[0]
+ if self.bits != 8:
+ msg = f"cannot handle {self.bits}-bit layers"
+ raise SyntaxError(msg)
+
+ self.layers = s[5]
+ if self.layers == 1:
+ self._mode = "L"
+ elif self.layers == 3:
+ self._mode = "RGB"
+ elif self.layers == 4:
+ self._mode = "CMYK"
+ else:
+ msg = f"cannot handle {self.layers}-layer images"
+ raise SyntaxError(msg)
+
+ if marker in [0xFFC2, 0xFFC6, 0xFFCA, 0xFFCE]:
+ self.info["progressive"] = self.info["progression"] = 1
+
+ if self.icclist:
+ # fixup icc profile
+ self.icclist.sort() # sort by sequence number
+ if self.icclist[0][13] == len(self.icclist):
+ profile = []
+ for p in self.icclist:
+ profile.append(p[14:])
+ icc_profile = b"".join(profile)
+ else:
+ icc_profile = None # wrong number of fragments
+ self.info["icc_profile"] = icc_profile
+ self.icclist = []
+
+ for i in range(6, len(s), 3):
+ t = s[i : i + 3]
+ # 4-tuples: id, vsamp, hsamp, qtable
+ self.layer.append((t[0], t[1] // 16, t[1] & 15, t[2]))
+
+
+def DQT(self, marker):
+ #
+ # Define quantization table. Note that there might be more
+ # than one table in each marker.
+
+ # FIXME: The quantization tables can be used to estimate the
+ # compression quality.
+
+ n = i16(self.fp.read(2)) - 2
+ s = ImageFile._safe_read(self.fp, n)
+ while len(s):
+ v = s[0]
+ precision = 1 if (v // 16 == 0) else 2 # in bytes
+ qt_length = 1 + precision * 64
+ if len(s) < qt_length:
+ msg = "bad quantization table marker"
+ raise SyntaxError(msg)
+ data = array.array("B" if precision == 1 else "H", s[1:qt_length])
+ if sys.byteorder == "little" and precision > 1:
+ data.byteswap() # the values are always big-endian
+ self.quantization[v & 15] = [data[i] for i in zigzag_index]
+ s = s[qt_length:]
+
+
+#
+# JPEG marker table
+
+MARKER = {
+ 0xFFC0: ("SOF0", "Baseline DCT", SOF),
+ 0xFFC1: ("SOF1", "Extended Sequential DCT", SOF),
+ 0xFFC2: ("SOF2", "Progressive DCT", SOF),
+ 0xFFC3: ("SOF3", "Spatial lossless", SOF),
+ 0xFFC4: ("DHT", "Define Huffman table", Skip),
+ 0xFFC5: ("SOF5", "Differential sequential DCT", SOF),
+ 0xFFC6: ("SOF6", "Differential progressive DCT", SOF),
+ 0xFFC7: ("SOF7", "Differential spatial", SOF),
+ 0xFFC8: ("JPG", "Extension", None),
+ 0xFFC9: ("SOF9", "Extended sequential DCT (AC)", SOF),
+ 0xFFCA: ("SOF10", "Progressive DCT (AC)", SOF),
+ 0xFFCB: ("SOF11", "Spatial lossless DCT (AC)", SOF),
+ 0xFFCC: ("DAC", "Define arithmetic coding conditioning", Skip),
+ 0xFFCD: ("SOF13", "Differential sequential DCT (AC)", SOF),
+ 0xFFCE: ("SOF14", "Differential progressive DCT (AC)", SOF),
+ 0xFFCF: ("SOF15", "Differential spatial (AC)", SOF),
+ 0xFFD0: ("RST0", "Restart 0", None),
+ 0xFFD1: ("RST1", "Restart 1", None),
+ 0xFFD2: ("RST2", "Restart 2", None),
+ 0xFFD3: ("RST3", "Restart 3", None),
+ 0xFFD4: ("RST4", "Restart 4", None),
+ 0xFFD5: ("RST5", "Restart 5", None),
+ 0xFFD6: ("RST6", "Restart 6", None),
+ 0xFFD7: ("RST7", "Restart 7", None),
+ 0xFFD8: ("SOI", "Start of image", None),
+ 0xFFD9: ("EOI", "End of image", None),
+ 0xFFDA: ("SOS", "Start of scan", Skip),
+ 0xFFDB: ("DQT", "Define quantization table", DQT),
+ 0xFFDC: ("DNL", "Define number of lines", Skip),
+ 0xFFDD: ("DRI", "Define restart interval", Skip),
+ 0xFFDE: ("DHP", "Define hierarchical progression", SOF),
+ 0xFFDF: ("EXP", "Expand reference component", Skip),
+ 0xFFE0: ("APP0", "Application segment 0", APP),
+ 0xFFE1: ("APP1", "Application segment 1", APP),
+ 0xFFE2: ("APP2", "Application segment 2", APP),
+ 0xFFE3: ("APP3", "Application segment 3", APP),
+ 0xFFE4: ("APP4", "Application segment 4", APP),
+ 0xFFE5: ("APP5", "Application segment 5", APP),
+ 0xFFE6: ("APP6", "Application segment 6", APP),
+ 0xFFE7: ("APP7", "Application segment 7", APP),
+ 0xFFE8: ("APP8", "Application segment 8", APP),
+ 0xFFE9: ("APP9", "Application segment 9", APP),
+ 0xFFEA: ("APP10", "Application segment 10", APP),
+ 0xFFEB: ("APP11", "Application segment 11", APP),
+ 0xFFEC: ("APP12", "Application segment 12", APP),
+ 0xFFED: ("APP13", "Application segment 13", APP),
+ 0xFFEE: ("APP14", "Application segment 14", APP),
+ 0xFFEF: ("APP15", "Application segment 15", APP),
+ 0xFFF0: ("JPG0", "Extension 0", None),
+ 0xFFF1: ("JPG1", "Extension 1", None),
+ 0xFFF2: ("JPG2", "Extension 2", None),
+ 0xFFF3: ("JPG3", "Extension 3", None),
+ 0xFFF4: ("JPG4", "Extension 4", None),
+ 0xFFF5: ("JPG5", "Extension 5", None),
+ 0xFFF6: ("JPG6", "Extension 6", None),
+ 0xFFF7: ("JPG7", "Extension 7", None),
+ 0xFFF8: ("JPG8", "Extension 8", None),
+ 0xFFF9: ("JPG9", "Extension 9", None),
+ 0xFFFA: ("JPG10", "Extension 10", None),
+ 0xFFFB: ("JPG11", "Extension 11", None),
+ 0xFFFC: ("JPG12", "Extension 12", None),
+ 0xFFFD: ("JPG13", "Extension 13", None),
+ 0xFFFE: ("COM", "Comment", COM),
+}
+
+
+def _accept(prefix):
+ # Magic number was taken from https://en.wikipedia.org/wiki/JPEG
+ return prefix[:3] == b"\xFF\xD8\xFF"
+
+
+##
+# Image plugin for JPEG and JFIF images.
+
+
+class JpegImageFile(ImageFile.ImageFile):
+ format = "JPEG"
+ format_description = "JPEG (ISO 10918)"
+
+ def _open(self):
+ s = self.fp.read(3)
+
+ if not _accept(s):
+ msg = "not a JPEG file"
+ raise SyntaxError(msg)
+ s = b"\xFF"
+
+ # Create attributes
+ self.bits = self.layers = 0
+
+ # JPEG specifics (internal)
+ self.layer = []
+ self.huffman_dc = {}
+ self.huffman_ac = {}
+ self.quantization = {}
+ self.app = {} # compatibility
+ self.applist = []
+ self.icclist = []
+
+ while True:
+ i = s[0]
+ if i == 0xFF:
+ s = s + self.fp.read(1)
+ i = i16(s)
+ else:
+ # Skip non-0xFF junk
+ s = self.fp.read(1)
+ continue
+
+ if i in MARKER:
+ name, description, handler = MARKER[i]
+ if handler is not None:
+ handler(self, i)
+ if i == 0xFFDA: # start of scan
+ rawmode = self.mode
+ if self.mode == "CMYK":
+ rawmode = "CMYK;I" # assume adobe conventions
+ self.tile = [("jpeg", (0, 0) + self.size, 0, (rawmode, ""))]
+ # self.__offset = self.fp.tell()
+ break
+ s = self.fp.read(1)
+ elif i == 0 or i == 0xFFFF:
+ # padded marker or junk; move on
+ s = b"\xff"
+ elif i == 0xFF00: # Skip extraneous data (escaped 0xFF)
+ s = self.fp.read(1)
+ else:
+ msg = "no marker found"
+ raise SyntaxError(msg)
+
+ def load_read(self, read_bytes):
+ """
+        internal: read more image data.
+        On premature EOF, if LOAD_TRUNCATED_IMAGES is set, an EOI marker
+        is appended so that libjpeg can finish decoding.
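+
+        A usage sketch (``broken.jpg`` is a hypothetical truncated file)::
+
+            from PIL import Image, ImageFile
+
+            ImageFile.LOAD_TRUNCATED_IMAGES = True
+            with Image.open("broken.jpg") as im:
+                im.load()  # decodes despite the missing tail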
+ """
+ s = self.fp.read(read_bytes)
+
+ if not s and ImageFile.LOAD_TRUNCATED_IMAGES and not hasattr(self, "_ended"):
+ # Premature EOF.
+            # Pretend the file is finished by appending an EOI marker.
+ self._ended = True
+ return b"\xFF\xD9"
+
+ return s
+
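+    # Usage sketch (illustrative, not from the original source): draft()
+    # reconfigures the decoder for fast, approximate downscaling before
+    # load(), e.g.
+    #
+    #     with Image.open("photo.jpg") as im:   # hypothetical file
+    #         im.draft("L", (im.width // 8, im.height // 8))
+    #         im.load()  # decoded at roughly 1/8 scale, in greyscale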
+ def draft(self, mode, size):
+ if len(self.tile) != 1:
+ return
+
+ # Protect from second call
+ if self.decoderconfig:
+ return
+
+ d, e, o, a = self.tile[0]
+ scale = 1
+ original_size = self.size
+
+ if a[0] == "RGB" and mode in ["L", "YCbCr"]:
+ self._mode = mode
+ a = mode, ""
+
+ if size:
+ scale = min(self.size[0] // size[0], self.size[1] // size[1])
+ for s in [8, 4, 2, 1]:
+ if scale >= s:
+ break
+ e = (
+ e[0],
+ e[1],
+ (e[2] - e[0] + s - 1) // s + e[0],
+ (e[3] - e[1] + s - 1) // s + e[1],
+ )
+ self._size = ((self.size[0] + s - 1) // s, (self.size[1] + s - 1) // s)
+ scale = s
+
+ self.tile = [(d, e, o, a)]
+ self.decoderconfig = (scale, 0)
+
+ box = (0, 0, original_size[0] / scale, original_size[1] / scale)
+ return self.mode, box
+
+ def load_djpeg(self):
+ # ALTERNATIVE: handle JPEGs via the IJG command line utilities
+
+ f, path = tempfile.mkstemp()
+ os.close(f)
+ if os.path.exists(self.filename):
+ subprocess.check_call(["djpeg", "-outfile", path, self.filename])
+ else:
+ try:
+ os.unlink(path)
+ except OSError:
+ pass
+
+ msg = "Invalid Filename"
+ raise ValueError(msg)
+
+ try:
+ with Image.open(path) as _im:
+ _im.load()
+ self.im = _im.im
+ finally:
+ try:
+ os.unlink(path)
+ except OSError:
+ pass
+
+ self._mode = self.im.mode
+ self._size = self.im.size
+
+ self.tile = []
+
+ def _getexif(self):
+ return _getexif(self)
+
+ def _getmp(self):
+ return _getmp(self)
+
+ def getxmp(self):
+ """
+ Returns a dictionary containing the XMP tags.
+ Requires defusedxml to be installed.
+
+ :returns: XMP tags in a dictionary.
+ """
+
+ for segment, content in self.applist:
+ if segment == "APP1":
+ marker, xmp_tags = content.split(b"\x00")[:2]
+ if marker == b"http://ns.adobe.com/xap/1.0/":
+ return self._getxmp(xmp_tags)
+ return {}
+
+
+def _getexif(self):
+ if "exif" not in self.info:
+ return None
+ return self.getexif()._get_merged_dict()
+
+
+def _getmp(self):
+ # Extract MP information. This method was inspired by the "highly
+ # experimental" _getexif version that's been in use for years now,
+ # itself based on the ImageFileDirectory class in the TIFF plugin.
+
+ # The MP record essentially consists of a TIFF file embedded in a JPEG
+ # application marker.
+ try:
+ data = self.info["mp"]
+ except KeyError:
+ return None
+ file_contents = io.BytesIO(data)
+ head = file_contents.read(8)
+ endianness = ">" if head[:4] == b"\x4d\x4d\x00\x2a" else "<"
+ # process dictionary
+ from . import TiffImagePlugin
+
+ try:
+ info = TiffImagePlugin.ImageFileDirectory_v2(head)
+ file_contents.seek(info.next)
+ info.load(file_contents)
+ mp = dict(info)
+ except Exception as e:
+ msg = "malformed MP Index (unreadable directory)"
+ raise SyntaxError(msg) from e
+ # it's an error not to have a number of images
+ try:
+ quant = mp[0xB001]
+ except KeyError as e:
+ msg = "malformed MP Index (no number of images)"
+ raise SyntaxError(msg) from e
+ # get MP entries
+ mpentries = []
+ try:
+ rawmpentries = mp[0xB002]
+ for entrynum in range(0, quant):
+ unpackedentry = struct.unpack_from(
+ f"{endianness}LLLHH", rawmpentries, entrynum * 16
+ )
+ labels = ("Attribute", "Size", "DataOffset", "EntryNo1", "EntryNo2")
+ mpentry = dict(zip(labels, unpackedentry))
+ mpentryattr = {
+ "DependentParentImageFlag": bool(mpentry["Attribute"] & (1 << 31)),
+ "DependentChildImageFlag": bool(mpentry["Attribute"] & (1 << 30)),
+ "RepresentativeImageFlag": bool(mpentry["Attribute"] & (1 << 29)),
+ "Reserved": (mpentry["Attribute"] & (3 << 27)) >> 27,
+ "ImageDataFormat": (mpentry["Attribute"] & (7 << 24)) >> 24,
+ "MPType": mpentry["Attribute"] & 0x00FFFFFF,
+ }
+ if mpentryattr["ImageDataFormat"] == 0:
+ mpentryattr["ImageDataFormat"] = "JPEG"
+ else:
+ msg = "unsupported picture format in MPO"
+ raise SyntaxError(msg)
+ mptypemap = {
+ 0x000000: "Undefined",
+ 0x010001: "Large Thumbnail (VGA Equivalent)",
+ 0x010002: "Large Thumbnail (Full HD Equivalent)",
+ 0x020001: "Multi-Frame Image (Panorama)",
+ 0x020002: "Multi-Frame Image: (Disparity)",
+ 0x020003: "Multi-Frame Image: (Multi-Angle)",
+ 0x030000: "Baseline MP Primary Image",
+ }
+ mpentryattr["MPType"] = mptypemap.get(mpentryattr["MPType"], "Unknown")
+ mpentry["Attribute"] = mpentryattr
+ mpentries.append(mpentry)
+ mp[0xB002] = mpentries
+ except KeyError as e:
+ msg = "malformed MP Index (bad MP Entry)"
+ raise SyntaxError(msg) from e
+ # Next we should try and parse the individual image unique ID list;
+ # we don't because I've never seen this actually used in a real MPO
+ # file and so can't test it.
+ return mp
+
+
+# --------------------------------------------------------------------
+# stuff to save JPEG files
+
+RAWMODE = {
+ "1": "L",
+ "L": "L",
+ "RGB": "RGB",
+ "RGBX": "RGB",
+ "CMYK": "CMYK;I", # assume adobe conventions
+ "YCbCr": "YCbCr",
+}
+
+# fmt: off
+zigzag_index = (
+ 0, 1, 5, 6, 14, 15, 27, 28,
+ 2, 4, 7, 13, 16, 26, 29, 42,
+ 3, 8, 12, 17, 25, 30, 41, 43,
+ 9, 11, 18, 24, 31, 40, 44, 53,
+ 10, 19, 23, 32, 39, 45, 52, 54,
+ 20, 22, 33, 38, 46, 51, 55, 60,
+ 21, 34, 37, 47, 50, 56, 59, 61,
+ 35, 36, 48, 49, 57, 58, 62, 63,
+)
+
+samplings = {
+ (1, 1, 1, 1, 1, 1): 0,
+ (2, 1, 1, 1, 1, 1): 1,
+ (2, 2, 1, 1, 1, 1): 2,
+}
+# fmt: on
+
+
+def get_sampling(im):
+ # There's no subsampling when images have only 1 layer
+ # (grayscale images) or when they are CMYK (4 layers),
+ # so set subsampling to the default value.
+ #
+ # NOTE: currently Pillow can't encode JPEG to YCCK format.
+ # If YCCK support is added in the future, subsampling code will have
+ # to be updated (here and in JpegEncode.c) to deal with 4 layers.
+ if not hasattr(im, "layers") or im.layers in (1, 4):
+ return -1
+ sampling = im.layer[0][1:3] + im.layer[1][1:3] + im.layer[2][1:3]
+ return samplings.get(sampling, -1)
+
+
+def _save(im, fp, filename):
+ if im.width == 0 or im.height == 0:
+ msg = "cannot write empty image as JPEG"
+ raise ValueError(msg)
+
+ try:
+ rawmode = RAWMODE[im.mode]
+ except KeyError as e:
+ msg = f"cannot write mode {im.mode} as JPEG"
+ raise OSError(msg) from e
+
+ info = im.encoderinfo
+
+ dpi = [round(x) for x in info.get("dpi", (0, 0))]
+
+ quality = info.get("quality", -1)
+ subsampling = info.get("subsampling", -1)
+ qtables = info.get("qtables")
+
+ if quality == "keep":
+ quality = -1
+ subsampling = "keep"
+ qtables = "keep"
+ elif quality in presets:
+ preset = presets[quality]
+ quality = -1
+ subsampling = preset.get("subsampling", -1)
+ qtables = preset.get("quantization")
+ elif not isinstance(quality, int):
+ msg = "Invalid quality setting"
+ raise ValueError(msg)
+ else:
+ if subsampling in presets:
+ subsampling = presets[subsampling].get("subsampling", -1)
+ if isinstance(qtables, str) and qtables in presets:
+ qtables = presets[qtables].get("quantization")
+
+ if subsampling == "4:4:4":
+ subsampling = 0
+ elif subsampling == "4:2:2":
+ subsampling = 1
+ elif subsampling == "4:2:0":
+ subsampling = 2
+ elif subsampling == "4:1:1":
+ # For compatibility. Before Pillow 4.3, 4:1:1 actually meant 4:2:0.
+ # Set 4:2:0 if someone is still using that value.
+ subsampling = 2
+ elif subsampling == "keep":
+ if im.format != "JPEG":
+ msg = "Cannot use 'keep' when original image is not a JPEG"
+ raise ValueError(msg)
+ subsampling = get_sampling(im)
+
+ def validate_qtables(qtables):
+ if qtables is None:
+ return qtables
+ if isinstance(qtables, str):
+ try:
+ lines = [
+ int(num)
+ for line in qtables.splitlines()
+ for num in line.split("#", 1)[0].split()
+ ]
+ except ValueError as e:
+ msg = "Invalid quantization table"
+ raise ValueError(msg) from e
+ else:
+ qtables = [lines[s : s + 64] for s in range(0, len(lines), 64)]
+ if isinstance(qtables, (tuple, list, dict)):
+ if isinstance(qtables, dict):
+ qtables = [
+ qtables[key] for key in range(len(qtables)) if key in qtables
+ ]
+ elif isinstance(qtables, tuple):
+ qtables = list(qtables)
+ if not (0 < len(qtables) < 5):
+ msg = "None or too many quantization tables"
+ raise ValueError(msg)
+ for idx, table in enumerate(qtables):
+ try:
+ if len(table) != 64:
+ raise TypeError
+ table = array.array("H", table)
+ except TypeError as e:
+ msg = "Invalid quantization table"
+ raise ValueError(msg) from e
+ else:
+ qtables[idx] = list(table)
+ return qtables
+
+ if qtables == "keep":
+ if im.format != "JPEG":
+ msg = "Cannot use 'keep' when original image is not a JPEG"
+ raise ValueError(msg)
+ qtables = getattr(im, "quantization", None)
+ qtables = validate_qtables(qtables)
+
+ extra = info.get("extra", b"")
+
+ MAX_BYTES_IN_MARKER = 65533
+ icc_profile = info.get("icc_profile")
+ if icc_profile:
+ ICC_OVERHEAD_LEN = 14
+ MAX_DATA_BYTES_IN_MARKER = MAX_BYTES_IN_MARKER - ICC_OVERHEAD_LEN
+ markers = []
+ while icc_profile:
+ markers.append(icc_profile[:MAX_DATA_BYTES_IN_MARKER])
+ icc_profile = icc_profile[MAX_DATA_BYTES_IN_MARKER:]
+ i = 1
+ for marker in markers:
+ size = o16(2 + ICC_OVERHEAD_LEN + len(marker))
+ extra += (
+ b"\xFF\xE2"
+ + size
+ + b"ICC_PROFILE\0"
+ + o8(i)
+ + o8(len(markers))
+ + marker
+ )
+ i += 1
+
+ comment = info.get("comment", im.info.get("comment"))
+
+ # "progressive" is the official name, but older documentation
+ # says "progression"
+ # FIXME: issue a warning if the wrong form is used (post-1.1.7)
+ progressive = info.get("progressive", False) or info.get("progression", False)
+
+ optimize = info.get("optimize", False)
+
+ exif = info.get("exif", b"")
+ if isinstance(exif, Image.Exif):
+ exif = exif.tobytes()
+ if len(exif) > MAX_BYTES_IN_MARKER:
+ msg = "EXIF data is too long"
+ raise ValueError(msg)
+
+ # get keyword arguments
+ im.encoderconfig = (
+ quality,
+ progressive,
+ info.get("smooth", 0),
+ optimize,
+ info.get("streamtype", 0),
+ dpi[0],
+ dpi[1],
+ subsampling,
+ qtables,
+ comment,
+ extra,
+ exif,
+ )
+
+    # If we optimize, libjpeg needs a buffer big enough to hold the whole image
+    # in one shot. Guess at im.size bytes (the raw pixel size is channels *
+    # size; this value has been used in a django patch, see
+    # https://github.com/matthewwithanm/django-imagekit/issues/50).
+ bufsize = 0
+ if optimize or progressive:
+ # CMYK can be bigger
+ if im.mode == "CMYK":
+ bufsize = 4 * im.size[0] * im.size[1]
+ # keep sets quality to -1, but the actual value may be high.
+ elif quality >= 95 or quality == -1:
+ bufsize = 2 * im.size[0] * im.size[1]
+ else:
+ bufsize = im.size[0] * im.size[1]
+ if exif:
+ bufsize += len(exif) + 5
+ if extra:
+ bufsize += len(extra) + 1
+ else:
+ # The EXIF info needs to be written as one block, + APP1, + one spare byte.
+ # Ensure that our buffer is big enough. Same with the icc_profile block.
+ bufsize = max(bufsize, len(exif) + 5, len(extra) + 1)
+
+ ImageFile._save(im, fp, [("jpeg", (0, 0) + im.size, 0, rawmode)], bufsize)
+
+
+def _save_cjpeg(im, fp, filename):
+ # ALTERNATIVE: handle JPEGs via the IJG command line utilities.
+ tempfile = im._dump()
+ subprocess.check_call(["cjpeg", "-outfile", filename, tempfile])
+ try:
+ os.unlink(tempfile)
+ except OSError:
+ pass
+
+
+##
+# Factory for making JPEG and MPO instances
+def jpeg_factory(fp=None, filename=None):
+ im = JpegImageFile(fp, filename)
+ try:
+ mpheader = im._getmp()
+ if mpheader[45057] > 1:
+ # It's actually an MPO
+ from .MpoImagePlugin import MpoImageFile
+
+ # Don't reload everything, just convert it.
+ im = MpoImageFile.adopt(im, mpheader)
+ except (TypeError, IndexError):
+ # It is really a JPEG
+ pass
+ except SyntaxError:
+ warnings.warn(
+ "Image appears to be a malformed MPO file, it will be "
+ "interpreted as a base JPEG file"
+ )
+ return im
+
+
+# ---------------------------------------------------------------------
+# Registry stuff
+
+Image.register_open(JpegImageFile.format, jpeg_factory, _accept)
+Image.register_save(JpegImageFile.format, _save)
+
+Image.register_extensions(JpegImageFile.format, [".jfif", ".jpe", ".jpg", ".jpeg"])
+
+Image.register_mime(JpegImageFile.format, "image/jpeg")
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/JpegPresets.py b/Backend/venv/lib/python3.12/site-packages/PIL/JpegPresets.py
new file mode 100644
index 00000000..a678e248
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/PIL/JpegPresets.py
@@ -0,0 +1,240 @@
+"""
+JPEG quality settings equivalent to the Photoshop settings.
+Can be used when saving JPEG files.
+
+The following presets are available by default:
+``web_low``, ``web_medium``, ``web_high``, ``web_very_high``, ``web_maximum``,
+``low``, ``medium``, ``high``, ``maximum``.
+More presets can be added to the :py:data:`presets` dict if needed.
+
+To apply the preset, specify::
+
+ quality="preset_name"
+
+To apply only the quantization table::
+
+ qtables="preset_name"
+
+To apply only the subsampling setting::
+
+ subsampling="preset_name"
+
+Example::
+
+ im.save("image_name.jpg", quality="web_high")
+
+Subsampling
+-----------
+
+Subsampling is the practice of encoding images by implementing less resolution
+for chroma information than for luma information.
+(ref.: https://en.wikipedia.org/wiki/Chroma_subsampling)
+
+Possible subsampling values are 0, 1 and 2 that correspond to 4:4:4, 4:2:2 and
+4:2:0.
+
+You can get the subsampling of a JPEG with the
+:func:`.JpegImagePlugin.get_sampling` function.
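+
+For instance (a sketch; ``photo.jpg`` is a hypothetical file)::
+
+    from PIL import Image
+    from PIL.JpegImagePlugin import get_sampling
+
+    with Image.open("photo.jpg") as im:
+        print(get_sampling(im))  # 0, 1 or 2; -1 for greyscale or CMYK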
+
+In JPEG compressed data a JPEG marker is used instead of an EXIF tag.
+(ref.: https://exiv2.org/tags.html)
+
+
+Quantization tables
+-------------------
+
+They are values used by the DCT (Discrete Cosine Transform) to remove
+*unnecessary* information from the image (the lossy part of the compression).
+(ref.: https://en.wikipedia.org/wiki/Quantization_matrix#Quantization_matrices,
+https://en.wikipedia.org/wiki/JPEG#Quantization)
+
+You can get the quantization tables of a JPEG with::
+
+ im.quantization
+
+This will return a dict with a number of lists. You can pass this dict
+directly as the qtables argument when saving a JPEG.
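+
+For example, a minimal sketch (``source.jpg`` is a hypothetical input file)::
+
+    from PIL import Image
+
+    with Image.open("source.jpg") as im:
+        im.save("copy.jpg", qtables=im.quantization)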
+
+The quantization table format in presets is a list with sublists. These formats
+are interchangeable.
+
+Libjpeg ref.:
+https://web.archive.org/web/20120328125543/http://www.jpegcameras.com/libjpeg/libjpeg-3.html
+
+"""
+
+# fmt: off
+presets = {
+ 'web_low': {'subsampling': 2, # "4:2:0"
+ 'quantization': [
+ [20, 16, 25, 39, 50, 46, 62, 68,
+ 16, 18, 23, 38, 38, 53, 65, 68,
+ 25, 23, 31, 38, 53, 65, 68, 68,
+ 39, 38, 38, 53, 65, 68, 68, 68,
+ 50, 38, 53, 65, 68, 68, 68, 68,
+ 46, 53, 65, 68, 68, 68, 68, 68,
+ 62, 65, 68, 68, 68, 68, 68, 68,
+ 68, 68, 68, 68, 68, 68, 68, 68],
+ [21, 25, 32, 38, 54, 68, 68, 68,
+ 25, 28, 24, 38, 54, 68, 68, 68,
+ 32, 24, 32, 43, 66, 68, 68, 68,
+ 38, 38, 43, 53, 68, 68, 68, 68,
+ 54, 54, 66, 68, 68, 68, 68, 68,
+ 68, 68, 68, 68, 68, 68, 68, 68,
+ 68, 68, 68, 68, 68, 68, 68, 68,
+ 68, 68, 68, 68, 68, 68, 68, 68]
+ ]},
+ 'web_medium': {'subsampling': 2, # "4:2:0"
+ 'quantization': [
+ [16, 11, 11, 16, 23, 27, 31, 30,
+ 11, 12, 12, 15, 20, 23, 23, 30,
+ 11, 12, 13, 16, 23, 26, 35, 47,
+ 16, 15, 16, 23, 26, 37, 47, 64,
+ 23, 20, 23, 26, 39, 51, 64, 64,
+ 27, 23, 26, 37, 51, 64, 64, 64,
+ 31, 23, 35, 47, 64, 64, 64, 64,
+ 30, 30, 47, 64, 64, 64, 64, 64],
+ [17, 15, 17, 21, 20, 26, 38, 48,
+ 15, 19, 18, 17, 20, 26, 35, 43,
+ 17, 18, 20, 22, 26, 30, 46, 53,
+ 21, 17, 22, 28, 30, 39, 53, 64,
+ 20, 20, 26, 30, 39, 48, 64, 64,
+ 26, 26, 30, 39, 48, 63, 64, 64,
+ 38, 35, 46, 53, 64, 64, 64, 64,
+ 48, 43, 53, 64, 64, 64, 64, 64]
+ ]},
+ 'web_high': {'subsampling': 0, # "4:4:4"
+ 'quantization': [
+ [6, 4, 4, 6, 9, 11, 12, 16,
+ 4, 5, 5, 6, 8, 10, 12, 12,
+ 4, 5, 5, 6, 10, 12, 14, 19,
+ 6, 6, 6, 11, 12, 15, 19, 28,
+ 9, 8, 10, 12, 16, 20, 27, 31,
+ 11, 10, 12, 15, 20, 27, 31, 31,
+ 12, 12, 14, 19, 27, 31, 31, 31,
+ 16, 12, 19, 28, 31, 31, 31, 31],
+ [7, 7, 13, 24, 26, 31, 31, 31,
+ 7, 12, 16, 21, 31, 31, 31, 31,
+ 13, 16, 17, 31, 31, 31, 31, 31,
+ 24, 21, 31, 31, 31, 31, 31, 31,
+ 26, 31, 31, 31, 31, 31, 31, 31,
+ 31, 31, 31, 31, 31, 31, 31, 31,
+ 31, 31, 31, 31, 31, 31, 31, 31,
+ 31, 31, 31, 31, 31, 31, 31, 31]
+ ]},
+ 'web_very_high': {'subsampling': 0, # "4:4:4"
+ 'quantization': [
+ [2, 2, 2, 2, 3, 4, 5, 6,
+ 2, 2, 2, 2, 3, 4, 5, 6,
+ 2, 2, 2, 2, 4, 5, 7, 9,
+ 2, 2, 2, 4, 5, 7, 9, 12,
+ 3, 3, 4, 5, 8, 10, 12, 12,
+ 4, 4, 5, 7, 10, 12, 12, 12,
+ 5, 5, 7, 9, 12, 12, 12, 12,
+ 6, 6, 9, 12, 12, 12, 12, 12],
+ [3, 3, 5, 9, 13, 15, 15, 15,
+ 3, 4, 6, 11, 14, 12, 12, 12,
+ 5, 6, 9, 14, 12, 12, 12, 12,
+ 9, 11, 14, 12, 12, 12, 12, 12,
+ 13, 14, 12, 12, 12, 12, 12, 12,
+ 15, 12, 12, 12, 12, 12, 12, 12,
+ 15, 12, 12, 12, 12, 12, 12, 12,
+ 15, 12, 12, 12, 12, 12, 12, 12]
+ ]},
+ 'web_maximum': {'subsampling': 0, # "4:4:4"
+ 'quantization': [
+ [1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 2,
+ 1, 1, 1, 1, 1, 1, 2, 2,
+ 1, 1, 1, 1, 1, 2, 2, 3,
+ 1, 1, 1, 1, 2, 2, 3, 3,
+ 1, 1, 1, 2, 2, 3, 3, 3,
+ 1, 1, 2, 2, 3, 3, 3, 3],
+ [1, 1, 1, 2, 2, 3, 3, 3,
+ 1, 1, 1, 2, 3, 3, 3, 3,
+ 1, 1, 1, 3, 3, 3, 3, 3,
+ 2, 2, 3, 3, 3, 3, 3, 3,
+ 2, 3, 3, 3, 3, 3, 3, 3,
+ 3, 3, 3, 3, 3, 3, 3, 3,
+ 3, 3, 3, 3, 3, 3, 3, 3,
+ 3, 3, 3, 3, 3, 3, 3, 3]
+ ]},
+ 'low': {'subsampling': 2, # "4:2:0"
+ 'quantization': [
+ [18, 14, 14, 21, 30, 35, 34, 17,
+ 14, 16, 16, 19, 26, 23, 12, 12,
+ 14, 16, 17, 21, 23, 12, 12, 12,
+ 21, 19, 21, 23, 12, 12, 12, 12,
+ 30, 26, 23, 12, 12, 12, 12, 12,
+ 35, 23, 12, 12, 12, 12, 12, 12,
+ 34, 12, 12, 12, 12, 12, 12, 12,
+ 17, 12, 12, 12, 12, 12, 12, 12],
+ [20, 19, 22, 27, 20, 20, 17, 17,
+ 19, 25, 23, 14, 14, 12, 12, 12,
+ 22, 23, 14, 14, 12, 12, 12, 12,
+ 27, 14, 14, 12, 12, 12, 12, 12,
+ 20, 14, 12, 12, 12, 12, 12, 12,
+ 20, 12, 12, 12, 12, 12, 12, 12,
+ 17, 12, 12, 12, 12, 12, 12, 12,
+ 17, 12, 12, 12, 12, 12, 12, 12]
+ ]},
+ 'medium': {'subsampling': 2, # "4:2:0"
+ 'quantization': [
+ [12, 8, 8, 12, 17, 21, 24, 17,
+ 8, 9, 9, 11, 15, 19, 12, 12,
+ 8, 9, 10, 12, 19, 12, 12, 12,
+ 12, 11, 12, 21, 12, 12, 12, 12,
+ 17, 15, 19, 12, 12, 12, 12, 12,
+ 21, 19, 12, 12, 12, 12, 12, 12,
+ 24, 12, 12, 12, 12, 12, 12, 12,
+ 17, 12, 12, 12, 12, 12, 12, 12],
+ [13, 11, 13, 16, 20, 20, 17, 17,
+ 11, 14, 14, 14, 14, 12, 12, 12,
+ 13, 14, 14, 14, 12, 12, 12, 12,
+ 16, 14, 14, 12, 12, 12, 12, 12,
+ 20, 14, 12, 12, 12, 12, 12, 12,
+ 20, 12, 12, 12, 12, 12, 12, 12,
+ 17, 12, 12, 12, 12, 12, 12, 12,
+ 17, 12, 12, 12, 12, 12, 12, 12]
+ ]},
+ 'high': {'subsampling': 0, # "4:4:4"
+ 'quantization': [
+ [6, 4, 4, 6, 9, 11, 12, 16,
+ 4, 5, 5, 6, 8, 10, 12, 12,
+ 4, 5, 5, 6, 10, 12, 12, 12,
+ 6, 6, 6, 11, 12, 12, 12, 12,
+ 9, 8, 10, 12, 12, 12, 12, 12,
+ 11, 10, 12, 12, 12, 12, 12, 12,
+ 12, 12, 12, 12, 12, 12, 12, 12,
+ 16, 12, 12, 12, 12, 12, 12, 12],
+ [7, 7, 13, 24, 20, 20, 17, 17,
+ 7, 12, 16, 14, 14, 12, 12, 12,
+ 13, 16, 14, 14, 12, 12, 12, 12,
+ 24, 14, 14, 12, 12, 12, 12, 12,
+ 20, 14, 12, 12, 12, 12, 12, 12,
+ 20, 12, 12, 12, 12, 12, 12, 12,
+ 17, 12, 12, 12, 12, 12, 12, 12,
+ 17, 12, 12, 12, 12, 12, 12, 12]
+ ]},
+ 'maximum': {'subsampling': 0, # "4:4:4"
+ 'quantization': [
+ [2, 2, 2, 2, 3, 4, 5, 6,
+ 2, 2, 2, 2, 3, 4, 5, 6,
+ 2, 2, 2, 2, 4, 5, 7, 9,
+ 2, 2, 2, 4, 5, 7, 9, 12,
+ 3, 3, 4, 5, 8, 10, 12, 12,
+ 4, 4, 5, 7, 10, 12, 12, 12,
+ 5, 5, 7, 9, 12, 12, 12, 12,
+ 6, 6, 9, 12, 12, 12, 12, 12],
+ [3, 3, 5, 9, 13, 15, 15, 15,
+ 3, 4, 6, 10, 14, 12, 12, 12,
+ 5, 6, 9, 14, 12, 12, 12, 12,
+ 9, 10, 14, 12, 12, 12, 12, 12,
+ 13, 14, 12, 12, 12, 12, 12, 12,
+ 15, 12, 12, 12, 12, 12, 12, 12,
+ 15, 12, 12, 12, 12, 12, 12, 12,
+ 15, 12, 12, 12, 12, 12, 12, 12]
+ ]},
+}
+# fmt: on
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/McIdasImagePlugin.py b/Backend/venv/lib/python3.12/site-packages/PIL/McIdasImagePlugin.py
new file mode 100644
index 00000000..bb79e71d
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/PIL/McIdasImagePlugin.py
@@ -0,0 +1,75 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# Basic McIdas support for PIL
+#
+# History:
+# 1997-05-05 fl Created (8-bit images only)
+# 2009-03-08 fl Added 16/32-bit support.
+#
+# Thanks to Richard Jones and Craig Swank for specs and samples.
+#
+# Copyright (c) Secret Labs AB 1997.
+# Copyright (c) Fredrik Lundh 1997.
+#
+# See the README file for information on usage and redistribution.
+#
+
+import struct
+
+from . import Image, ImageFile
+
+
+def _accept(s):
+ return s[:8] == b"\x00\x00\x00\x00\x00\x00\x00\x04"
+
+
+##
+# Image plugin for McIdas area images.
+
+
+class McIdasImageFile(ImageFile.ImageFile):
+ format = "MCIDAS"
+ format_description = "McIdas area file"
+
+ def _open(self):
+ # parse area file directory
+ s = self.fp.read(256)
+ if not _accept(s) or len(s) != 256:
+ msg = "not an McIdas area file"
+ raise SyntaxError(msg)
+
+ self.area_descriptor_raw = s
+ self.area_descriptor = w = [0] + list(struct.unpack("!64i", s))
+
+ # get mode
+ if w[11] == 1:
+ mode = rawmode = "L"
+ elif w[11] == 2:
+ # FIXME: add memory map support
+ mode = "I"
+ rawmode = "I;16B"
+ elif w[11] == 4:
+ # FIXME: add memory map support
+ mode = "I"
+ rawmode = "I;32B"
+ else:
+ msg = "unsupported McIdas format"
+ raise SyntaxError(msg)
+
+ self._mode = mode
+ self._size = w[10], w[9]
+
+ offset = w[34] + w[15]
+ stride = w[15] + w[10] * w[11] * w[14]
+
+ self.tile = [("raw", (0, 0) + self.size, offset, (rawmode, stride, 1))]
+
+
+# --------------------------------------------------------------------
+# registry
+
+Image.register_open(McIdasImageFile.format, McIdasImageFile, _accept)
+
+# no default extension
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/MicImagePlugin.py b/Backend/venv/lib/python3.12/site-packages/PIL/MicImagePlugin.py
new file mode 100644
index 00000000..80131893
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/PIL/MicImagePlugin.py
@@ -0,0 +1,103 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# Microsoft Image Composer support for PIL
+#
+# Notes:
+# uses TiffImagePlugin.py to read the actual image streams
+#
+# History:
+# 97-01-20 fl Created
+#
+# Copyright (c) Secret Labs AB 1997.
+# Copyright (c) Fredrik Lundh 1997.
+#
+# See the README file for information on usage and redistribution.
+#
+
+
+import olefile
+
+from . import Image, TiffImagePlugin
+
+#
+# --------------------------------------------------------------------
+
+
+def _accept(prefix):
+ return prefix[:8] == olefile.MAGIC
+
+
+##
+# Image plugin for Microsoft's Image Composer file format.
+
+
+class MicImageFile(TiffImagePlugin.TiffImageFile):
+ format = "MIC"
+ format_description = "Microsoft Image Composer"
+ _close_exclusive_fp_after_loading = False
+
+ def _open(self):
+        # read the OLE directory and see if this is likely
+        # to be a Microsoft Image Composer file
+
+ try:
+ self.ole = olefile.OleFileIO(self.fp)
+ except OSError as e:
+ msg = "not an MIC file; invalid OLE file"
+ raise SyntaxError(msg) from e
+
+ # find ACI subfiles with Image members (maybe not the
+ # best way to identify MIC files, but what the... ;-)
+
+ self.images = []
+ for path in self.ole.listdir():
+ if path[1:] and path[0][-4:] == ".ACI" and path[1] == "Image":
+ self.images.append(path)
+
+ # if we didn't find any images, this is probably not
+ # an MIC file.
+ if not self.images:
+ msg = "not an MIC file; no image entries"
+ raise SyntaxError(msg)
+
+ self.frame = None
+ self._n_frames = len(self.images)
+ self.is_animated = self._n_frames > 1
+
+ self.seek(0)
+
+ def seek(self, frame):
+ if not self._seek_check(frame):
+ return
+ try:
+ filename = self.images[frame]
+ except IndexError as e:
+ msg = "no such frame"
+ raise EOFError(msg) from e
+
+ self.fp = self.ole.openstream(filename)
+
+ TiffImagePlugin.TiffImageFile._open(self)
+
+ self.frame = frame
+
+ def tell(self):
+ return self.frame
+
+ def close(self):
+ self.ole.close()
+ super().close()
+
+ def __exit__(self, *args):
+ self.ole.close()
+ super().__exit__()
+
+
+#
+# --------------------------------------------------------------------
+
+Image.register_open(MicImageFile.format, MicImageFile, _accept)
+
+Image.register_extension(MicImageFile.format, ".mic")
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/MpegImagePlugin.py b/Backend/venv/lib/python3.12/site-packages/PIL/MpegImagePlugin.py
new file mode 100644
index 00000000..bfa88fe9
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/PIL/MpegImagePlugin.py
@@ -0,0 +1,82 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# MPEG file handling
+#
+# History:
+# 95-09-09 fl Created
+#
+# Copyright (c) Secret Labs AB 1997.
+# Copyright (c) Fredrik Lundh 1995.
+#
+# See the README file for information on usage and redistribution.
+#
+
+
+from . import Image, ImageFile
+from ._binary import i8
+
+#
+# Bitstream parser
+
+
+class BitStream:
+ def __init__(self, fp):
+ self.fp = fp
+ self.bits = 0
+ self.bitbuffer = 0
+
+ def next(self):
+ return i8(self.fp.read(1))
+
+ def peek(self, bits):
+ while self.bits < bits:
+ c = self.next()
+ if c < 0:
+ self.bits = 0
+ continue
+ self.bitbuffer = (self.bitbuffer << 8) + c
+ self.bits += 8
+ return self.bitbuffer >> (self.bits - bits) & (1 << bits) - 1
+
+ def skip(self, bits):
+ while self.bits < bits:
+ self.bitbuffer = (self.bitbuffer << 8) + i8(self.fp.read(1))
+ self.bits += 8
+ self.bits = self.bits - bits
+
+ def read(self, bits):
+ v = self.peek(bits)
+ self.bits = self.bits - bits
+ return v
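+
+    # Usage sketch (illustrative): _open() below uses this reader to parse
+    # the MPEG sequence header, e.g.
+    #
+    #     s = BitStream(fp)
+    #     if s.read(32) == 0x1B3:            # sequence-header start code
+    #         size = s.read(12), s.read(12)  # width, height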
+
+
+##
+# Image plugin for MPEG streams. This plugin can identify a stream,
+# but it cannot read it.
+
+
+class MpegImageFile(ImageFile.ImageFile):
+ format = "MPEG"
+ format_description = "MPEG"
+
+ def _open(self):
+ s = BitStream(self.fp)
+
+ if s.read(32) != 0x1B3:
+ msg = "not an MPEG file"
+ raise SyntaxError(msg)
+
+ self._mode = "RGB"
+ self._size = s.read(12), s.read(12)
+
+
+# --------------------------------------------------------------------
+# Registry stuff
+
+Image.register_open(MpegImageFile.format, MpegImageFile)
+
+Image.register_extensions(MpegImageFile.format, [".mpg", ".mpeg"])
+
+Image.register_mime(MpegImageFile.format, "video/mpeg")
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/MpoImagePlugin.py b/Backend/venv/lib/python3.12/site-packages/PIL/MpoImagePlugin.py
new file mode 100644
index 00000000..f9261c77
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/PIL/MpoImagePlugin.py
@@ -0,0 +1,197 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# MPO file handling
+#
+# See "Multi-Picture Format" (CIPA DC-007-Translation 2009, Standard of the
+# Camera & Imaging Products Association)
+#
+# The multi-picture object combines multiple JPEG images (with a modified EXIF
+# data format) into a single file. While it can theoretically be used much like
+# a GIF animation, it is commonly used to represent 3D photographs and is (as
+# of this writing) the most commonly used format by 3D cameras.
+#
+# History:
+# 2014-03-13 Feneric Created
+#
+# See the README file for information on usage and redistribution.
+#
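+#
+# Usage sketch (illustrative): reading the frames of an MPO file
+#
+#     from PIL import Image
+#
+#     with Image.open("stereo.mpo") as im:      # hypothetical file
+#         for frame in range(getattr(im, "n_frames", 1)):
+#             im.seek(frame)
+#             im.save(f"frame_{frame}.jpg")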
+
+import itertools
+import os
+import struct
+
+from . import (
+ ExifTags,
+ Image,
+ ImageFile,
+ ImageSequence,
+ JpegImagePlugin,
+ TiffImagePlugin,
+)
+from ._binary import i16be as i16
+from ._binary import o32le
+
+# def _accept(prefix):
+# return JpegImagePlugin._accept(prefix)
+
+
+def _save(im, fp, filename):
+ JpegImagePlugin._save(im, fp, filename)
+
+
+def _save_all(im, fp, filename):
+ append_images = im.encoderinfo.get("append_images", [])
+ if not append_images:
+ try:
+ animated = im.is_animated
+ except AttributeError:
+ animated = False
+ if not animated:
+ _save(im, fp, filename)
+ return
+
+ mpf_offset = 28
+ offsets = []
+ for imSequence in itertools.chain([im], append_images):
+ for im_frame in ImageSequence.Iterator(imSequence):
+ if not offsets:
+ # APP2 marker
+ im_frame.encoderinfo["extra"] = (
+ b"\xFF\xE2" + struct.pack(">H", 6 + 82) + b"MPF\0" + b" " * 82
+ )
+ exif = im_frame.encoderinfo.get("exif")
+ if isinstance(exif, Image.Exif):
+ exif = exif.tobytes()
+ im_frame.encoderinfo["exif"] = exif
+ if exif:
+ mpf_offset += 4 + len(exif)
+
+ JpegImagePlugin._save(im_frame, fp, filename)
+ offsets.append(fp.tell())
+ else:
+ im_frame.save(fp, "JPEG")
+ offsets.append(fp.tell() - offsets[-1])
+
+ ifd = TiffImagePlugin.ImageFileDirectory_v2()
+ ifd[0xB000] = b"0100"
+ ifd[0xB001] = len(offsets)
+
+ mpentries = b""
+ data_offset = 0
+ for i, size in enumerate(offsets):
+ if i == 0:
+ mptype = 0x030000 # Baseline MP Primary Image
+ else:
+ mptype = 0x000000 # Undefined
+        mpentries += struct.pack("<LLLHH", mptype, size, data_offset, 0, 0)
+        if i == 0:
+            data_offset -= mpf_offset
+        data_offset += size
+    ifd[0xB002] = mpentries
+
+    fp.seek(mpf_offset)
+    fp.write(b"II\x2A\x00" + o32le(8) + ifd.tobytes(8))
+    fp.seek(0, os.SEEK_END)
+
+
+##
+# Image plugin for MPO images.
+
+
+class MpoImageFile(JpegImagePlugin.JpegImageFile):
+    format = "MPO"
+    format_description = "MPO (CIPA DC-007)"
+    _close_exclusive_fp_after_loading = False
+
+    def _open(self):
+        self.fp.seek(0)  # prep the fp in order to pass the JPEG test
+        JpegImagePlugin.JpegImageFile._open(self)
+        self._after_jpeg_open()
+
+    def _after_jpeg_open(self, mpheader=None):
+        self._initial_size = self.size
+        self.mpinfo = mpheader if mpheader is not None else self._getmp()
+        self.n_frames = self.mpinfo[0xB001]
+        self.__mpoffsets = [
+            mpent["DataOffset"] + self.info["mpoffset"] for mpent in self.mpinfo[0xB002]
+        ]
+        self.__mpoffsets[0] = 0
+        # Note that the following assertion will only be invalid if something
+        # gets broken within JpegImagePlugin.
+        assert self.n_frames == len(self.__mpoffsets)
+        del self.info["mpoffset"]  # no longer needed
+        self.is_animated = self.n_frames > 1
+ self._fp = self.fp # FIXME: hack
+ self._fp.seek(self.__mpoffsets[0]) # get ready to read first frame
+ self.__frame = 0
+ self.offset = 0
+ # for now we can only handle reading and individual frame extraction
+ self.readonly = 1
+
+ def load_seek(self, pos):
+ self._fp.seek(pos)
+
+ def seek(self, frame):
+ if not self._seek_check(frame):
+ return
+ self.fp = self._fp
+ self.offset = self.__mpoffsets[frame]
+
+ self.fp.seek(self.offset + 2) # skip SOI marker
+ segment = self.fp.read(2)
+ if not segment:
+ msg = "No data found for frame"
+ raise ValueError(msg)
+ self._size = self._initial_size
+ if i16(segment) == 0xFFE1: # APP1
+ n = i16(self.fp.read(2)) - 2
+ self.info["exif"] = ImageFile._safe_read(self.fp, n)
+ self._reload_exif()
+
+ mptype = self.mpinfo[0xB002][frame]["Attribute"]["MPType"]
+ if mptype.startswith("Large Thumbnail"):
+ exif = self.getexif().get_ifd(ExifTags.IFD.Exif)
+ if 40962 in exif and 40963 in exif:
+ self._size = (exif[40962], exif[40963])
+ elif "exif" in self.info:
+ del self.info["exif"]
+ self._reload_exif()
+
+ self.tile = [("jpeg", (0, 0) + self.size, self.offset, (self.mode, ""))]
+ self.__frame = frame
+
+ def tell(self):
+ return self.__frame
+
+ @staticmethod
+ def adopt(jpeg_instance, mpheader=None):
+ """
+ Transform the instance of JpegImageFile into
+ an instance of MpoImageFile.
+ After the call, the JpegImageFile is extended
+ to be an MpoImageFile.
+
+        This is mainly useful when opening a JPEG
+        file that reveals itself as an MPO, to avoid
+        a double call to _open.
+ """
+ jpeg_instance.__class__ = MpoImageFile
+ jpeg_instance._after_jpeg_open(mpheader)
+ return jpeg_instance
+
+
+# ---------------------------------------------------------------------
+# Registry stuff
+
+# Note that since MPO shares a factory with JPEG, we do not need to do a
+# separate registration for it here.
+# Image.register_open(MpoImageFile.format,
+# JpegImagePlugin.jpeg_factory, _accept)
+Image.register_save(MpoImageFile.format, _save)
+Image.register_save_all(MpoImageFile.format, _save_all)
+
+Image.register_extension(MpoImageFile.format, ".mpo")
+
+Image.register_mime(MpoImageFile.format, "image/mpo")
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/MspImagePlugin.py b/Backend/venv/lib/python3.12/site-packages/PIL/MspImagePlugin.py
new file mode 100644
index 00000000..3f3609f1
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/PIL/MspImagePlugin.py
@@ -0,0 +1,194 @@
+#
+# The Python Imaging Library.
+#
+# MSP file handling
+#
+# This is the format used by the Paint program in Windows 1 and 2.
+#
+# History:
+# 95-09-05 fl Created
+# 97-01-03 fl Read/write MSP images
+# 17-02-21 es Fixed RLE interpretation
+#
+# Copyright (c) Secret Labs AB 1997.
+# Copyright (c) Fredrik Lundh 1995-97.
+# Copyright (c) Eric Soroos 2017.
+#
+# See the README file for information on usage and redistribution.
+#
+# More info on this format: https://archive.org/details/gg243631
+# Page 313:
+# Figure 205. Windows Paint Version 1: "DanM" Format
+# Figure 206. Windows Paint Version 2: "LinS" Format. Used in Windows V2.03
+#
+# See also: https://www.fileformat.info/format/mspaint/egff.htm
+
+import io
+import struct
+
+from . import Image, ImageFile
+from ._binary import i16le as i16
+from ._binary import o16le as o16
+
+#
+# read MSP files
+
+
+def _accept(prefix):
+ return prefix[:4] in [b"DanM", b"LinS"]
+
+
+##
+# Image plugin for Windows MSP images. This plugin supports both
+# uncompressed (Windows 1.0) and RLE compressed (Windows 2.0) images.
+
+
+class MspImageFile(ImageFile.ImageFile):
+ format = "MSP"
+ format_description = "Windows Paint"
+
+ def _open(self):
+ # Header
+ s = self.fp.read(32)
+ if not _accept(s):
+ msg = "not an MSP file"
+ raise SyntaxError(msg)
+
+ # Header checksum
+ checksum = 0
+ for i in range(0, 32, 2):
+ checksum = checksum ^ i16(s, i)
+ if checksum != 0:
+ msg = "bad MSP checksum"
+ raise SyntaxError(msg)
+
+ self._mode = "1"
+ self._size = i16(s, 4), i16(s, 6)
+
+ if s[:4] == b"DanM":
+ self.tile = [("raw", (0, 0) + self.size, 32, ("1", 0, 1))]
+ else:
+ self.tile = [("MSP", (0, 0) + self.size, 32, None)]
+
+
+class MspDecoder(ImageFile.PyDecoder):
+ # The algo for the MSP decoder is from
+ # https://www.fileformat.info/format/mspaint/egff.htm
+    # cc-by-attribution -- that page's content is taken from the
+    # Encyclopedia of Graphics File Formats and is licensed by
+    # O'Reilly under the Creative Commons Attribution license
+ #
+ # For RLE encoded files, the 32byte header is followed by a scan
+ # line map, encoded as one 16bit word of encoded byte length per
+ # line.
+ #
+    # NOTE: the encoded length of the line can be 0. This was not
+    # handled in the previous version of this decoder, and there's no
+    # mention of how to handle it in the documentation. From the few
+ # examples I've seen, I've assumed that it is a fill of the
+ # background color, in this case, white.
+ #
+ #
+ # Pseudocode of the decoder:
+ # Read a BYTE value as the RunType
+ # If the RunType value is zero
+ # Read next byte as the RunCount
+ # Read the next byte as the RunValue
+ # Write the RunValue byte RunCount times
+ # If the RunType value is non-zero
+ # Use this value as the RunCount
+ # Read and write the next RunCount bytes literally
+ #
+ # e.g.:
+ # 0x00 03 ff 05 00 01 02 03 04
+ # would yield the bytes:
+ # 0xff ff ff 00 01 02 03 04
+ #
+ # which are then interpreted as a bit packed mode '1' image
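+    #
+    # A direct Python translation of the pseudocode (an illustrative sketch
+    # only; the actual decoding happens in decode() below):
+    #
+    #     def unpack_row(row: bytes) -> bytes:  # hypothetical helper
+    #         out, idx = b"", 0
+    #         while idx < len(row):
+    #             runtype = row[idx]
+    #             idx += 1
+    #             if runtype == 0:                      # RLE run
+    #                 count, value = row[idx], row[idx + 1]
+    #                 out += bytes([value]) * count
+    #                 idx += 2
+    #             else:                                 # literal run
+    #                 out += row[idx : idx + runtype]
+    #                 idx += runtype
+    #         return out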
+
+ _pulls_fd = True
+
+ def decode(self, buffer):
+ img = io.BytesIO()
+ blank_line = bytearray((0xFF,) * ((self.state.xsize + 7) // 8))
+ try:
+ self.fd.seek(32)
+ rowmap = struct.unpack_from(
+ f"<{self.state.ysize}H", self.fd.read(self.state.ysize * 2)
+ )
+ except struct.error as e:
+ msg = "Truncated MSP file in row map"
+ raise OSError(msg) from e
+
+ for x, rowlen in enumerate(rowmap):
+ try:
+ if rowlen == 0:
+ img.write(blank_line)
+ continue
+ row = self.fd.read(rowlen)
+ if len(row) != rowlen:
+ msg = f"Truncated MSP file, expected {rowlen} bytes on row {x}"
+ raise OSError(msg)
+ idx = 0
+ while idx < rowlen:
+ runtype = row[idx]
+ idx += 1
+ if runtype == 0:
+ (runcount, runval) = struct.unpack_from("Bc", row, idx)
+ img.write(runval * runcount)
+ idx += 2
+ else:
+ runcount = runtype
+ img.write(row[idx : idx + runcount])
+ idx += runcount
+
+ except struct.error as e:
+ msg = f"Corrupted MSP file in row {x}"
+ raise OSError(msg) from e
+
+ self.set_as_raw(img.getvalue(), ("1", 0, 1))
+
+ return -1, 0
+
+
+Image.register_decoder("MSP", MspDecoder)
+
+
+#
+# write MSP files (uncompressed only)
+
+
+def _save(im, fp, filename):
+ if im.mode != "1":
+ msg = f"cannot write mode {im.mode} as MSP"
+ raise OSError(msg)
+
+ # create MSP header
+ header = [0] * 16
+
+ header[0], header[1] = i16(b"Da"), i16(b"nM") # version 1
+ header[2], header[3] = im.size
+ header[4], header[5] = 1, 1
+ header[6], header[7] = 1, 1
+ header[8], header[9] = im.size
+
+ checksum = 0
+ for h in header:
+ checksum = checksum ^ h
+ header[12] = checksum # FIXME: is this the right field?
+
+ # header
+ for h in header:
+ fp.write(o16(h))
+
+ # image body
+ ImageFile._save(im, fp, [("raw", (0, 0) + im.size, 32, ("1", 0, 1))])
+
+
+#
+# registry
+
+Image.register_open(MspImageFile.format, MspImageFile, _accept)
+Image.register_save(MspImageFile.format, _save)
+
+Image.register_extension(MspImageFile.format, ".msp")
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/PSDraw.py b/Backend/venv/lib/python3.12/site-packages/PIL/PSDraw.py
new file mode 100644
index 00000000..13b3048f
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/PIL/PSDraw.py
@@ -0,0 +1,229 @@
+#
+# The Python Imaging Library
+# $Id$
+#
+# Simple PostScript graphics interface
+#
+# History:
+# 1996-04-20 fl Created
+# 1999-01-10 fl Added gsave/grestore to image method
+# 2005-05-04 fl Fixed floating point issue in image (from Eric Etheridge)
+#
+# Copyright (c) 1997-2005 by Secret Labs AB. All rights reserved.
+# Copyright (c) 1996 by Fredrik Lundh.
+#
+# See the README file for information on usage and redistribution.
+#
+
+import sys
+
+from . import EpsImagePlugin
+
+##
+# Simple PostScript graphics interface.
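+#
+# A minimal usage sketch (the output file name is hypothetical):
+#
+#     from PIL import PSDraw
+#
+#     with open("out.ps", "wb") as fp:
+#         ps = PSDraw.PSDraw(fp)
+#         ps.begin_document()
+#         ps.setfont("Helvetica", 12)
+#         ps.text((72, 720), "hello PostScript")
+#         ps.end_document()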
+
+
+class PSDraw:
+ """
+ Sets up printing to the given file. If ``fp`` is omitted,
+ ``sys.stdout.buffer`` or ``sys.stdout`` is assumed.
+ """
+
+ def __init__(self, fp=None):
+ if not fp:
+ try:
+ fp = sys.stdout.buffer
+ except AttributeError:
+ fp = sys.stdout
+ self.fp = fp
+
+ def begin_document(self, id=None):
+ """Set up printing of a document. (Write PostScript DSC header.)"""
+ # FIXME: incomplete
+ self.fp.write(
+ b"%!PS-Adobe-3.0\n"
+ b"save\n"
+ b"/showpage { } def\n"
+ b"%%EndComments\n"
+ b"%%BeginDocument\n"
+ )
+ # self.fp.write(ERROR_PS) # debugging!
+ self.fp.write(EDROFF_PS)
+ self.fp.write(VDI_PS)
+ self.fp.write(b"%%EndProlog\n")
+ self.isofont = {}
+
+ def end_document(self):
+ """Ends printing. (Write PostScript DSC footer.)"""
+ self.fp.write(b"%%EndDocument\nrestore showpage\n%%End\n")
+ if hasattr(self.fp, "flush"):
+ self.fp.flush()
+
+ def setfont(self, font, size):
+ """
+ Selects which font to use.
+
+ :param font: A PostScript font name
+ :param size: Size in points.
+ """
+ font = bytes(font, "UTF-8")
+ if font not in self.isofont:
+ # reencode font
+ self.fp.write(b"/PSDraw-%s ISOLatin1Encoding /%s E\n" % (font, font))
+ self.isofont[font] = 1
+ # rough
+ self.fp.write(b"/F0 %d /PSDraw-%s F\n" % (size, font))
+
+ def line(self, xy0, xy1):
+ """
+ Draws a line between the two points. Coordinates are given in
+ PostScript point coordinates (72 points per inch, (0, 0) is the lower
+ left corner of the page).
+ """
+ self.fp.write(b"%d %d %d %d Vl\n" % (*xy0, *xy1))
+
+ def rectangle(self, box):
+ """
+ Draws a rectangle.
+
+ :param box: A tuple of four integers, specifying left, bottom, width and
+ height.
+ """
+ self.fp.write(b"%d %d M 0 %d %d Vr\n" % box)
+
+ def text(self, xy, text):
+ """
+ Draws text at the given position. You must use
+ :py:meth:`~PIL.PSDraw.PSDraw.setfont` before calling this method.
+ """
+ text = bytes(text, "UTF-8")
+ text = b"\\(".join(text.split(b"("))
+ text = b"\\)".join(text.split(b")"))
+ xy += (text,)
+ self.fp.write(b"%d %d M (%s) S\n" % xy)
+
+ def image(self, box, im, dpi=None):
+ """Draw a PIL image, centered in the given box."""
+ # default resolution depends on mode
+ if not dpi:
+ if im.mode == "1":
+ dpi = 200 # fax
+ else:
+ dpi = 100 # greyscale
+ # image size (on paper)
+ x = im.size[0] * 72 / dpi
+ y = im.size[1] * 72 / dpi
+ # max allowed size
+ xmax = float(box[2] - box[0])
+ ymax = float(box[3] - box[1])
+ if x > xmax:
+ y = y * xmax / x
+ x = xmax
+ if y > ymax:
+ x = x * ymax / y
+ y = ymax
+ dx = (xmax - x) / 2 + box[0]
+ dy = (ymax - y) / 2 + box[1]
+ self.fp.write(b"gsave\n%f %f translate\n" % (dx, dy))
+ if (x, y) != im.size:
+ # EpsImagePlugin._save prints the image at (0,0,xsize,ysize)
+ sx = x / im.size[0]
+ sy = y / im.size[1]
+ self.fp.write(b"%f %f scale\n" % (sx, sy))
+ EpsImagePlugin._save(im, self.fp, None, 0)
+ self.fp.write(b"\ngrestore\n")
+
+
+# --------------------------------------------------------------------
+# PostScript driver
+
+#
+# EDROFF.PS -- PostScript driver for Edroff 2
+#
+# History:
+# 94-01-25 fl: created (edroff 2.04)
+#
+# Copyright (c) Fredrik Lundh 1994.
+#
+
+
+EDROFF_PS = b"""\
+/S { show } bind def
+/P { moveto show } bind def
+/M { moveto } bind def
+/X { 0 rmoveto } bind def
+/Y { 0 exch rmoveto } bind def
+/E { findfont
+ dup maxlength dict begin
+ {
+ 1 index /FID ne { def } { pop pop } ifelse
+ } forall
+ /Encoding exch def
+ dup /FontName exch def
+ currentdict end definefont pop
+} bind def
+/F { findfont exch scalefont dup setfont
+ [ exch /setfont cvx ] cvx bind def
+} bind def
+"""
+
+#
+# VDI.PS -- PostScript driver for VDI meta commands
+#
+# History:
+# 94-01-25 fl: created (edroff 2.04)
+#
+# Copyright (c) Fredrik Lundh 1994.
+#
+
+VDI_PS = b"""\
+/Vm { moveto } bind def
+/Va { newpath arcn stroke } bind def
+/Vl { moveto lineto stroke } bind def
+/Vc { newpath 0 360 arc closepath } bind def
+/Vr { exch dup 0 rlineto
+ exch dup 0 exch rlineto
+ exch neg 0 rlineto
+ 0 exch neg rlineto
+ setgray fill } bind def
+/Tm matrix def
+/Ve { Tm currentmatrix pop
+ translate scale newpath 0 0 .5 0 360 arc closepath
+ Tm setmatrix
+} bind def
+/Vf { currentgray exch setgray fill setgray } bind def
+"""
+
+#
+# ERROR.PS -- Error handler
+#
+# History:
+# 89-11-21 fl: created (pslist 1.10)
+#
+
+ERROR_PS = b"""\
+/landscape false def
+/errorBUF 200 string def
+/errorNL { currentpoint 10 sub exch pop 72 exch moveto } def
+errordict begin /handleerror {
+ initmatrix /Courier findfont 10 scalefont setfont
+ newpath 72 720 moveto $error begin /newerror false def
+ (PostScript Error) show errorNL errorNL
+ (Error: ) show
+ /errorname load errorBUF cvs show errorNL errorNL
+ (Command: ) show
+ /command load dup type /stringtype ne { errorBUF cvs } if show
+ errorNL errorNL
+ (VMstatus: ) show
+ vmstatus errorBUF cvs show ( bytes available, ) show
+ errorBUF cvs show ( bytes used at level ) show
+ errorBUF cvs show errorNL errorNL
+    (Operand stack: ) show errorNL /ostack load {
+        dup type /stringtype ne { errorBUF cvs } if 72 0 rmoveto show errorNL
+    } forall errorNL
+    (Execution stack: ) show errorNL /estack load {
+ dup type /stringtype ne { errorBUF cvs } if 72 0 rmoveto show errorNL
+ } forall
+ end showpage
+} def end
+"""
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/PaletteFile.py b/Backend/venv/lib/python3.12/site-packages/PIL/PaletteFile.py
new file mode 100644
index 00000000..4a2c497f
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/PIL/PaletteFile.py
@@ -0,0 +1,51 @@
+#
+# Python Imaging Library
+# $Id$
+#
+# stuff to read simple, teragon-style palette files
+#
+# History:
+# 97-08-23 fl Created
+#
+# Copyright (c) Secret Labs AB 1997.
+# Copyright (c) Fredrik Lundh 1997.
+#
+# See the README file for information on usage and redistribution.
+#
+
+from ._binary import o8
+
+
+class PaletteFile:
+ """File handler for Teragon-style palette files."""
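+
+    # Each data line holds "index r g b" (or "index grey" for a grey level);
+    # lines starting with "#" are comments, e.g. (illustrative):
+    #
+    #     # a two-entry palette
+    #     0 0 0 0
+    #     255 255 255 255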
+
+ rawmode = "RGB"
+
+ def __init__(self, fp):
+        self.palette = [o8(i) * 3 for i in range(256)]  # bytes, so join() below works
+
+ while True:
+ s = fp.readline()
+
+ if not s:
+ break
+ if s[:1] == b"#":
+ continue
+ if len(s) > 100:
+ msg = "bad palette file"
+ raise SyntaxError(msg)
+
+ v = [int(x) for x in s.split()]
+ try:
+ [i, r, g, b] = v
+ except ValueError:
+ [i, r] = v
+ g = b = r
+
+ if 0 <= i <= 255:
+ self.palette[i] = o8(r) + o8(g) + o8(b)
+
+ self.palette = b"".join(self.palette)
+
+ def getpalette(self):
+ return self.palette, self.rawmode
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/PalmImagePlugin.py b/Backend/venv/lib/python3.12/site-packages/PIL/PalmImagePlugin.py
new file mode 100644
index 00000000..a88a9079
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/PIL/PalmImagePlugin.py
@@ -0,0 +1,225 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+
+##
+# Image plugin for Palm pixmap images (output only).
+##
+
+from . import Image, ImageFile
+from ._binary import o8
+from ._binary import o16be as o16b
+
+# fmt: off
+_Palm8BitColormapValues = (
+ (255, 255, 255), (255, 204, 255), (255, 153, 255), (255, 102, 255),
+ (255, 51, 255), (255, 0, 255), (255, 255, 204), (255, 204, 204),
+ (255, 153, 204), (255, 102, 204), (255, 51, 204), (255, 0, 204),
+ (255, 255, 153), (255, 204, 153), (255, 153, 153), (255, 102, 153),
+ (255, 51, 153), (255, 0, 153), (204, 255, 255), (204, 204, 255),
+ (204, 153, 255), (204, 102, 255), (204, 51, 255), (204, 0, 255),
+ (204, 255, 204), (204, 204, 204), (204, 153, 204), (204, 102, 204),
+ (204, 51, 204), (204, 0, 204), (204, 255, 153), (204, 204, 153),
+ (204, 153, 153), (204, 102, 153), (204, 51, 153), (204, 0, 153),
+ (153, 255, 255), (153, 204, 255), (153, 153, 255), (153, 102, 255),
+ (153, 51, 255), (153, 0, 255), (153, 255, 204), (153, 204, 204),
+ (153, 153, 204), (153, 102, 204), (153, 51, 204), (153, 0, 204),
+ (153, 255, 153), (153, 204, 153), (153, 153, 153), (153, 102, 153),
+ (153, 51, 153), (153, 0, 153), (102, 255, 255), (102, 204, 255),
+ (102, 153, 255), (102, 102, 255), (102, 51, 255), (102, 0, 255),
+ (102, 255, 204), (102, 204, 204), (102, 153, 204), (102, 102, 204),
+ (102, 51, 204), (102, 0, 204), (102, 255, 153), (102, 204, 153),
+ (102, 153, 153), (102, 102, 153), (102, 51, 153), (102, 0, 153),
+ (51, 255, 255), (51, 204, 255), (51, 153, 255), (51, 102, 255),
+ (51, 51, 255), (51, 0, 255), (51, 255, 204), (51, 204, 204),
+ (51, 153, 204), (51, 102, 204), (51, 51, 204), (51, 0, 204),
+ (51, 255, 153), (51, 204, 153), (51, 153, 153), (51, 102, 153),
+ (51, 51, 153), (51, 0, 153), (0, 255, 255), (0, 204, 255),
+ (0, 153, 255), (0, 102, 255), (0, 51, 255), (0, 0, 255),
+ (0, 255, 204), (0, 204, 204), (0, 153, 204), (0, 102, 204),
+ (0, 51, 204), (0, 0, 204), (0, 255, 153), (0, 204, 153),
+ (0, 153, 153), (0, 102, 153), (0, 51, 153), (0, 0, 153),
+ (255, 255, 102), (255, 204, 102), (255, 153, 102), (255, 102, 102),
+ (255, 51, 102), (255, 0, 102), (255, 255, 51), (255, 204, 51),
+ (255, 153, 51), (255, 102, 51), (255, 51, 51), (255, 0, 51),
+ (255, 255, 0), (255, 204, 0), (255, 153, 0), (255, 102, 0),
+ (255, 51, 0), (255, 0, 0), (204, 255, 102), (204, 204, 102),
+ (204, 153, 102), (204, 102, 102), (204, 51, 102), (204, 0, 102),
+ (204, 255, 51), (204, 204, 51), (204, 153, 51), (204, 102, 51),
+ (204, 51, 51), (204, 0, 51), (204, 255, 0), (204, 204, 0),
+ (204, 153, 0), (204, 102, 0), (204, 51, 0), (204, 0, 0),
+ (153, 255, 102), (153, 204, 102), (153, 153, 102), (153, 102, 102),
+ (153, 51, 102), (153, 0, 102), (153, 255, 51), (153, 204, 51),
+ (153, 153, 51), (153, 102, 51), (153, 51, 51), (153, 0, 51),
+ (153, 255, 0), (153, 204, 0), (153, 153, 0), (153, 102, 0),
+ (153, 51, 0), (153, 0, 0), (102, 255, 102), (102, 204, 102),
+ (102, 153, 102), (102, 102, 102), (102, 51, 102), (102, 0, 102),
+ (102, 255, 51), (102, 204, 51), (102, 153, 51), (102, 102, 51),
+ (102, 51, 51), (102, 0, 51), (102, 255, 0), (102, 204, 0),
+ (102, 153, 0), (102, 102, 0), (102, 51, 0), (102, 0, 0),
+ (51, 255, 102), (51, 204, 102), (51, 153, 102), (51, 102, 102),
+ (51, 51, 102), (51, 0, 102), (51, 255, 51), (51, 204, 51),
+ (51, 153, 51), (51, 102, 51), (51, 51, 51), (51, 0, 51),
+ (51, 255, 0), (51, 204, 0), (51, 153, 0), (51, 102, 0),
+ (51, 51, 0), (51, 0, 0), (0, 255, 102), (0, 204, 102),
+ (0, 153, 102), (0, 102, 102), (0, 51, 102), (0, 0, 102),
+ (0, 255, 51), (0, 204, 51), (0, 153, 51), (0, 102, 51),
+ (0, 51, 51), (0, 0, 51), (0, 255, 0), (0, 204, 0),
+ (0, 153, 0), (0, 102, 0), (0, 51, 0), (17, 17, 17),
+ (34, 34, 34), (68, 68, 68), (85, 85, 85), (119, 119, 119),
+ (136, 136, 136), (170, 170, 170), (187, 187, 187), (221, 221, 221),
+ (238, 238, 238), (192, 192, 192), (128, 0, 0), (128, 0, 128),
+ (0, 128, 0), (0, 128, 128), (0, 0, 0), (0, 0, 0),
+ (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0),
+ (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0),
+ (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0),
+ (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0),
+ (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0),
+ (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0))
+# fmt: on
+
+
+# so build a prototype image to be used for palette resampling
+def build_prototype_image():
+ image = Image.new("L", (1, len(_Palm8BitColormapValues)))
+ image.putdata(list(range(len(_Palm8BitColormapValues))))
+ palettedata = ()
+ for colormapValue in _Palm8BitColormapValues:
+ palettedata += colormapValue
+ palettedata += (0, 0, 0) * (256 - len(_Palm8BitColormapValues))
+ image.putpalette(palettedata)
+ return image
+
+
+Palm8BitColormapImage = build_prototype_image()
+
+# OK, we now have in Palm8BitColormapImage,
+# a "P"-mode image with the right palette
+#
+# --------------------------------------------------------------------
+
+_FLAGS = {"custom-colormap": 0x4000, "is-compressed": 0x8000, "has-transparent": 0x2000}
+
+_COMPRESSION_TYPES = {"none": 0xFF, "rle": 0x01, "scanline": 0x00}
+
+
+#
+# --------------------------------------------------------------------
+
+##
+# (Internal) Image save plugin for the Palm format.
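+#
+# Usage sketch (illustrative): Palm is registered below as an output-only
+# format, e.g.
+#
+#     im.convert("L").save("grey.palm", bpp=4)   # 4-bit greyscale
+#     im.convert("1").save("mono.palm")          # monochrome, inverted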
+
+
+def _save(im, fp, filename):
+ if im.mode == "P":
+ # we assume this is a color Palm image with the standard colormap,
+ # unless the "info" dict has a "custom-colormap" field
+
+ rawmode = "P"
+ bpp = 8
+ version = 1
+
+ elif im.mode == "L":
+ if im.encoderinfo.get("bpp") in (1, 2, 4):
+ # this is 8-bit grayscale, so we shift it to get the high-order bits,
+ # and invert it because
+ # Palm does greyscale from white (0) to black (1)
+ bpp = im.encoderinfo["bpp"]
+ im = im.point(
+ lambda x, shift=8 - bpp, maxval=(1 << bpp) - 1: maxval - (x >> shift)
+ )
+ elif im.info.get("bpp") in (1, 2, 4):
+ # here we assume that even though the inherent mode is 8-bit grayscale,
+ # only the lower bpp bits are significant.
+ # We invert them to match the Palm.
+ bpp = im.info["bpp"]
+ im = im.point(lambda x, maxval=(1 << bpp) - 1: maxval - (x & maxval))
+ else:
+ msg = f"cannot write mode {im.mode} as Palm"
+ raise OSError(msg)
+
+ # we ignore the palette here
+        im._mode = "P"  # bypass the read-only mode property
+ rawmode = "P;" + str(bpp)
+ version = 1
+
+ elif im.mode == "1":
+ # monochrome -- write it inverted, as is the Palm standard
+ rawmode = "1;I"
+ bpp = 1
+ version = 0
+
+ else:
+ msg = f"cannot write mode {im.mode} as Palm"
+ raise OSError(msg)
+
+ #
+ # make sure image data is available
+ im.load()
+
+ # write header
+
+ cols = im.size[0]
+ rows = im.size[1]
+
+ rowbytes = int((cols + (16 // bpp - 1)) / (16 // bpp)) * 2
+ transparent_index = 0
+ compression_type = _COMPRESSION_TYPES["none"]
+
+ flags = 0
+ if im.mode == "P" and "custom-colormap" in im.info:
+        flags = flags | _FLAGS["custom-colormap"]  # set, rather than mask, the flag bit
+ colormapsize = 4 * 256 + 2
+ colormapmode = im.palette.mode
+ colormap = im.getdata().getpalette()
+ else:
+ colormapsize = 0
+
+ if "offset" in im.info:
+ offset = (rowbytes * rows + 16 + 3 + colormapsize) // 4
+ else:
+ offset = 0
+
+ fp.write(o16b(cols) + o16b(rows) + o16b(rowbytes) + o16b(flags))
+ fp.write(o8(bpp))
+ fp.write(o8(version))
+ fp.write(o16b(offset))
+ fp.write(o8(transparent_index))
+ fp.write(o8(compression_type))
+ fp.write(o16b(0)) # reserved by Palm
+
+ # now write colormap if necessary
+
+ if colormapsize > 0:
+ fp.write(o16b(256))
+ for i in range(256):
+ fp.write(o8(i))
+ if colormapmode == "RGB":
+ fp.write(
+ o8(colormap[3 * i])
+ + o8(colormap[3 * i + 1])
+ + o8(colormap[3 * i + 2])
+ )
+ elif colormapmode == "RGBA":
+ fp.write(
+ o8(colormap[4 * i])
+ + o8(colormap[4 * i + 1])
+ + o8(colormap[4 * i + 2])
+ )
+
+ # now convert data to raw form
+ ImageFile._save(im, fp, [("raw", (0, 0) + im.size, 0, (rawmode, rowbytes, 1))])
+
+ if hasattr(fp, "flush"):
+ fp.flush()
+
+
+#
+# --------------------------------------------------------------------
+
+Image.register_save("Palm", _save)
+
+Image.register_extension("Palm", ".palm")
+
+Image.register_mime("Palm", "image/palm")
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/PcdImagePlugin.py b/Backend/venv/lib/python3.12/site-packages/PIL/PcdImagePlugin.py
new file mode 100644
index 00000000..c7cbca8c
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/PIL/PcdImagePlugin.py
@@ -0,0 +1,62 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# PCD file handling
+#
+# History:
+# 96-05-10 fl Created
+# 96-05-27 fl Added draft mode (128x192, 256x384)
+#
+# Copyright (c) Secret Labs AB 1997.
+# Copyright (c) Fredrik Lundh 1996.
+#
+# See the README file for information on usage and redistribution.
+#
+
+
+from . import Image, ImageFile
+
+##
+# Image plugin for PhotoCD images. This plugin only reads the 768x512
+# image from the file; higher resolutions are encoded in a proprietary
+# encoding.
+
+
+class PcdImageFile(ImageFile.ImageFile):
+ format = "PCD"
+ format_description = "Kodak PhotoCD"
+
+ def _open(self):
+ # rough
+ self.fp.seek(2048)
+ s = self.fp.read(2048)
+
+ if s[:4] != b"PCD_":
+ msg = "not a PCD file"
+ raise SyntaxError(msg)
+
+ orientation = s[1538] & 3
+ self.tile_post_rotate = None
+ if orientation == 1:
+ self.tile_post_rotate = 90
+ elif orientation == 3:
+ self.tile_post_rotate = -90
+
+ self._mode = "RGB"
+ self._size = 768, 512 # FIXME: not correct for rotated images!
+ self.tile = [("pcd", (0, 0) + self.size, 96 * 2048, None)]
+
+ def load_end(self):
+ if self.tile_post_rotate:
+ # Handle rotated PCDs
+ self.im = self.im.rotate(self.tile_post_rotate)
+ self._size = self.im.size
+
+
+#
+# registry
+
+Image.register_open(PcdImageFile.format, PcdImageFile)
+
+Image.register_extension(PcdImageFile.format, ".pcd")
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/PcfFontFile.py b/Backend/venv/lib/python3.12/site-packages/PIL/PcfFontFile.py
new file mode 100644
index 00000000..8db5822f
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/PIL/PcfFontFile.py
@@ -0,0 +1,256 @@
+#
+# THIS IS WORK IN PROGRESS
+#
+# The Python Imaging Library
+# $Id$
+#
+# portable compiled font file parser
+#
+# history:
+# 1997-08-19 fl created
+# 2003-09-13 fl fixed loading of unicode fonts
+#
+# Copyright (c) 1997-2003 by Secret Labs AB.
+# Copyright (c) 1997-2003 by Fredrik Lundh.
+#
+# See the README file for information on usage and redistribution.
+#
+
+import io
+
+from . import FontFile, Image
+from ._binary import i8
+from ._binary import i16be as b16
+from ._binary import i16le as l16
+from ._binary import i32be as b32
+from ._binary import i32le as l32
+
+# --------------------------------------------------------------------
+# declarations
+
+PCF_MAGIC = 0x70636601 # "\x01fcp"
+
+PCF_PROPERTIES = 1 << 0
+PCF_ACCELERATORS = 1 << 1
+PCF_METRICS = 1 << 2
+PCF_BITMAPS = 1 << 3
+PCF_INK_METRICS = 1 << 4
+PCF_BDF_ENCODINGS = 1 << 5
+PCF_SWIDTHS = 1 << 6
+PCF_GLYPH_NAMES = 1 << 7
+PCF_BDF_ACCELERATORS = 1 << 8
+
+BYTES_PER_ROW = [
+ lambda bits: ((bits + 7) >> 3),
+ lambda bits: ((bits + 15) >> 3) & ~1,
+ lambda bits: ((bits + 31) >> 3) & ~3,
+ lambda bits: ((bits + 63) >> 3) & ~7,
+]
+
+
+def sz(s, o):
+ return s[o : s.index(b"\0", o)]
+
+
+class PcfFontFile(FontFile.FontFile):
+ """Font file plugin for the X11 PCF format."""
+
+ name = "name"
+
+ def __init__(self, fp, charset_encoding="iso8859-1"):
+ self.charset_encoding = charset_encoding
+
+ magic = l32(fp.read(4))
+ if magic != PCF_MAGIC:
+ msg = "not a PCF file"
+ raise SyntaxError(msg)
+
+ super().__init__()
+
+ count = l32(fp.read(4))
+ self.toc = {}
+ for i in range(count):
+ type = l32(fp.read(4))
+ self.toc[type] = l32(fp.read(4)), l32(fp.read(4)), l32(fp.read(4))
+
+ self.fp = fp
+
+ self.info = self._load_properties()
+
+ metrics = self._load_metrics()
+ bitmaps = self._load_bitmaps(metrics)
+ encoding = self._load_encoding()
+
+ #
+ # create glyph structure
+
+ for ch, ix in enumerate(encoding):
+ if ix is not None:
+ (
+ xsize,
+ ysize,
+ left,
+ right,
+ width,
+ ascent,
+ descent,
+ attributes,
+ ) = metrics[ix]
+ self.glyph[ch] = (
+ (width, 0),
+ (left, descent - ysize, xsize + left, descent),
+ (0, 0, xsize, ysize),
+ bitmaps[ix],
+ )
+
+ def _getformat(self, tag):
+ format, size, offset = self.toc[tag]
+
+ fp = self.fp
+ fp.seek(offset)
+
+ format = l32(fp.read(4))
+
+ if format & 4:
+ i16, i32 = b16, b32
+ else:
+ i16, i32 = l16, l32
+
+ return fp, format, i16, i32
+
+ def _load_properties(self):
+ #
+ # font properties
+
+ properties = {}
+
+ fp, format, i16, i32 = self._getformat(PCF_PROPERTIES)
+
+ nprops = i32(fp.read(4))
+
+ # read property description
+ p = []
+ for i in range(nprops):
+ p.append((i32(fp.read(4)), i8(fp.read(1)), i32(fp.read(4))))
+ if nprops & 3:
+ fp.seek(4 - (nprops & 3), io.SEEK_CUR) # pad
+
+ data = fp.read(i32(fp.read(4)))
+
+ for k, s, v in p:
+ k = sz(data, k)
+ if s:
+ v = sz(data, v)
+ properties[k] = v
+
+ return properties
+
+ def _load_metrics(self):
+ #
+ # font metrics
+
+ metrics = []
+
+ fp, format, i16, i32 = self._getformat(PCF_METRICS)
+
+ append = metrics.append
+
+ if (format & 0xFF00) == 0x100:
+ # "compressed" metrics
+ for i in range(i16(fp.read(2))):
+ left = i8(fp.read(1)) - 128
+ right = i8(fp.read(1)) - 128
+ width = i8(fp.read(1)) - 128
+ ascent = i8(fp.read(1)) - 128
+ descent = i8(fp.read(1)) - 128
+ xsize = right - left
+ ysize = ascent + descent
+ append((xsize, ysize, left, right, width, ascent, descent, 0))
+
+ else:
+ # "jumbo" metrics
+ for i in range(i32(fp.read(4))):
+ left = i16(fp.read(2))
+ right = i16(fp.read(2))
+ width = i16(fp.read(2))
+ ascent = i16(fp.read(2))
+ descent = i16(fp.read(2))
+ attributes = i16(fp.read(2))
+ xsize = right - left
+ ysize = ascent + descent
+ append((xsize, ysize, left, right, width, ascent, descent, attributes))
+
+ return metrics
+
+ def _load_bitmaps(self, metrics):
+ #
+ # bitmap data
+
+ bitmaps = []
+
+ fp, format, i16, i32 = self._getformat(PCF_BITMAPS)
+
+ nbitmaps = i32(fp.read(4))
+
+ if nbitmaps != len(metrics):
+ msg = "Wrong number of bitmaps"
+ raise OSError(msg)
+
+ offsets = []
+ for i in range(nbitmaps):
+ offsets.append(i32(fp.read(4)))
+
+ bitmap_sizes = []
+ for i in range(4):
+ bitmap_sizes.append(i32(fp.read(4)))
+
+ # byteorder = format & 4 # non-zero => MSB
+ bitorder = format & 8 # non-zero => MSB
+ padindex = format & 3
+
+ bitmapsize = bitmap_sizes[padindex]
+ offsets.append(bitmapsize)
+
+ data = fp.read(bitmapsize)
+
+ pad = BYTES_PER_ROW[padindex]
+ mode = "1;R"
+ if bitorder:
+ mode = "1"
+
+ for i in range(nbitmaps):
+ xsize, ysize = metrics[i][:2]
+ b, e = offsets[i : i + 2]
+ bitmaps.append(
+ Image.frombytes("1", (xsize, ysize), data[b:e], "raw", mode, pad(xsize))
+ )
+
+ return bitmaps
+
+ def _load_encoding(self):
+ fp, format, i16, i32 = self._getformat(PCF_BDF_ENCODINGS)
+
+ first_col, last_col = i16(fp.read(2)), i16(fp.read(2))
+ first_row, last_row = i16(fp.read(2)), i16(fp.read(2))
+
+ i16(fp.read(2)) # default
+
+ nencoding = (last_col - first_col + 1) * (last_row - first_row + 1)
+
+ # map character code to bitmap index
+ encoding = [None] * min(256, nencoding)
+
+ encoding_offsets = [i16(fp.read(2)) for _ in range(nencoding)]
+
+ for i in range(first_col, len(encoding)):
+ try:
+ encoding_offset = encoding_offsets[
+ ord(bytearray([i]).decode(self.charset_encoding))
+ ]
+ if encoding_offset != 0xFFFF:
+ encoding[i] = encoding_offset
+ except UnicodeDecodeError:
+ # character is not supported in selected encoding
+ pass
+
+ return encoding
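PcfFontFile is a FontFile subclass, so the usual workflow is to parse the PCF data and let the base class persist the result; a sketch assuming FontFile provides the customary save() that writes a .pil metrics file plus its companion bitmap (filenames hypothetical):

    from PIL import ImageFont
    from PIL.PcfFontFile import PcfFontFile

    with open("fixed.pcf", "rb") as fp:   # hypothetical PCF font
        font = PcfFontFile(fp)

    font.save("fixed.pil")                # writes fixed.pil and its bitmap
    pil_font = ImageFont.load("fixed.pil")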
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/PcxImagePlugin.py b/Backend/venv/lib/python3.12/site-packages/PIL/PcxImagePlugin.py
new file mode 100644
index 00000000..854d9e83
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/PIL/PcxImagePlugin.py
@@ -0,0 +1,221 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# PCX file handling
+#
+# This format was originally used by ZSoft's popular PaintBrush
+# program for the IBM PC. It is also supported by many MS-DOS and
+# Windows applications, including the Windows PaintBrush program in
+# Windows 3.
+#
+# history:
+# 1995-09-01 fl Created
+# 1996-05-20 fl Fixed RGB support
+# 1997-01-03 fl Fixed 2-bit and 4-bit support
+# 1999-02-03 fl Fixed 8-bit support (broken in 1.0b1)
+# 1999-02-07 fl Added write support
+# 2002-06-09 fl Made 2-bit and 4-bit support a bit more robust
+# 2002-07-30 fl Seek from current position, not beginning of file
+# 2003-06-03 fl Extract DPI settings (info["dpi"])
+#
+# Copyright (c) 1997-2003 by Secret Labs AB.
+# Copyright (c) 1995-2003 by Fredrik Lundh.
+#
+# See the README file for information on usage and redistribution.
+#
+
+import io
+import logging
+
+from . import Image, ImageFile, ImagePalette
+from ._binary import i16le as i16
+from ._binary import o8
+from ._binary import o16le as o16
+
+logger = logging.getLogger(__name__)
+
+
+def _accept(prefix):
+ return prefix[0] == 10 and prefix[1] in [0, 2, 3, 5]
+
+
+##
+# Image plugin for Paintbrush images.
+
+
+class PcxImageFile(ImageFile.ImageFile):
+ format = "PCX"
+ format_description = "Paintbrush"
+
+ def _open(self):
+ # header
+ s = self.fp.read(128)
+ if not _accept(s):
+ msg = "not a PCX file"
+ raise SyntaxError(msg)
+
+ # image
+ bbox = i16(s, 4), i16(s, 6), i16(s, 8) + 1, i16(s, 10) + 1
+ if bbox[2] <= bbox[0] or bbox[3] <= bbox[1]:
+ msg = "bad PCX image size"
+ raise SyntaxError(msg)
+ logger.debug("BBox: %s %s %s %s", *bbox)
+
+ # format
+ version = s[1]
+ bits = s[3]
+ planes = s[65]
+ provided_stride = i16(s, 66)
+ logger.debug(
+ "PCX version %s, bits %s, planes %s, stride %s",
+ version,
+ bits,
+ planes,
+ provided_stride,
+ )
+
+ self.info["dpi"] = i16(s, 12), i16(s, 14)
+
+ if bits == 1 and planes == 1:
+ mode = rawmode = "1"
+
+ elif bits == 1 and planes in (2, 4):
+ mode = "P"
+ rawmode = "P;%dL" % planes
+ self.palette = ImagePalette.raw("RGB", s[16:64])
+
+ elif version == 5 and bits == 8 and planes == 1:
+ mode = rawmode = "L"
+ # FIXME: hey, this doesn't work with the incremental loader !!!
+ self.fp.seek(-769, io.SEEK_END)
+ s = self.fp.read(769)
+ if len(s) == 769 and s[0] == 12:
+ # check if the palette is linear greyscale
+ for i in range(256):
+ if s[i * 3 + 1 : i * 3 + 4] != o8(i) * 3:
+ mode = rawmode = "P"
+ break
+ if mode == "P":
+ self.palette = ImagePalette.raw("RGB", s[1:])
+ self.fp.seek(128)
+
+ elif version == 5 and bits == 8 and planes == 3:
+ mode = "RGB"
+ rawmode = "RGB;L"
+
+ else:
+ msg = "unknown PCX mode"
+ raise OSError(msg)
+
+ self._mode = mode
+ self._size = bbox[2] - bbox[0], bbox[3] - bbox[1]
+
+ # Don't trust the passed in stride.
+ # Calculate the approximate position for ourselves.
+ # CVE-2020-35653
+ stride = (self._size[0] * bits + 7) // 8
+
+ # While the specification states that this must be even,
+ # not all images follow this
+ if provided_stride != stride:
+ stride += stride % 2
+
+ bbox = (0, 0) + self.size
+ logger.debug("size: %sx%s", *self.size)
+
+ self.tile = [("pcx", bbox, self.fp.tell(), (rawmode, planes * stride))]
+
+
+# --------------------------------------------------------------------
+# save PCX files
+
+
+SAVE = {
+ # mode: (version, bits, planes, raw mode)
+ "1": (2, 1, 1, "1"),
+ "L": (5, 8, 1, "L"),
+ "P": (5, 8, 1, "P"),
+ "RGB": (5, 8, 3, "RGB;L"),
+}
+
+
+def _save(im, fp, filename):
+ try:
+ version, bits, planes, rawmode = SAVE[im.mode]
+ except KeyError as e:
+ msg = f"Cannot save {im.mode} images as PCX"
+ raise ValueError(msg) from e
+
+ # bytes per plane
+ stride = (im.size[0] * bits + 7) // 8
+ # stride should be even
+ stride += stride % 2
+ # Stride needs to be kept in sync with the PcxEncode.c version.
+ # Ideally it should be passed in in the state, but the bytes value
+ # gets overwritten.
+
+ logger.debug(
+ "PcxImagePlugin._save: xwidth: %d, bits: %d, stride: %d",
+ im.size[0],
+ bits,
+ stride,
+ )
+
+ # under windows, we could determine the current screen size with
+ # "Image.core.display_mode()[1]", but I think that's overkill...
+
+ screen = im.size
+
+ dpi = 100, 100
+
+ # PCX header
+ fp.write(
+ o8(10)
+ + o8(version)
+ + o8(1)
+ + o8(bits)
+ + o16(0)
+ + o16(0)
+ + o16(im.size[0] - 1)
+ + o16(im.size[1] - 1)
+ + o16(dpi[0])
+ + o16(dpi[1])
+ + b"\0" * 24
+ + b"\xFF" * 24
+ + b"\0"
+ + o8(planes)
+ + o16(stride)
+ + o16(1)
+ + o16(screen[0])
+ + o16(screen[1])
+ + b"\0" * 54
+ )
+
+ assert fp.tell() == 128
+
+ ImageFile._save(im, fp, [("pcx", (0, 0) + im.size, 0, (rawmode, bits * planes))])
+
+ if im.mode == "P":
+ # colour palette
+ fp.write(o8(12))
+ palette = im.im.getpalette("RGB", "RGB")
+ palette += b"\x00" * (768 - len(palette))
+ fp.write(palette) # 768 bytes
+ elif im.mode == "L":
+ # greyscale palette
+ fp.write(o8(12))
+ for i in range(256):
+ fp.write(o8(i) * 3)
+
+
+# --------------------------------------------------------------------
+# registry
+
+
+Image.register_open(PcxImageFile.format, PcxImageFile, _accept)
+Image.register_save(PcxImageFile.format, _save)
+
+Image.register_extension(PcxImageFile.format, ".pcx")
+
+Image.register_mime(PcxImageFile.format, "image/x-pcx")
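A round trip through the writer and reader above, with hypothetical filenames; per the SAVE table, RGB images go out as version 5, 8 bits, 3 planes, and the reader recovers the (100, 100) dpi the writer hardcodes:

    from PIL import Image

    im = Image.open("photo.png").convert("RGB")    # hypothetical input
    im.save("photo.pcx")

    back = Image.open("photo.pcx")
    print(back.mode, back.size, back.info["dpi"])  # RGB (w, h) (100, 100)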
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/PdfImagePlugin.py b/Backend/venv/lib/python3.12/site-packages/PIL/PdfImagePlugin.py
new file mode 100644
index 00000000..09fc0c7e
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/PIL/PdfImagePlugin.py
@@ -0,0 +1,302 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# PDF (Acrobat) file handling
+#
+# History:
+# 1996-07-16 fl Created
+# 1997-01-18 fl Fixed header
+# 2004-02-21 fl Fixes for 1/L/CMYK images, etc.
+# 2004-02-24 fl Fixes for 1 and P images.
+#
+# Copyright (c) 1997-2004 by Secret Labs AB. All rights reserved.
+# Copyright (c) 1996-1997 by Fredrik Lundh.
+#
+# See the README file for information on usage and redistribution.
+#
+
+##
+# Image plugin for PDF images (output only).
+##
+
+import io
+import math
+import os
+import time
+
+from . import Image, ImageFile, ImageSequence, PdfParser, __version__, features
+
+#
+# --------------------------------------------------------------------
+
+# object ids:
+# 1. catalogue
+# 2. pages
+# 3. image
+# 4. page
+# 5. page contents
+
+
+def _save_all(im, fp, filename):
+ _save(im, fp, filename, save_all=True)
+
+
+##
+# (Internal) Image save plugin for the PDF format.
+
+
+def _write_image(im, filename, existing_pdf, image_refs):
+ # FIXME: Should replace ASCIIHexDecode with RunLengthDecode
+ # (packbits) or LZWDecode (tiff/lzw compression). Note that
+ # PDF 1.2 also supports Flatedecode (zip compression).
+
+ params = None
+ decode = None
+
+ #
+ # Get image characteristics
+
+ width, height = im.size
+
+ dict_obj = {"BitsPerComponent": 8}
+ if im.mode == "1":
+ if features.check("libtiff"):
+ filter = "CCITTFaxDecode"
+ dict_obj["BitsPerComponent"] = 1
+ params = PdfParser.PdfArray(
+ [
+ PdfParser.PdfDict(
+ {
+ "K": -1,
+ "BlackIs1": True,
+ "Columns": width,
+ "Rows": height,
+ }
+ )
+ ]
+ )
+ else:
+ filter = "DCTDecode"
+ dict_obj["ColorSpace"] = PdfParser.PdfName("DeviceGray")
+ procset = "ImageB" # grayscale
+ elif im.mode == "L":
+ filter = "DCTDecode"
+ # params = f"<< /Predictor 15 /Columns {width-2} >>"
+ dict_obj["ColorSpace"] = PdfParser.PdfName("DeviceGray")
+ procset = "ImageB" # grayscale
+ elif im.mode == "LA":
+ filter = "JPXDecode"
+ # params = f"<< /Predictor 15 /Columns {width-2} >>"
+ procset = "ImageB" # grayscale
+ dict_obj["SMaskInData"] = 1
+ elif im.mode == "P":
+ filter = "ASCIIHexDecode"
+ palette = im.getpalette()
+ dict_obj["ColorSpace"] = [
+ PdfParser.PdfName("Indexed"),
+ PdfParser.PdfName("DeviceRGB"),
+ 255,
+ PdfParser.PdfBinary(palette),
+ ]
+ procset = "ImageI" # indexed color
+
+ if "transparency" in im.info:
+ smask = im.convert("LA").getchannel("A")
+ smask.encoderinfo = {}
+
+ image_ref = _write_image(smask, filename, existing_pdf, image_refs)[0]
+ dict_obj["SMask"] = image_ref
+ elif im.mode == "RGB":
+ filter = "DCTDecode"
+ dict_obj["ColorSpace"] = PdfParser.PdfName("DeviceRGB")
+ procset = "ImageC" # color images
+ elif im.mode == "RGBA":
+ filter = "JPXDecode"
+ procset = "ImageC" # color images
+ dict_obj["SMaskInData"] = 1
+ elif im.mode == "CMYK":
+ filter = "DCTDecode"
+ dict_obj["ColorSpace"] = PdfParser.PdfName("DeviceCMYK")
+ procset = "ImageC" # color images
+ decode = [1, 0, 1, 0, 1, 0, 1, 0]
+ else:
+ msg = f"cannot save mode {im.mode}"
+ raise ValueError(msg)
+
+ #
+ # image
+
+ op = io.BytesIO()
+
+ if filter == "ASCIIHexDecode":
+ ImageFile._save(im, op, [("hex", (0, 0) + im.size, 0, im.mode)])
+ elif filter == "CCITTFaxDecode":
+ im.save(
+ op,
+ "TIFF",
+ compression="group4",
+ # use a single strip
+ strip_size=math.ceil(width / 8) * height,
+ )
+ elif filter == "DCTDecode":
+ Image.SAVE["JPEG"](im, op, filename)
+ elif filter == "JPXDecode":
+ del dict_obj["BitsPerComponent"]
+ Image.SAVE["JPEG2000"](im, op, filename)
+ else:
+ msg = f"unsupported PDF filter ({filter})"
+ raise ValueError(msg)
+
+ stream = op.getvalue()
+ if filter == "CCITTFaxDecode":
+ stream = stream[8:]
+ filter = PdfParser.PdfArray([PdfParser.PdfName(filter)])
+ else:
+ filter = PdfParser.PdfName(filter)
+
+ image_ref = image_refs.pop(0)
+ existing_pdf.write_obj(
+ image_ref,
+ stream=stream,
+ Type=PdfParser.PdfName("XObject"),
+ Subtype=PdfParser.PdfName("Image"),
+ Width=width, # * 72.0 / x_resolution,
+ Height=height, # * 72.0 / y_resolution,
+ Filter=filter,
+ Decode=decode,
+ DecodeParms=params,
+ **dict_obj,
+ )
+
+ return image_ref, procset
+
+
+def _save(im, fp, filename, save_all=False):
+ is_appending = im.encoderinfo.get("append", False)
+ if is_appending:
+ existing_pdf = PdfParser.PdfParser(f=fp, filename=filename, mode="r+b")
+ else:
+ existing_pdf = PdfParser.PdfParser(f=fp, filename=filename, mode="w+b")
+
+ dpi = im.encoderinfo.get("dpi")
+ if dpi:
+ x_resolution = dpi[0]
+ y_resolution = dpi[1]
+ else:
+ x_resolution = y_resolution = im.encoderinfo.get("resolution", 72.0)
+
+ info = {
+ "title": None
+ if is_appending
+ else os.path.splitext(os.path.basename(filename))[0],
+ "author": None,
+ "subject": None,
+ "keywords": None,
+ "creator": None,
+ "producer": None,
+ "creationDate": None if is_appending else time.gmtime(),
+ "modDate": None if is_appending else time.gmtime(),
+ }
+ for k, default in info.items():
+ v = im.encoderinfo.get(k) if k in im.encoderinfo else default
+ if v:
+ existing_pdf.info[k[0].upper() + k[1:]] = v
+
+ #
+ # make sure image data is available
+ im.load()
+
+ existing_pdf.start_writing()
+ existing_pdf.write_header()
+ existing_pdf.write_comment(f"created by Pillow {__version__} PDF driver")
+
+ #
+ # pages
+ ims = [im]
+ if save_all:
+ append_images = im.encoderinfo.get("append_images", [])
+ for append_im in append_images:
+ append_im.encoderinfo = im.encoderinfo.copy()
+ ims.append(append_im)
+ number_of_pages = 0
+ image_refs = []
+ page_refs = []
+ contents_refs = []
+ for im in ims:
+ im_number_of_pages = 1
+ if save_all:
+ try:
+ im_number_of_pages = im.n_frames
+ except AttributeError:
+ # Image format does not have n_frames.
+ # It is a single frame image
+ pass
+ number_of_pages += im_number_of_pages
+ for i in range(im_number_of_pages):
+ image_refs.append(existing_pdf.next_object_id(0))
+ if im.mode == "P" and "transparency" in im.info:
+ image_refs.append(existing_pdf.next_object_id(0))
+
+ page_refs.append(existing_pdf.next_object_id(0))
+ contents_refs.append(existing_pdf.next_object_id(0))
+ existing_pdf.pages.append(page_refs[-1])
+
+ #
+ # catalog and list of pages
+ existing_pdf.write_catalog()
+
+ page_number = 0
+ for im_sequence in ims:
+ im_pages = ImageSequence.Iterator(im_sequence) if save_all else [im_sequence]
+ for im in im_pages:
+ image_ref, procset = _write_image(im, filename, existing_pdf, image_refs)
+
+ #
+ # page
+
+ existing_pdf.write_page(
+ page_refs[page_number],
+ Resources=PdfParser.PdfDict(
+ ProcSet=[PdfParser.PdfName("PDF"), PdfParser.PdfName(procset)],
+ XObject=PdfParser.PdfDict(image=image_ref),
+ ),
+ MediaBox=[
+ 0,
+ 0,
+ im.width * 72.0 / x_resolution,
+ im.height * 72.0 / y_resolution,
+ ],
+ Contents=contents_refs[page_number],
+ )
+
+ #
+ # page contents
+
+ page_contents = b"q %f 0 0 %f 0 0 cm /image Do Q\n" % (
+ im.width * 72.0 / x_resolution,
+ im.height * 72.0 / y_resolution,
+ )
+
+ existing_pdf.write_obj(contents_refs[page_number], stream=page_contents)
+
+ page_number += 1
+
+ #
+ # trailer
+ existing_pdf.write_xref_and_trailer()
+ if hasattr(fp, "flush"):
+ fp.flush()
+ existing_pdf.close()
+
+
+#
+# --------------------------------------------------------------------
+
+
+Image.register_save("PDF", _save)
+Image.register_save_all("PDF", _save_all)
+
+Image.register_extension("PDF", ".pdf")
+
+Image.register_mime("PDF", "application/pdf")
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/PdfParser.py b/Backend/venv/lib/python3.12/site-packages/PIL/PdfParser.py
new file mode 100644
index 00000000..dc1012f5
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/PIL/PdfParser.py
@@ -0,0 +1,996 @@
+import calendar
+import codecs
+import collections
+import mmap
+import os
+import re
+import time
+import zlib
+
+
+# see 7.9.2.2 Text String Type on page 86 and D.3 PDFDocEncoding Character Set
+# on page 656
+def encode_text(s):
+ return codecs.BOM_UTF16_BE + s.encode("utf_16_be")
+
+
+PDFDocEncoding = {
+ 0x16: "\u0017",
+ 0x18: "\u02D8",
+ 0x19: "\u02C7",
+ 0x1A: "\u02C6",
+ 0x1B: "\u02D9",
+ 0x1C: "\u02DD",
+ 0x1D: "\u02DB",
+ 0x1E: "\u02DA",
+ 0x1F: "\u02DC",
+ 0x80: "\u2022",
+ 0x81: "\u2020",
+ 0x82: "\u2021",
+ 0x83: "\u2026",
+ 0x84: "\u2014",
+ 0x85: "\u2013",
+ 0x86: "\u0192",
+ 0x87: "\u2044",
+ 0x88: "\u2039",
+ 0x89: "\u203A",
+ 0x8A: "\u2212",
+ 0x8B: "\u2030",
+ 0x8C: "\u201E",
+ 0x8D: "\u201C",
+ 0x8E: "\u201D",
+ 0x8F: "\u2018",
+ 0x90: "\u2019",
+ 0x91: "\u201A",
+ 0x92: "\u2122",
+ 0x93: "\uFB01",
+ 0x94: "\uFB02",
+ 0x95: "\u0141",
+ 0x96: "\u0152",
+ 0x97: "\u0160",
+ 0x98: "\u0178",
+ 0x99: "\u017D",
+ 0x9A: "\u0131",
+ 0x9B: "\u0142",
+ 0x9C: "\u0153",
+ 0x9D: "\u0161",
+ 0x9E: "\u017E",
+ 0xA0: "\u20AC",
+}
+
+
+def decode_text(b):
+ if b[: len(codecs.BOM_UTF16_BE)] == codecs.BOM_UTF16_BE:
+ return b[len(codecs.BOM_UTF16_BE) :].decode("utf_16_be")
+ else:
+ return "".join(PDFDocEncoding.get(byte, chr(byte)) for byte in b)
+
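+# A quick illustration of the two text paths above: encode_text always emits
+# UTF-16BE behind a BOM and decode_text inverts it, while byte strings with
+# no BOM fall back to PDFDocEncoding (0x92 maps to U+2122 in the table), so
+#
+#     decode_text(encode_text("naïve")) == "naïve"
+#     decode_text(b"PIL\x92") == "PIL\u2122"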
+
+class PdfFormatError(RuntimeError):
+ """An error that probably indicates a syntactic or semantic error in the
+ PDF file structure"""
+
+ pass
+
+
+def check_format_condition(condition, error_message):
+ if not condition:
+ raise PdfFormatError(error_message)
+
+
+class IndirectReference(
+ collections.namedtuple("IndirectReferenceTuple", ["object_id", "generation"])
+):
+ def __str__(self):
+ return "%s %s R" % self
+
+ def __bytes__(self):
+ return self.__str__().encode("us-ascii")
+
+ def __eq__(self, other):
+ return (
+ other.__class__ is self.__class__
+ and other.object_id == self.object_id
+ and other.generation == self.generation
+ )
+
+ def __ne__(self, other):
+ return not (self == other)
+
+ def __hash__(self):
+ return hash((self.object_id, self.generation))
+
+
+class IndirectObjectDef(IndirectReference):
+ def __str__(self):
+ return "%s %s obj" % self
+
+
+class XrefTable:
+ def __init__(self):
+ self.existing_entries = {} # object ID => (offset, generation)
+ self.new_entries = {} # object ID => (offset, generation)
+ self.deleted_entries = {0: 65536} # object ID => generation
+ self.reading_finished = False
+
+ def __setitem__(self, key, value):
+ if self.reading_finished:
+ self.new_entries[key] = value
+ else:
+ self.existing_entries[key] = value
+ if key in self.deleted_entries:
+ del self.deleted_entries[key]
+
+ def __getitem__(self, key):
+ try:
+ return self.new_entries[key]
+ except KeyError:
+ return self.existing_entries[key]
+
+ def __delitem__(self, key):
+ if key in self.new_entries:
+ generation = self.new_entries[key][1] + 1
+ del self.new_entries[key]
+ self.deleted_entries[key] = generation
+ elif key in self.existing_entries:
+ generation = self.existing_entries[key][1] + 1
+ self.deleted_entries[key] = generation
+ elif key in self.deleted_entries:
+ generation = self.deleted_entries[key]
+ else:
+ msg = (
+ "object ID " + str(key) + " cannot be deleted because it doesn't exist"
+ )
+ raise IndexError(msg)
+
+ def __contains__(self, key):
+ return key in self.existing_entries or key in self.new_entries
+
+ def __len__(self):
+ return len(
+ set(self.existing_entries.keys())
+ | set(self.new_entries.keys())
+ | set(self.deleted_entries.keys())
+ )
+
+ def keys(self):
+ return (
+ set(self.existing_entries.keys()) - set(self.deleted_entries.keys())
+ ) | set(self.new_entries.keys())
+
+ def write(self, f):
+ keys = sorted(set(self.new_entries.keys()) | set(self.deleted_entries.keys()))
+ deleted_keys = sorted(set(self.deleted_entries.keys()))
+ startxref = f.tell()
+ f.write(b"xref\n")
+ while keys:
+ # find a contiguous sequence of object IDs
+ prev = None
+ for index, key in enumerate(keys):
+ if prev is None or prev + 1 == key:
+ prev = key
+ else:
+ contiguous_keys = keys[:index]
+ keys = keys[index:]
+ break
+ else:
+ contiguous_keys = keys
+ keys = None
+ f.write(b"%d %d\n" % (contiguous_keys[0], len(contiguous_keys)))
+ for object_id in contiguous_keys:
+ if object_id in self.new_entries:
+ f.write(b"%010d %05d n \n" % self.new_entries[object_id])
+ else:
+ this_deleted_object_id = deleted_keys.pop(0)
+ check_format_condition(
+ object_id == this_deleted_object_id,
+ f"expected the next deleted object ID to be {object_id}, "
+ f"instead found {this_deleted_object_id}",
+ )
+ try:
+ next_in_linked_list = deleted_keys[0]
+ except IndexError:
+ next_in_linked_list = 0
+ f.write(
+ b"%010d %05d f \n"
+ % (next_in_linked_list, self.deleted_entries[object_id])
+ )
+ return startxref
+
+
+class PdfName:
+ def __init__(self, name):
+ if isinstance(name, PdfName):
+ self.name = name.name
+ elif isinstance(name, bytes):
+ self.name = name
+ else:
+ self.name = name.encode("us-ascii")
+
+ def name_as_str(self):
+ return self.name.decode("us-ascii")
+
+ def __eq__(self, other):
+ return (
+ isinstance(other, PdfName) and other.name == self.name
+ ) or other == self.name
+
+ def __hash__(self):
+ return hash(self.name)
+
+ def __repr__(self):
+ return f"PdfName({repr(self.name)})"
+
+ @classmethod
+ def from_pdf_stream(cls, data):
+ return cls(PdfParser.interpret_name(data))
+
+ allowed_chars = set(range(33, 127)) - {ord(c) for c in "#%/()<>[]{}"}
+
+ def __bytes__(self):
+ result = bytearray(b"/")
+ for b in self.name:
+ if b in self.allowed_chars:
+ result.append(b)
+ else:
+ result.extend(b"#%02X" % b)
+ return bytes(result)
+
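+# Serialization example: bytes outside allowed_chars are written as two-digit
+# hex escapes, so bytes(PdfName("A B")) == b"/A#20B".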
+
+class PdfArray(list):
+ def __bytes__(self):
+ return b"[ " + b" ".join(pdf_repr(x) for x in self) + b" ]"
+
+
+class PdfDict(collections.UserDict):
+ def __setattr__(self, key, value):
+ if key == "data":
+ collections.UserDict.__setattr__(self, key, value)
+ else:
+ self[key.encode("us-ascii")] = value
+
+ def __getattr__(self, key):
+ try:
+ value = self[key.encode("us-ascii")]
+ except KeyError as e:
+ raise AttributeError(key) from e
+ if isinstance(value, bytes):
+ value = decode_text(value)
+ if key.endswith("Date"):
+ if value.startswith("D:"):
+ value = value[2:]
+
+ relationship = "Z"
+ if len(value) > 17:
+ relationship = value[14]
+ offset = int(value[15:17]) * 60
+ if len(value) > 20:
+ offset += int(value[18:20])
+
+ format = "%Y%m%d%H%M%S"[: len(value) - 2]
+ value = time.strptime(value[: len(format) + 2], format)
+ if relationship in ["+", "-"]:
+ offset *= 60
+ if relationship == "+":
+ offset *= -1
+ value = time.gmtime(calendar.timegm(value) + offset)
+ return value
+
+ def __bytes__(self):
+ out = bytearray(b"<<")
+ for key, value in self.items():
+ if value is None:
+ continue
+ value = pdf_repr(value)
+ out.extend(b"\n")
+ out.extend(bytes(PdfName(key)))
+ out.extend(b" ")
+ out.extend(value)
+ out.extend(b"\n>>")
+ return bytes(out)
+
+
+class PdfBinary:
+ def __init__(self, data):
+ self.data = data
+
+ def __bytes__(self):
+ return b"<%s>" % b"".join(b"%02X" % b for b in self.data)
+
+
+class PdfStream:
+ def __init__(self, dictionary, buf):
+ self.dictionary = dictionary
+ self.buf = buf
+
+ def decode(self):
+ try:
+ filter = self.dictionary.Filter
+ except AttributeError:
+ return self.buf
+ if filter == b"FlateDecode":
+ try:
+ expected_length = self.dictionary.DL
+ except AttributeError:
+ expected_length = self.dictionary.Length
+ return zlib.decompress(self.buf, bufsize=int(expected_length))
+ else:
+ msg = f"stream filter {repr(self.dictionary.Filter)} unknown/unsupported"
+ raise NotImplementedError(msg)
+
+
+def pdf_repr(x):
+ if x is True:
+ return b"true"
+ elif x is False:
+ return b"false"
+ elif x is None:
+ return b"null"
+ elif isinstance(x, (PdfName, PdfDict, PdfArray, PdfBinary)):
+ return bytes(x)
+ elif isinstance(x, (int, float)):
+ return str(x).encode("us-ascii")
+ elif isinstance(x, time.struct_time):
+ return b"(D:" + time.strftime("%Y%m%d%H%M%SZ", x).encode("us-ascii") + b")"
+ elif isinstance(x, dict):
+ return bytes(PdfDict(x))
+ elif isinstance(x, list):
+ return bytes(PdfArray(x))
+ elif isinstance(x, str):
+ return pdf_repr(encode_text(x))
+ elif isinstance(x, bytes):
+ # XXX escape more chars? handle binary garbage
+ x = x.replace(b"\\", b"\\\\")
+ x = x.replace(b"(", b"\\(")
+ x = x.replace(b")", b"\\)")
+ return b"(" + x + b")"
+ else:
+ return bytes(x)
+
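+# pdf_repr maps Python values onto PDF syntax, for example:
+#
+#     pdf_repr(True)          -> b"true"
+#     pdf_repr([1, 2.5])      -> b"[ 1 2.5 ]"
+#     pdf_repr({b"Size": 7})  -> b"<<\n/Size 7\n>>"
+#     pdf_repr("text")        -> a (...) literal holding BOM + UTF-16BE bytes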
+
+class PdfParser:
+ """Based on
+ https://www.adobe.com/content/dam/acom/en/devnet/acrobat/pdfs/PDF32000_2008.pdf
+ Supports PDF up to 1.4
+ """
+
+ def __init__(self, filename=None, f=None, buf=None, start_offset=0, mode="rb"):
+ if buf and f:
+ msg = "specify buf or f or filename, but not both buf and f"
+ raise RuntimeError(msg)
+ self.filename = filename
+ self.buf = buf
+ self.f = f
+ self.start_offset = start_offset
+ self.should_close_buf = False
+ self.should_close_file = False
+ if filename is not None and f is None:
+ self.f = f = open(filename, mode)
+ self.should_close_file = True
+ if f is not None:
+ self.buf = buf = self.get_buf_from_file(f)
+ self.should_close_buf = True
+ if not filename and hasattr(f, "name"):
+ self.filename = f.name
+ self.cached_objects = {}
+ if buf:
+ self.read_pdf_info()
+ else:
+ self.file_size_total = self.file_size_this = 0
+ self.root = PdfDict()
+ self.root_ref = None
+ self.info = PdfDict()
+ self.info_ref = None
+ self.page_tree_root = {}
+ self.pages = []
+ self.orig_pages = []
+ self.pages_ref = None
+ self.last_xref_section_offset = None
+ self.trailer_dict = {}
+ self.xref_table = XrefTable()
+ self.xref_table.reading_finished = True
+ if f:
+ self.seek_end()
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_value, traceback):
+ self.close()
+ return False # do not suppress exceptions
+
+ def start_writing(self):
+ self.close_buf()
+ self.seek_end()
+
+ def close_buf(self):
+ try:
+ self.buf.close()
+ except AttributeError:
+ pass
+ self.buf = None
+
+ def close(self):
+ if self.should_close_buf:
+ self.close_buf()
+ if self.f is not None and self.should_close_file:
+ self.f.close()
+ self.f = None
+
+ def seek_end(self):
+ self.f.seek(0, os.SEEK_END)
+
+ def write_header(self):
+ self.f.write(b"%PDF-1.4\n")
+
+ def write_comment(self, s):
+ self.f.write(f"% {s}\n".encode())
+
+ def write_catalog(self):
+ self.del_root()
+ self.root_ref = self.next_object_id(self.f.tell())
+ self.pages_ref = self.next_object_id(0)
+ self.rewrite_pages()
+ self.write_obj(self.root_ref, Type=PdfName(b"Catalog"), Pages=self.pages_ref)
+ self.write_obj(
+ self.pages_ref,
+ Type=PdfName(b"Pages"),
+ Count=len(self.pages),
+ Kids=self.pages,
+ )
+ return self.root_ref
+
+ def rewrite_pages(self):
+ pages_tree_nodes_to_delete = []
+ for i, page_ref in enumerate(self.orig_pages):
+ page_info = self.cached_objects[page_ref]
+ del self.xref_table[page_ref.object_id]
+ pages_tree_nodes_to_delete.append(page_info[PdfName(b"Parent")])
+ if page_ref not in self.pages:
+ # the page has been deleted
+ continue
+ # make dict keys into strings for passing to write_page
+ stringified_page_info = {}
+ for key, value in page_info.items():
+ # key should be a PdfName
+ stringified_page_info[key.name_as_str()] = value
+ stringified_page_info["Parent"] = self.pages_ref
+ new_page_ref = self.write_page(None, **stringified_page_info)
+ for j, cur_page_ref in enumerate(self.pages):
+ if cur_page_ref == page_ref:
+ # replace the page reference with the new one
+ self.pages[j] = new_page_ref
+ # delete redundant Pages tree nodes from xref table
+ for pages_tree_node_ref in pages_tree_nodes_to_delete:
+ while pages_tree_node_ref:
+ pages_tree_node = self.cached_objects[pages_tree_node_ref]
+ if pages_tree_node_ref.object_id in self.xref_table:
+ del self.xref_table[pages_tree_node_ref.object_id]
+ pages_tree_node_ref = pages_tree_node.get(b"Parent", None)
+ self.orig_pages = []
+
+ def write_xref_and_trailer(self, new_root_ref=None):
+ if new_root_ref:
+ self.del_root()
+ self.root_ref = new_root_ref
+ if self.info:
+ self.info_ref = self.write_obj(None, self.info)
+ start_xref = self.xref_table.write(self.f)
+ num_entries = len(self.xref_table)
+ trailer_dict = {b"Root": self.root_ref, b"Size": num_entries}
+ if self.last_xref_section_offset is not None:
+ trailer_dict[b"Prev"] = self.last_xref_section_offset
+ if self.info:
+ trailer_dict[b"Info"] = self.info_ref
+ self.last_xref_section_offset = start_xref
+ self.f.write(
+ b"trailer\n"
+ + bytes(PdfDict(trailer_dict))
+ + b"\nstartxref\n%d\n%%%%EOF" % start_xref
+ )
+
+ def write_page(self, ref, *objs, **dict_obj):
+ if isinstance(ref, int):
+ ref = self.pages[ref]
+ if "Type" not in dict_obj:
+ dict_obj["Type"] = PdfName(b"Page")
+ if "Parent" not in dict_obj:
+ dict_obj["Parent"] = self.pages_ref
+ return self.write_obj(ref, *objs, **dict_obj)
+
+ def write_obj(self, ref, *objs, **dict_obj):
+ f = self.f
+ if ref is None:
+ ref = self.next_object_id(f.tell())
+ else:
+ self.xref_table[ref.object_id] = (f.tell(), ref.generation)
+ f.write(bytes(IndirectObjectDef(*ref)))
+ stream = dict_obj.pop("stream", None)
+ if stream is not None:
+ dict_obj["Length"] = len(stream)
+ if dict_obj:
+ f.write(pdf_repr(dict_obj))
+ for obj in objs:
+ f.write(pdf_repr(obj))
+ if stream is not None:
+ f.write(b"stream\n")
+ f.write(stream)
+ f.write(b"\nendstream\n")
+ f.write(b"endobj\n")
+ return ref
+
+ def del_root(self):
+ if self.root_ref is None:
+ return
+ del self.xref_table[self.root_ref.object_id]
+ del self.xref_table[self.root[b"Pages"].object_id]
+
+ @staticmethod
+ def get_buf_from_file(f):
+ if hasattr(f, "getbuffer"):
+ return f.getbuffer()
+ elif hasattr(f, "getvalue"):
+ return f.getvalue()
+ else:
+ try:
+ return mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_READ)
+ except ValueError: # cannot mmap an empty file
+ return b""
+
+ def read_pdf_info(self):
+ self.file_size_total = len(self.buf)
+ self.file_size_this = self.file_size_total - self.start_offset
+ self.read_trailer()
+ self.root_ref = self.trailer_dict[b"Root"]
+ self.info_ref = self.trailer_dict.get(b"Info", None)
+ self.root = PdfDict(self.read_indirect(self.root_ref))
+ if self.info_ref is None:
+ self.info = PdfDict()
+ else:
+ self.info = PdfDict(self.read_indirect(self.info_ref))
+ check_format_condition(b"Type" in self.root, "/Type missing in Root")
+ check_format_condition(
+ self.root[b"Type"] == b"Catalog", "/Type in Root is not /Catalog"
+ )
+ check_format_condition(b"Pages" in self.root, "/Pages missing in Root")
+ check_format_condition(
+ isinstance(self.root[b"Pages"], IndirectReference),
+ "/Pages in Root is not an indirect reference",
+ )
+ self.pages_ref = self.root[b"Pages"]
+ self.page_tree_root = self.read_indirect(self.pages_ref)
+ self.pages = self.linearize_page_tree(self.page_tree_root)
+ # save the original list of page references
+ # in case the user modifies, adds or deletes some pages
+ # and we need to rewrite the pages and their list
+ self.orig_pages = self.pages[:]
+
+ def next_object_id(self, offset=None):
+ try:
+ # TODO: support reuse of deleted objects
+ reference = IndirectReference(max(self.xref_table.keys()) + 1, 0)
+ except ValueError:
+ reference = IndirectReference(1, 0)
+ if offset is not None:
+ self.xref_table[reference.object_id] = (offset, 0)
+ return reference
+
+ delimiter = rb"[][()<>{}/%]"
+ delimiter_or_ws = rb"[][()<>{}/%\000\011\012\014\015\040]"
+ whitespace = rb"[\000\011\012\014\015\040]"
+ whitespace_or_hex = rb"[\000\011\012\014\015\0400-9a-fA-F]"
+ whitespace_optional = whitespace + b"*"
+ whitespace_mandatory = whitespace + b"+"
+ # No "\012" aka "\n" or "\015" aka "\r":
+ whitespace_optional_no_nl = rb"[\000\011\014\040]*"
+ newline_only = rb"[\r\n]+"
+ newline = whitespace_optional_no_nl + newline_only + whitespace_optional_no_nl
+ re_trailer_end = re.compile(
+ whitespace_mandatory
+ + rb"trailer"
+ + whitespace_optional
+ + rb"<<(.*>>)"
+ + newline
+ + rb"startxref"
+ + newline
+ + rb"([0-9]+)"
+ + newline
+ + rb"%%EOF"
+ + whitespace_optional
+ + rb"$",
+ re.DOTALL,
+ )
+ re_trailer_prev = re.compile(
+ whitespace_optional
+ + rb"trailer"
+ + whitespace_optional
+ + rb"<<(.*?>>)"
+ + newline
+ + rb"startxref"
+ + newline
+ + rb"([0-9]+)"
+ + newline
+ + rb"%%EOF"
+ + whitespace_optional,
+ re.DOTALL,
+ )
+
+ def read_trailer(self):
+ search_start_offset = len(self.buf) - 16384
+ if search_start_offset < self.start_offset:
+ search_start_offset = self.start_offset
+ m = self.re_trailer_end.search(self.buf, search_start_offset)
+ check_format_condition(m, "trailer end not found")
+ # make sure we found the LAST trailer
+ last_match = m
+ while m:
+ last_match = m
+ m = self.re_trailer_end.search(self.buf, m.start() + 16)
+ if not m:
+ m = last_match
+ trailer_data = m.group(1)
+ self.last_xref_section_offset = int(m.group(2))
+ self.trailer_dict = self.interpret_trailer(trailer_data)
+ self.xref_table = XrefTable()
+ self.read_xref_table(xref_section_offset=self.last_xref_section_offset)
+ if b"Prev" in self.trailer_dict:
+ self.read_prev_trailer(self.trailer_dict[b"Prev"])
+
+ def read_prev_trailer(self, xref_section_offset):
+ trailer_offset = self.read_xref_table(xref_section_offset=xref_section_offset)
+ m = self.re_trailer_prev.search(
+ self.buf[trailer_offset : trailer_offset + 16384]
+ )
+ check_format_condition(m, "previous trailer not found")
+ trailer_data = m.group(1)
+ check_format_condition(
+ int(m.group(2)) == xref_section_offset,
+ "xref section offset in previous trailer doesn't match what was expected",
+ )
+ trailer_dict = self.interpret_trailer(trailer_data)
+ if b"Prev" in trailer_dict:
+ self.read_prev_trailer(trailer_dict[b"Prev"])
+
+ re_whitespace_optional = re.compile(whitespace_optional)
+ re_name = re.compile(
+ whitespace_optional
+ + rb"/([!-$&'*-.0-;=?-Z\\^-z|~]+)(?="
+ + delimiter_or_ws
+ + rb")"
+ )
+ re_dict_start = re.compile(whitespace_optional + rb"<<")
+ re_dict_end = re.compile(whitespace_optional + rb">>" + whitespace_optional)
+
+ @classmethod
+ def interpret_trailer(cls, trailer_data):
+ trailer = {}
+ offset = 0
+ while True:
+ m = cls.re_name.match(trailer_data, offset)
+ if not m:
+ m = cls.re_dict_end.match(trailer_data, offset)
+ check_format_condition(
+ m and m.end() == len(trailer_data),
+ "name not found in trailer, remaining data: "
+ + repr(trailer_data[offset:]),
+ )
+ break
+ key = cls.interpret_name(m.group(1))
+ value, offset = cls.get_value(trailer_data, m.end())
+ trailer[key] = value
+ check_format_condition(
+ b"Size" in trailer and isinstance(trailer[b"Size"], int),
+ "/Size not in trailer or not an integer",
+ )
+ check_format_condition(
+ b"Root" in trailer and isinstance(trailer[b"Root"], IndirectReference),
+ "/Root not in trailer or not an indirect reference",
+ )
+ return trailer
+
+ re_hashes_in_name = re.compile(rb"([^#]*)(#([0-9a-fA-F]{2}))?")
+
+ @classmethod
+ def interpret_name(cls, raw, as_text=False):
+ name = b""
+ for m in cls.re_hashes_in_name.finditer(raw):
+ if m.group(3):
+ name += m.group(1) + bytearray.fromhex(m.group(3).decode("us-ascii"))
+ else:
+ name += m.group(1)
+ if as_text:
+ return name.decode("utf-8")
+ else:
+ return bytes(name)
+
+ re_null = re.compile(whitespace_optional + rb"null(?=" + delimiter_or_ws + rb")")
+ re_true = re.compile(whitespace_optional + rb"true(?=" + delimiter_or_ws + rb")")
+ re_false = re.compile(whitespace_optional + rb"false(?=" + delimiter_or_ws + rb")")
+ re_int = re.compile(
+ whitespace_optional + rb"([-+]?[0-9]+)(?=" + delimiter_or_ws + rb")"
+ )
+ re_real = re.compile(
+ whitespace_optional
+ + rb"([-+]?([0-9]+\.[0-9]*|[0-9]*\.[0-9]+))(?="
+ + delimiter_or_ws
+ + rb")"
+ )
+ re_array_start = re.compile(whitespace_optional + rb"\[")
+ re_array_end = re.compile(whitespace_optional + rb"]")
+ re_string_hex = re.compile(
+ whitespace_optional + rb"<(" + whitespace_or_hex + rb"*)>"
+ )
+ re_string_lit = re.compile(whitespace_optional + rb"\(")
+ re_indirect_reference = re.compile(
+ whitespace_optional
+ + rb"([-+]?[0-9]+)"
+ + whitespace_mandatory
+ + rb"([-+]?[0-9]+)"
+ + whitespace_mandatory
+ + rb"R(?="
+ + delimiter_or_ws
+ + rb")"
+ )
+ re_indirect_def_start = re.compile(
+ whitespace_optional
+ + rb"([-+]?[0-9]+)"
+ + whitespace_mandatory
+ + rb"([-+]?[0-9]+)"
+ + whitespace_mandatory
+ + rb"obj(?="
+ + delimiter_or_ws
+ + rb")"
+ )
+ re_indirect_def_end = re.compile(
+ whitespace_optional + rb"endobj(?=" + delimiter_or_ws + rb")"
+ )
+ re_comment = re.compile(
+ rb"(" + whitespace_optional + rb"%[^\r\n]*" + newline + rb")*"
+ )
+ re_stream_start = re.compile(whitespace_optional + rb"stream\r?\n")
+ re_stream_end = re.compile(
+ whitespace_optional + rb"endstream(?=" + delimiter_or_ws + rb")"
+ )
+
+ @classmethod
+ def get_value(cls, data, offset, expect_indirect=None, max_nesting=-1):
+ if max_nesting == 0:
+ return None, None
+ m = cls.re_comment.match(data, offset)
+ if m:
+ offset = m.end()
+ m = cls.re_indirect_def_start.match(data, offset)
+ if m:
+ check_format_condition(
+ int(m.group(1)) > 0,
+ "indirect object definition: object ID must be greater than 0",
+ )
+ check_format_condition(
+ int(m.group(2)) >= 0,
+ "indirect object definition: generation must be non-negative",
+ )
+ check_format_condition(
+ expect_indirect is None
+ or expect_indirect
+ == IndirectReference(int(m.group(1)), int(m.group(2))),
+ "indirect object definition different than expected",
+ )
+ object, offset = cls.get_value(data, m.end(), max_nesting=max_nesting - 1)
+ if offset is None:
+ return object, None
+ m = cls.re_indirect_def_end.match(data, offset)
+ check_format_condition(m, "indirect object definition end not found")
+ return object, m.end()
+ check_format_condition(
+ not expect_indirect, "indirect object definition not found"
+ )
+ m = cls.re_indirect_reference.match(data, offset)
+ if m:
+ check_format_condition(
+ int(m.group(1)) > 0,
+ "indirect object reference: object ID must be greater than 0",
+ )
+ check_format_condition(
+ int(m.group(2)) >= 0,
+ "indirect object reference: generation must be non-negative",
+ )
+ return IndirectReference(int(m.group(1)), int(m.group(2))), m.end()
+ m = cls.re_dict_start.match(data, offset)
+ if m:
+ offset = m.end()
+ result = {}
+ m = cls.re_dict_end.match(data, offset)
+ while not m:
+ key, offset = cls.get_value(data, offset, max_nesting=max_nesting - 1)
+ if offset is None:
+ return result, None
+ value, offset = cls.get_value(data, offset, max_nesting=max_nesting - 1)
+ result[key] = value
+ if offset is None:
+ return result, None
+ m = cls.re_dict_end.match(data, offset)
+ offset = m.end()
+ m = cls.re_stream_start.match(data, offset)
+ if m:
+ try:
+ stream_len = int(result[b"Length"])
+ except (TypeError, KeyError, ValueError) as e:
+ msg = "bad or missing Length in stream dict (%r)" % result.get(
+ b"Length", None
+ )
+ raise PdfFormatError(msg) from e
+ stream_data = data[m.end() : m.end() + stream_len]
+ m = cls.re_stream_end.match(data, m.end() + stream_len)
+ check_format_condition(m, "stream end not found")
+ offset = m.end()
+ result = PdfStream(PdfDict(result), stream_data)
+ else:
+ result = PdfDict(result)
+ return result, offset
+ m = cls.re_array_start.match(data, offset)
+ if m:
+ offset = m.end()
+ result = []
+ m = cls.re_array_end.match(data, offset)
+ while not m:
+ value, offset = cls.get_value(data, offset, max_nesting=max_nesting - 1)
+ result.append(value)
+ if offset is None:
+ return result, None
+ m = cls.re_array_end.match(data, offset)
+ return result, m.end()
+ m = cls.re_null.match(data, offset)
+ if m:
+ return None, m.end()
+ m = cls.re_true.match(data, offset)
+ if m:
+ return True, m.end()
+ m = cls.re_false.match(data, offset)
+ if m:
+ return False, m.end()
+ m = cls.re_name.match(data, offset)
+ if m:
+ return PdfName(cls.interpret_name(m.group(1))), m.end()
+ m = cls.re_int.match(data, offset)
+ if m:
+ return int(m.group(1)), m.end()
+ m = cls.re_real.match(data, offset)
+ if m:
+ # XXX Decimal instead of float???
+ return float(m.group(1)), m.end()
+ m = cls.re_string_hex.match(data, offset)
+ if m:
+ # filter out whitespace
+ hex_string = bytearray(
+ b for b in m.group(1) if b in b"0123456789abcdefABCDEF"
+ )
+ if len(hex_string) % 2 == 1:
+ # append a 0 if the length is not even - yes, at the end
+ hex_string.append(ord(b"0"))
+ return bytearray.fromhex(hex_string.decode("us-ascii")), m.end()
+ m = cls.re_string_lit.match(data, offset)
+ if m:
+ return cls.get_literal_string(data, m.end())
+ # return None, offset # fallback (only for debugging)
+ msg = "unrecognized object: " + repr(data[offset : offset + 32])
+ raise PdfFormatError(msg)
+
+ re_lit_str_token = re.compile(
+ rb"(\\[nrtbf()\\])|(\\[0-9]{1,3})|(\\(\r\n|\r|\n))|(\r\n|\r|\n)|(\()|(\))"
+ )
+ escaped_chars = {
+ b"n": b"\n",
+ b"r": b"\r",
+ b"t": b"\t",
+ b"b": b"\b",
+ b"f": b"\f",
+ b"(": b"(",
+ b")": b")",
+ b"\\": b"\\",
+ ord(b"n"): b"\n",
+ ord(b"r"): b"\r",
+ ord(b"t"): b"\t",
+ ord(b"b"): b"\b",
+ ord(b"f"): b"\f",
+ ord(b"("): b"(",
+ ord(b")"): b")",
+ ord(b"\\"): b"\\",
+ }
+
+ @classmethod
+ def get_literal_string(cls, data, offset):
+ nesting_depth = 0
+ result = bytearray()
+ for m in cls.re_lit_str_token.finditer(data, offset):
+ result.extend(data[offset : m.start()])
+ if m.group(1):
+ result.extend(cls.escaped_chars[m.group(1)[1]])
+ elif m.group(2):
+ result.append(int(m.group(2)[1:], 8))
+ elif m.group(3):
+ pass
+ elif m.group(5):
+ result.extend(b"\n")
+ elif m.group(6):
+ result.extend(b"(")
+ nesting_depth += 1
+ elif m.group(7):
+ if nesting_depth == 0:
+ return bytes(result), m.end()
+ result.extend(b")")
+ nesting_depth -= 1
+ offset = m.end()
+ msg = "unfinished literal string"
+ raise PdfFormatError(msg)
+
+ re_xref_section_start = re.compile(whitespace_optional + rb"xref" + newline)
+ re_xref_subsection_start = re.compile(
+ whitespace_optional
+ + rb"([0-9]+)"
+ + whitespace_mandatory
+ + rb"([0-9]+)"
+ + whitespace_optional
+ + newline_only
+ )
+ re_xref_entry = re.compile(rb"([0-9]{10}) ([0-9]{5}) ([fn])( \r| \n|\r\n)")
+
+ def read_xref_table(self, xref_section_offset):
+ subsection_found = False
+ m = self.re_xref_section_start.match(
+ self.buf, xref_section_offset + self.start_offset
+ )
+ check_format_condition(m, "xref section start not found")
+ offset = m.end()
+ while True:
+ m = self.re_xref_subsection_start.match(self.buf, offset)
+ if not m:
+ check_format_condition(
+ subsection_found, "xref subsection start not found"
+ )
+ break
+ subsection_found = True
+ offset = m.end()
+ first_object = int(m.group(1))
+ num_objects = int(m.group(2))
+ for i in range(first_object, first_object + num_objects):
+ m = self.re_xref_entry.match(self.buf, offset)
+ check_format_condition(m, "xref entry not found")
+ offset = m.end()
+ is_free = m.group(3) == b"f"
+ if not is_free:
+ generation = int(m.group(2))
+ new_entry = (int(m.group(1)), generation)
+ if i not in self.xref_table:
+ self.xref_table[i] = new_entry
+ return offset
+
+ def read_indirect(self, ref, max_nesting=-1):
+ offset, generation = self.xref_table[ref[0]]
+ check_format_condition(
+ generation == ref[1],
+ f"expected to find generation {ref[1]} for object ID {ref[0]} in xref "
+ f"table, instead found generation {generation} at offset {offset}",
+ )
+ value = self.get_value(
+ self.buf,
+ offset + self.start_offset,
+ expect_indirect=IndirectReference(*ref),
+ max_nesting=max_nesting,
+ )[0]
+ self.cached_objects[ref] = value
+ return value
+
+ def linearize_page_tree(self, node=None):
+ if node is None:
+ node = self.page_tree_root
+ check_format_condition(
+ node[b"Type"] == b"Pages", "/Type of page tree node is not /Pages"
+ )
+ pages = []
+ for kid in node[b"Kids"]:
+ kid_object = self.read_indirect(kid)
+ if kid_object[b"Type"] == b"Page":
+ pages.append(kid)
+ else:
+ pages.extend(self.linearize_page_tree(node=kid_object))
+ return pages
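The parser can also be used on its own for inspection. A minimal sketch, assuming a well-formed PDF at a hypothetical path:

    from PIL.PdfParser import PdfParser

    with PdfParser(filename="doc.pdf") as pdf:   # hypothetical file
        print(len(pdf.pages), "page(s)")
        print(pdf.info)                          # /Info dictionary, if present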
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/PixarImagePlugin.py b/Backend/venv/lib/python3.12/site-packages/PIL/PixarImagePlugin.py
new file mode 100644
index 00000000..85027231
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/PIL/PixarImagePlugin.py
@@ -0,0 +1,69 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# PIXAR raster support for PIL
+#
+# history:
+# 97-01-29 fl Created
+#
+# notes:
+# This is incomplete; it is based on a few samples created with
+# Photoshop 2.5 and 3.0, and a summary description provided by
+# Greg Coats. Hopefully, "L" and
+# "RGBA" support will be added in future versions.
+#
+# Copyright (c) Secret Labs AB 1997.
+# Copyright (c) Fredrik Lundh 1997.
+#
+# See the README file for information on usage and redistribution.
+#
+
+from . import Image, ImageFile
+from ._binary import i16le as i16
+
+#
+# helpers
+
+
+def _accept(prefix):
+ return prefix[:4] == b"\200\350\000\000"
+
+
+##
+# Image plugin for PIXAR raster images.
+
+
+class PixarImageFile(ImageFile.ImageFile):
+ format = "PIXAR"
+ format_description = "PIXAR raster image"
+
+ def _open(self):
+ # assuming a 4-byte magic label
+ s = self.fp.read(4)
+ if not _accept(s):
+ msg = "not a PIXAR file"
+ raise SyntaxError(msg)
+
+ # read rest of header
+ s = s + self.fp.read(508)
+
+ self._size = i16(s, 418), i16(s, 416)
+
+ # get channel/depth descriptions
+ mode = i16(s, 424), i16(s, 426)
+
+ if mode == (14, 2):
+ self._mode = "RGB"
+ # FIXME: to be continued...
+
+ # create tile descriptor (assuming "dumped")
+ self.tile = [("raw", (0, 0) + self.size, 1024, (self.mode, 0, 1))]
+
+
+#
+# --------------------------------------------------------------------
+
+Image.register_open(PixarImageFile.format, PixarImageFile, _accept)
+
+Image.register_extension(PixarImageFile.format, ".pxr")
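Reading a PIXAR raster is the standard open path; only the sampled (14, 2) channel/depth layout is recognized, and it decodes as RGB. A sketch with a hypothetical filename:

    from PIL import Image

    im = Image.open("render.pxr")        # hypothetical file
    print(im.format, im.mode, im.size)   # "PIXAR", "RGB", (width, height)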
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/PngImagePlugin.py b/Backend/venv/lib/python3.12/site-packages/PIL/PngImagePlugin.py
new file mode 100644
index 00000000..5e5a8cf6
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/PIL/PngImagePlugin.py
@@ -0,0 +1,1452 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# PNG support code
+#
+# See "PNG (Portable Network Graphics) Specification, version 1.0;
+# W3C Recommendation", 1996-10-01, Thomas Boutell (ed.).
+#
+# history:
+# 1996-05-06 fl Created (couldn't resist it)
+# 1996-12-14 fl Upgraded, added read and verify support (0.2)
+# 1996-12-15 fl Separate PNG stream parser
+# 1996-12-29 fl Added write support, added getchunks
+# 1996-12-30 fl Eliminated circular references in decoder (0.3)
+# 1998-07-12 fl Read/write 16-bit images as mode I (0.4)
+# 2001-02-08 fl Added transparency support (from Zircon) (0.5)
+# 2001-04-16 fl Don't close data source in "open" method (0.6)
+# 2004-02-24 fl Don't even pretend to support interlaced files (0.7)
+# 2004-08-31 fl Do basic sanity check on chunk identifiers (0.8)
+# 2004-09-20 fl Added PngInfo chunk container
+# 2004-12-18 fl Added DPI read support (based on code by Niki Spahiev)
+# 2008-08-13 fl Added tRNS support for RGB images
+# 2009-03-06 fl Support for preserving ICC profiles (by Florian Hoech)
+# 2009-03-08 fl Added zTXT support (from Lowell Alleman)
+# 2009-03-29 fl Read interlaced PNG files (from Conrado Porto Lopes Gouvua)
+#
+# Copyright (c) 1997-2009 by Secret Labs AB
+# Copyright (c) 1996 by Fredrik Lundh
+#
+# See the README file for information on usage and redistribution.
+#
+
+import itertools
+import logging
+import re
+import struct
+import warnings
+import zlib
+from enum import IntEnum
+
+from . import Image, ImageChops, ImageFile, ImagePalette, ImageSequence
+from ._binary import i16be as i16
+from ._binary import i32be as i32
+from ._binary import o8
+from ._binary import o16be as o16
+from ._binary import o32be as o32
+
+logger = logging.getLogger(__name__)
+
+is_cid = re.compile(rb"\w\w\w\w").match
+
+
+_MAGIC = b"\211PNG\r\n\032\n"
+
+
+_MODES = {
+ # supported bits/color combinations, and corresponding modes/rawmodes
+ # Greyscale
+ (1, 0): ("1", "1"),
+ (2, 0): ("L", "L;2"),
+ (4, 0): ("L", "L;4"),
+ (8, 0): ("L", "L"),
+ (16, 0): ("I", "I;16B"),
+ # Truecolour
+ (8, 2): ("RGB", "RGB"),
+ (16, 2): ("RGB", "RGB;16B"),
+ # Indexed-colour
+ (1, 3): ("P", "P;1"),
+ (2, 3): ("P", "P;2"),
+ (4, 3): ("P", "P;4"),
+ (8, 3): ("P", "P"),
+ # Greyscale with alpha
+ (8, 4): ("LA", "LA"),
+ (16, 4): ("RGBA", "LA;16B"), # LA;16B->LA not yet available
+ # Truecolour with alpha
+ (8, 6): ("RGBA", "RGBA"),
+ (16, 6): ("RGBA", "RGBA;16B"),
+}
+
+
+_simple_palette = re.compile(b"^\xff*\x00\xff*$")
+
+MAX_TEXT_CHUNK = ImageFile.SAFEBLOCK
+"""
+Maximum decompressed size for an iTXt or zTXt chunk.
+Eliminates decompression bombs where compressed chunks can expand 1000x.
+See :ref:`Text in PNG File Format`.
+"""
+MAX_TEXT_MEMORY = 64 * MAX_TEXT_CHUNK
+"""
+Set the maximum total text chunk size.
+See :ref:`Text in PNG File Format`.
+"""
+
+
+# APNG frame disposal modes
+class Disposal(IntEnum):
+ OP_NONE = 0
+ """
+ No disposal is done on this frame before rendering the next frame.
+ See :ref:`Saving APNG sequences`.
+ """
+ OP_BACKGROUND = 1
+ """
+ This frame’s modified region is cleared to fully transparent black before rendering
+ the next frame.
+ See :ref:`Saving APNG sequences`.
+ """
+ OP_PREVIOUS = 2
+ """
+ This frame’s modified region is reverted to the previous frame’s contents before
+ rendering the next frame.
+ See :ref:`Saving APNG sequences`.
+ """
+
+
+# APNG frame blend modes
+class Blend(IntEnum):
+ OP_SOURCE = 0
+ """
+ All color components of this frame, including alpha, overwrite the previous output
+ image contents.
+ See :ref:`Saving APNG sequences`.
+ """
+ OP_OVER = 1
+ """
+ This frame should be alpha composited with the previous output image contents.
+ See :ref:`Saving APNG sequences`.
+ """
+
+
+def _safe_zlib_decompress(s):
+ dobj = zlib.decompressobj()
+ plaintext = dobj.decompress(s, MAX_TEXT_CHUNK)
+ if dobj.unconsumed_tail:
+ msg = "Decompressed Data Too Large"
+ raise ValueError(msg)
+ return plaintext
+
+
+def _crc32(data, seed=0):
+ return zlib.crc32(data, seed) & 0xFFFFFFFF
+
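+# The CRC stored after each chunk covers the chunk type and its data together,
+# so verification chains two calls, _crc32(data, _crc32(cid)), as in
+# ChunkStream.crc() below.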
+
+# --------------------------------------------------------------------
+# Support classes. Suitable for PNG and related formats like MNG etc.
+
+
+class ChunkStream:
+ def __init__(self, fp):
+ self.fp = fp
+ self.queue = []
+
+ def read(self):
+ """Fetch a new chunk. Returns header information."""
+ cid = None
+
+ if self.queue:
+ cid, pos, length = self.queue.pop()
+ self.fp.seek(pos)
+ else:
+ s = self.fp.read(8)
+ cid = s[4:]
+ pos = self.fp.tell()
+ length = i32(s)
+
+ if not is_cid(cid):
+ if not ImageFile.LOAD_TRUNCATED_IMAGES:
+ msg = f"broken PNG file (chunk {repr(cid)})"
+ raise SyntaxError(msg)
+
+ return cid, pos, length
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, *args):
+ self.close()
+
+ def close(self):
+ self.queue = self.fp = None
+
+ def push(self, cid, pos, length):
+ self.queue.append((cid, pos, length))
+
+ def call(self, cid, pos, length):
+ """Call the appropriate chunk handler"""
+
+ logger.debug("STREAM %r %s %s", cid, pos, length)
+ return getattr(self, "chunk_" + cid.decode("ascii"))(pos, length)
+
+ def crc(self, cid, data):
+ """Read and verify checksum"""
+
+ # Skip CRC checks for ancillary chunks if allowed to load truncated
+ # images
+        # ancillary chunks have bit 5 of the first type byte set [specs, section 5.4]
+ if ImageFile.LOAD_TRUNCATED_IMAGES and (cid[0] >> 5 & 1):
+ self.crc_skip(cid, data)
+ return
+
+ try:
+ crc1 = _crc32(data, _crc32(cid))
+ crc2 = i32(self.fp.read(4))
+ if crc1 != crc2:
+ msg = f"broken PNG file (bad header checksum in {repr(cid)})"
+ raise SyntaxError(msg)
+ except struct.error as e:
+ msg = f"broken PNG file (incomplete checksum in {repr(cid)})"
+ raise SyntaxError(msg) from e
+
+ def crc_skip(self, cid, data):
+ """Read checksum"""
+
+ self.fp.read(4)
+
+ def verify(self, endchunk=b"IEND"):
+ # Simple approach; just calculate checksum for all remaining
+ # blocks. Must be called directly after open.
+
+ cids = []
+
+ while True:
+ try:
+ cid, pos, length = self.read()
+ except struct.error as e:
+ msg = "truncated PNG file"
+ raise OSError(msg) from e
+
+ if cid == endchunk:
+ break
+ self.crc(cid, ImageFile._safe_read(self.fp, length))
+ cids.append(cid)
+
+ return cids
+
+
+class iTXt(str):
+ """
+ Subclass of string to allow iTXt chunks to look like strings while
+ keeping their extra information
+
+ """
+
+ @staticmethod
+ def __new__(cls, text, lang=None, tkey=None):
+ """
+ :param cls: the class to use when creating the instance
+ :param text: value for this key
+ :param lang: language code
+ :param tkey: UTF-8 version of the key name
+ """
+
+ self = str.__new__(cls, text)
+ self.lang = lang
+ self.tkey = tkey
+ return self
+
+
+class PngInfo:
+ """
+ PNG chunk container (for use with save(pnginfo=))
+
+ """
+
+ def __init__(self):
+ self.chunks = []
+
+ def add(self, cid, data, after_idat=False):
+ """Appends an arbitrary chunk. Use with caution.
+
+ :param cid: a byte string, 4 bytes long.
+ :param data: a byte string of the encoded data
+ :param after_idat: for use with private chunks. Whether the chunk
+ should be written after IDAT
+
+ """
+
+ chunk = [cid, data]
+ if after_idat:
+ chunk.append(True)
+ self.chunks.append(tuple(chunk))
+
+ def add_itxt(self, key, value, lang="", tkey="", zip=False):
+ """Appends an iTXt chunk.
+
+ :param key: latin-1 encodable text key name
+ :param value: value for this key
+ :param lang: language code
+ :param tkey: UTF-8 version of the key name
+ :param zip: compression flag
+
+ """
+
+ if not isinstance(key, bytes):
+ key = key.encode("latin-1", "strict")
+ if not isinstance(value, bytes):
+ value = value.encode("utf-8", "strict")
+ if not isinstance(lang, bytes):
+ lang = lang.encode("utf-8", "strict")
+ if not isinstance(tkey, bytes):
+ tkey = tkey.encode("utf-8", "strict")
+
+ if zip:
+ self.add(
+ b"iTXt",
+ key + b"\0\x01\0" + lang + b"\0" + tkey + b"\0" + zlib.compress(value),
+ )
+ else:
+ self.add(b"iTXt", key + b"\0\0\0" + lang + b"\0" + tkey + b"\0" + value)
+
+ def add_text(self, key, value, zip=False):
+ """Appends a text chunk.
+
+ :param key: latin-1 encodable text key name
+ :param value: value for this key, text or an
+ :py:class:`PIL.PngImagePlugin.iTXt` instance
+ :param zip: compression flag
+
+ """
+ if isinstance(value, iTXt):
+ return self.add_itxt(key, value, value.lang, value.tkey, zip=zip)
+
+ # The tEXt chunk stores latin-1 text
+ if not isinstance(value, bytes):
+ try:
+ value = value.encode("latin-1", "strict")
+ except UnicodeError:
+ return self.add_itxt(key, value, zip=zip)
+
+ if not isinstance(key, bytes):
+ key = key.encode("latin-1", "strict")
+
+ if zip:
+ self.add(b"zTXt", key + b"\0\0" + zlib.compress(value))
+ else:
+ self.add(b"tEXt", key + b"\0" + value)
+
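+# Usage sketch for PngInfo (the file name is hypothetical): add_text stores a
+# latin-1 value as tEXt (or zTXt when zip=True) and falls back to iTXt for
+# anything latin-1 cannot encode, matching the logic above.
+def _demo_pnginfo():
+    from PIL import Image
+
+    info = PngInfo()
+    info.add_text("Author", "example")               # tEXt
+    info.add_text("Comment", "long text", zip=True)  # zTXt
+    info.add_text("Title", "\u65e5\u672c\u8a9e")     # non-latin-1 -> iTXt
+
+    im = Image.new("RGB", (8, 8))
+    im.save("annotated.png", pnginfo=info)
+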
+
+# --------------------------------------------------------------------
+# PNG image stream (IHDR/IEND)
+
+
+class PngStream(ChunkStream):
+ def __init__(self, fp):
+ super().__init__(fp)
+
+ # local copies of Image attributes
+ self.im_info = {}
+ self.im_text = {}
+ self.im_size = (0, 0)
+ self.im_mode = None
+ self.im_tile = None
+ self.im_palette = None
+ self.im_custom_mimetype = None
+ self.im_n_frames = None
+ self._seq_num = None
+ self.rewind_state = None
+
+ self.text_memory = 0
+
+ def check_text_memory(self, chunklen):
+ self.text_memory += chunklen
+ if self.text_memory > MAX_TEXT_MEMORY:
+ msg = (
+ "Too much memory used in text chunks: "
+ f"{self.text_memory}>MAX_TEXT_MEMORY"
+ )
+ raise ValueError(msg)
+
+ def save_rewind(self):
+ self.rewind_state = {
+ "info": self.im_info.copy(),
+ "tile": self.im_tile,
+ "seq_num": self._seq_num,
+ }
+
+ def rewind(self):
+ self.im_info = self.rewind_state["info"]
+ self.im_tile = self.rewind_state["tile"]
+ self._seq_num = self.rewind_state["seq_num"]
+
+ def chunk_iCCP(self, pos, length):
+ # ICC profile
+ s = ImageFile._safe_read(self.fp, length)
+ # according to PNG spec, the iCCP chunk contains:
+ # Profile name 1-79 bytes (character string)
+ # Null separator 1 byte (null character)
+ # Compression method 1 byte (0)
+ # Compressed profile n bytes (zlib with deflate compression)
+ i = s.find(b"\0")
+ logger.debug("iCCP profile name %r", s[:i])
+ logger.debug("Compression method %s", s[i])
+ comp_method = s[i]
+ if comp_method != 0:
+ msg = f"Unknown compression method {comp_method} in iCCP chunk"
+ raise SyntaxError(msg)
+ try:
+ icc_profile = _safe_zlib_decompress(s[i + 2 :])
+ except ValueError:
+ if ImageFile.LOAD_TRUNCATED_IMAGES:
+ icc_profile = None
+ else:
+ raise
+ except zlib.error:
+ icc_profile = None # FIXME
+ self.im_info["icc_profile"] = icc_profile
+ return s
+
+ def chunk_IHDR(self, pos, length):
+ # image header
+ s = ImageFile._safe_read(self.fp, length)
+ if length < 13:
+ if ImageFile.LOAD_TRUNCATED_IMAGES:
+ return s
+ msg = "Truncated IHDR chunk"
+ raise ValueError(msg)
+ self.im_size = i32(s, 0), i32(s, 4)
+ try:
+ self.im_mode, self.im_rawmode = _MODES[(s[8], s[9])]
+ except Exception:
+ pass
+ if s[12]:
+ self.im_info["interlace"] = 1
+ if s[11]:
+ msg = "unknown filter category"
+ raise SyntaxError(msg)
+ return s
+
+ def chunk_IDAT(self, pos, length):
+ # image data
+ if "bbox" in self.im_info:
+ tile = [("zip", self.im_info["bbox"], pos, self.im_rawmode)]
+ else:
+ if self.im_n_frames is not None:
+ self.im_info["default_image"] = True
+ tile = [("zip", (0, 0) + self.im_size, pos, self.im_rawmode)]
+ self.im_tile = tile
+ self.im_idat = length
+ raise EOFError
+
+ def chunk_IEND(self, pos, length):
+ # end of PNG image
+ raise EOFError
+
+ def chunk_PLTE(self, pos, length):
+ # palette
+ s = ImageFile._safe_read(self.fp, length)
+ if self.im_mode == "P":
+ self.im_palette = "RGB", s
+ return s
+
+ def chunk_tRNS(self, pos, length):
+ # transparency
+ s = ImageFile._safe_read(self.fp, length)
+ if self.im_mode == "P":
+ if _simple_palette.match(s):
+ # tRNS contains only one full-transparent entry,
+ # other entries are full opaque
+ i = s.find(b"\0")
+ if i >= 0:
+ self.im_info["transparency"] = i
+ else:
+ # otherwise, we have a byte string with one alpha value
+ # for each palette entry
+ self.im_info["transparency"] = s
+ elif self.im_mode in ("1", "L", "I"):
+ self.im_info["transparency"] = i16(s)
+ elif self.im_mode == "RGB":
+ self.im_info["transparency"] = i16(s), i16(s, 2), i16(s, 4)
+ return s
+
+ def chunk_gAMA(self, pos, length):
+ # gamma setting
+ s = ImageFile._safe_read(self.fp, length)
+ self.im_info["gamma"] = i32(s) / 100000.0
+ return s
+
+ def chunk_cHRM(self, pos, length):
+ # chromaticity, 8 unsigned ints, actual value is scaled by 100,000
+        # WP x,y, Red x,y, Green x,y, Blue x,y
+
+ s = ImageFile._safe_read(self.fp, length)
+ raw_vals = struct.unpack(">%dI" % (len(s) // 4), s)
+ self.im_info["chromaticity"] = tuple(elt / 100000.0 for elt in raw_vals)
+ return s
+
+ def chunk_sRGB(self, pos, length):
+ # srgb rendering intent, 1 byte
+ # 0 perceptual
+ # 1 relative colorimetric
+ # 2 saturation
+ # 3 absolute colorimetric
+
+ s = ImageFile._safe_read(self.fp, length)
+ if length < 1:
+ if ImageFile.LOAD_TRUNCATED_IMAGES:
+ return s
+ msg = "Truncated sRGB chunk"
+ raise ValueError(msg)
+ self.im_info["srgb"] = s[0]
+ return s
+
+ def chunk_pHYs(self, pos, length):
+ # pixels per unit
+ s = ImageFile._safe_read(self.fp, length)
+ if length < 9:
+ if ImageFile.LOAD_TRUNCATED_IMAGES:
+ return s
+ msg = "Truncated pHYs chunk"
+ raise ValueError(msg)
+ px, py = i32(s, 0), i32(s, 4)
+ unit = s[8]
+ if unit == 1: # meter
+ dpi = px * 0.0254, py * 0.0254
+ self.im_info["dpi"] = dpi
+ elif unit == 0:
+ self.im_info["aspect"] = px, py
+ return s
+
+ def chunk_tEXt(self, pos, length):
+ # text
+ s = ImageFile._safe_read(self.fp, length)
+ try:
+ k, v = s.split(b"\0", 1)
+ except ValueError:
+ # fallback for broken tEXt tags
+ k = s
+ v = b""
+ if k:
+ k = k.decode("latin-1", "strict")
+ v_str = v.decode("latin-1", "replace")
+
+ self.im_info[k] = v if k == "exif" else v_str
+ self.im_text[k] = v_str
+ self.check_text_memory(len(v_str))
+
+ return s
+
+ def chunk_zTXt(self, pos, length):
+ # compressed text
+ s = ImageFile._safe_read(self.fp, length)
+ try:
+ k, v = s.split(b"\0", 1)
+ except ValueError:
+ k = s
+ v = b""
+ if v:
+ comp_method = v[0]
+ else:
+ comp_method = 0
+ if comp_method != 0:
+ msg = f"Unknown compression method {comp_method} in zTXt chunk"
+ raise SyntaxError(msg)
+ try:
+ v = _safe_zlib_decompress(v[1:])
+ except ValueError:
+ if ImageFile.LOAD_TRUNCATED_IMAGES:
+ v = b""
+ else:
+ raise
+ except zlib.error:
+ v = b""
+
+ if k:
+ k = k.decode("latin-1", "strict")
+ v = v.decode("latin-1", "replace")
+
+ self.im_info[k] = self.im_text[k] = v
+ self.check_text_memory(len(v))
+
+ return s
+
+ def chunk_iTXt(self, pos, length):
+ # international text
+ r = s = ImageFile._safe_read(self.fp, length)
+ try:
+ k, r = r.split(b"\0", 1)
+ except ValueError:
+ return s
+ if len(r) < 2:
+ return s
+ cf, cm, r = r[0], r[1], r[2:]
+ try:
+ lang, tk, v = r.split(b"\0", 2)
+ except ValueError:
+ return s
+ if cf != 0:
+ if cm == 0:
+ try:
+ v = _safe_zlib_decompress(v)
+ except ValueError:
+ if ImageFile.LOAD_TRUNCATED_IMAGES:
+ return s
+ else:
+ raise
+ except zlib.error:
+ return s
+ else:
+ return s
+ try:
+ k = k.decode("latin-1", "strict")
+ lang = lang.decode("utf-8", "strict")
+ tk = tk.decode("utf-8", "strict")
+ v = v.decode("utf-8", "strict")
+ except UnicodeError:
+ return s
+
+ self.im_info[k] = self.im_text[k] = iTXt(v, lang, tk)
+ self.check_text_memory(len(v))
+
+ return s
+
+ def chunk_eXIf(self, pos, length):
+ s = ImageFile._safe_read(self.fp, length)
+ self.im_info["exif"] = b"Exif\x00\x00" + s
+ return s
+
+ # APNG chunks
+ def chunk_acTL(self, pos, length):
+ s = ImageFile._safe_read(self.fp, length)
+ if length < 8:
+ if ImageFile.LOAD_TRUNCATED_IMAGES:
+ return s
+ msg = "APNG contains truncated acTL chunk"
+ raise ValueError(msg)
+ if self.im_n_frames is not None:
+ self.im_n_frames = None
+ warnings.warn("Invalid APNG, will use default PNG image if possible")
+ return s
+ n_frames = i32(s)
+ if n_frames == 0 or n_frames > 0x80000000:
+ warnings.warn("Invalid APNG, will use default PNG image if possible")
+ return s
+ self.im_n_frames = n_frames
+ self.im_info["loop"] = i32(s, 4)
+ self.im_custom_mimetype = "image/apng"
+ return s
+
+ def chunk_fcTL(self, pos, length):
+ s = ImageFile._safe_read(self.fp, length)
+ if length < 26:
+ if ImageFile.LOAD_TRUNCATED_IMAGES:
+ return s
+ msg = "APNG contains truncated fcTL chunk"
+ raise ValueError(msg)
+ seq = i32(s)
+ if (self._seq_num is None and seq != 0) or (
+ self._seq_num is not None and self._seq_num != seq - 1
+ ):
+ msg = "APNG contains frame sequence errors"
+ raise SyntaxError(msg)
+ self._seq_num = seq
+ width, height = i32(s, 4), i32(s, 8)
+ px, py = i32(s, 12), i32(s, 16)
+ im_w, im_h = self.im_size
+ if px + width > im_w or py + height > im_h:
+ msg = "APNG contains invalid frames"
+ raise SyntaxError(msg)
+ self.im_info["bbox"] = (px, py, px + width, py + height)
+ delay_num, delay_den = i16(s, 20), i16(s, 22)
+ if delay_den == 0:
+ delay_den = 100
+ self.im_info["duration"] = float(delay_num) / float(delay_den) * 1000
+ self.im_info["disposal"] = s[24]
+ self.im_info["blend"] = s[25]
+ return s
+
+ def chunk_fdAT(self, pos, length):
+ if length < 4:
+ if ImageFile.LOAD_TRUNCATED_IMAGES:
+ s = ImageFile._safe_read(self.fp, length)
+ return s
+ msg = "APNG contains truncated fDAT chunk"
+ raise ValueError(msg)
+ s = ImageFile._safe_read(self.fp, 4)
+ seq = i32(s)
+ if self._seq_num != seq - 1:
+ msg = "APNG contains frame sequence errors"
+ raise SyntaxError(msg)
+ self._seq_num = seq
+ return self.chunk_IDAT(pos + 4, length - 4)
+
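+# Dispatch sketch: ChunkStream.call resolves handlers by name, so a method
+# chunk_XXXX on a PngStream subclass handles the chunk type b"XXXX". The
+# b"prVt" chunk below is hypothetical, purely to illustrate the convention.
+class _DemoStream(PngStream):
+    def chunk_prVt(self, pos, length):
+        s = ImageFile._safe_read(self.fp, length)
+        self.im_info["private"] = s
+        return s
+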
+
+# --------------------------------------------------------------------
+# PNG reader
+
+
+def _accept(prefix):
+ return prefix[:8] == _MAGIC
+
+
+##
+# Image plugin for PNG images.
+
+
+class PngImageFile(ImageFile.ImageFile):
+ format = "PNG"
+ format_description = "Portable network graphics"
+
+ def _open(self):
+ if not _accept(self.fp.read(8)):
+ msg = "not a PNG file"
+ raise SyntaxError(msg)
+ self._fp = self.fp
+ self.__frame = 0
+
+ #
+ # Parse headers up to the first IDAT or fDAT chunk
+
+ self.private_chunks = []
+ self.png = PngStream(self.fp)
+
+ while True:
+ #
+ # get next chunk
+
+ cid, pos, length = self.png.read()
+
+ try:
+ s = self.png.call(cid, pos, length)
+ except EOFError:
+ break
+ except AttributeError:
+ logger.debug("%r %s %s (unknown)", cid, pos, length)
+ s = ImageFile._safe_read(self.fp, length)
+ if cid[1:2].islower():
+ self.private_chunks.append((cid, s))
+
+ self.png.crc(cid, s)
+
+ #
+ # Copy relevant attributes from the PngStream. An alternative
+ # would be to let the PngStream class modify these attributes
+ # directly, but that introduces circular references which are
+ # difficult to break if things go wrong in the decoder...
+ # (believe me, I've tried ;-)
+
+ self._mode = self.png.im_mode
+ self._size = self.png.im_size
+ self.info = self.png.im_info
+ self._text = None
+ self.tile = self.png.im_tile
+ self.custom_mimetype = self.png.im_custom_mimetype
+ self.n_frames = self.png.im_n_frames or 1
+ self.default_image = self.info.get("default_image", False)
+
+ if self.png.im_palette:
+ rawmode, data = self.png.im_palette
+ self.palette = ImagePalette.raw(rawmode, data)
+
+ if cid == b"fdAT":
+ self.__prepare_idat = length - 4
+ else:
+ self.__prepare_idat = length # used by load_prepare()
+
+ if self.png.im_n_frames is not None:
+ self._close_exclusive_fp_after_loading = False
+ self.png.save_rewind()
+ self.__rewind_idat = self.__prepare_idat
+ self.__rewind = self._fp.tell()
+ if self.default_image:
+ # IDAT chunk contains default image and not first animation frame
+ self.n_frames += 1
+ self._seek(0)
+ self.is_animated = self.n_frames > 1
+
+ @property
+ def text(self):
+ # experimental
+ if self._text is None:
+            # iTXt, tEXt and zTXt chunks may appear at the end of the file
+ # So load the file to ensure that they are read
+ if self.is_animated:
+ frame = self.__frame
+ # for APNG, seek to the final frame before loading
+ self.seek(self.n_frames - 1)
+ self.load()
+ if self.is_animated:
+ self.seek(frame)
+ return self._text
+
+ def verify(self):
+ """Verify PNG file"""
+
+ if self.fp is None:
+ msg = "verify must be called directly after open"
+ raise RuntimeError(msg)
+
+ # back up to beginning of IDAT block
+ self.fp.seek(self.tile[0][2] - 8)
+
+ self.png.verify()
+ self.png.close()
+
+ if self._exclusive_fp:
+ self.fp.close()
+ self.fp = None
+
+ def seek(self, frame):
+ if not self._seek_check(frame):
+ return
+ if frame < self.__frame:
+ self._seek(0, True)
+
+ last_frame = self.__frame
+ for f in range(self.__frame + 1, frame + 1):
+ try:
+ self._seek(f)
+ except EOFError as e:
+ self.seek(last_frame)
+ msg = "no more images in APNG file"
+ raise EOFError(msg) from e
+
+ def _seek(self, frame, rewind=False):
+ if frame == 0:
+ if rewind:
+ self._fp.seek(self.__rewind)
+ self.png.rewind()
+ self.__prepare_idat = self.__rewind_idat
+ self.im = None
+ if self.pyaccess:
+ self.pyaccess = None
+ self.info = self.png.im_info
+ self.tile = self.png.im_tile
+ self.fp = self._fp
+ self._prev_im = None
+ self.dispose = None
+ self.default_image = self.info.get("default_image", False)
+ self.dispose_op = self.info.get("disposal")
+ self.blend_op = self.info.get("blend")
+ self.dispose_extent = self.info.get("bbox")
+ self.__frame = 0
+ else:
+ if frame != self.__frame + 1:
+ msg = f"cannot seek to frame {frame}"
+ raise ValueError(msg)
+
+ # ensure previous frame was loaded
+ self.load()
+
+ if self.dispose:
+ self.im.paste(self.dispose, self.dispose_extent)
+ self._prev_im = self.im.copy()
+
+ self.fp = self._fp
+
+ # advance to the next frame
+ if self.__prepare_idat:
+ ImageFile._safe_read(self.fp, self.__prepare_idat)
+ self.__prepare_idat = 0
+ frame_start = False
+ while True:
+ self.fp.read(4) # CRC
+
+ try:
+ cid, pos, length = self.png.read()
+ except (struct.error, SyntaxError):
+ break
+
+ if cid == b"IEND":
+ msg = "No more images in APNG file"
+ raise EOFError(msg)
+ if cid == b"fcTL":
+ if frame_start:
+ # there must be at least one fdAT chunk between fcTL chunks
+ msg = "APNG missing frame data"
+ raise SyntaxError(msg)
+ frame_start = True
+
+ try:
+ self.png.call(cid, pos, length)
+ except UnicodeDecodeError:
+ break
+ except EOFError:
+ if cid == b"fdAT":
+ length -= 4
+ if frame_start:
+ self.__prepare_idat = length
+ break
+ ImageFile._safe_read(self.fp, length)
+ except AttributeError:
+ logger.debug("%r %s %s (unknown)", cid, pos, length)
+ ImageFile._safe_read(self.fp, length)
+
+ self.__frame = frame
+ self.tile = self.png.im_tile
+ self.dispose_op = self.info.get("disposal")
+ self.blend_op = self.info.get("blend")
+ self.dispose_extent = self.info.get("bbox")
+
+ if not self.tile:
+ raise EOFError
+
+ # setup frame disposal (actual disposal done when needed in the next _seek())
+ if self._prev_im is None and self.dispose_op == Disposal.OP_PREVIOUS:
+ self.dispose_op = Disposal.OP_BACKGROUND
+
+ if self.dispose_op == Disposal.OP_PREVIOUS:
+ self.dispose = self._prev_im.copy()
+ self.dispose = self._crop(self.dispose, self.dispose_extent)
+ elif self.dispose_op == Disposal.OP_BACKGROUND:
+ self.dispose = Image.core.fill(self.mode, self.size)
+ self.dispose = self._crop(self.dispose, self.dispose_extent)
+ else:
+ self.dispose = None
+
+ def tell(self):
+ return self.__frame
+
+ def load_prepare(self):
+ """internal: prepare to read PNG file"""
+
+ if self.info.get("interlace"):
+ self.decoderconfig = self.decoderconfig + (1,)
+
+ self.__idat = self.__prepare_idat # used by load_read()
+ ImageFile.ImageFile.load_prepare(self)
+
+ def load_read(self, read_bytes):
+ """internal: read more image data"""
+
+ while self.__idat == 0:
+ # end of chunk, skip forward to next one
+
+ self.fp.read(4) # CRC
+
+ cid, pos, length = self.png.read()
+
+ if cid not in [b"IDAT", b"DDAT", b"fdAT"]:
+ self.png.push(cid, pos, length)
+ return b""
+
+ if cid == b"fdAT":
+ try:
+ self.png.call(cid, pos, length)
+ except EOFError:
+ pass
+ self.__idat = length - 4 # sequence_num has already been read
+ else:
+ self.__idat = length # empty chunks are allowed
+
+ # read more data from this chunk
+ if read_bytes <= 0:
+ read_bytes = self.__idat
+ else:
+ read_bytes = min(read_bytes, self.__idat)
+
+ self.__idat = self.__idat - read_bytes
+
+ return self.fp.read(read_bytes)
+
+ def load_end(self):
+ """internal: finished reading image data"""
+ if self.__idat != 0:
+ self.fp.read(self.__idat)
+ while True:
+ self.fp.read(4) # CRC
+
+ try:
+ cid, pos, length = self.png.read()
+ except (struct.error, SyntaxError):
+ break
+
+ if cid == b"IEND":
+ break
+ elif cid == b"fcTL" and self.is_animated:
+ # start of the next frame, stop reading
+ self.__prepare_idat = 0
+ self.png.push(cid, pos, length)
+ break
+
+ try:
+ self.png.call(cid, pos, length)
+ except UnicodeDecodeError:
+ break
+ except EOFError:
+ if cid == b"fdAT":
+ length -= 4
+ ImageFile._safe_read(self.fp, length)
+ except AttributeError:
+ logger.debug("%r %s %s (unknown)", cid, pos, length)
+ s = ImageFile._safe_read(self.fp, length)
+ if cid[1:2].islower():
+ self.private_chunks.append((cid, s, True))
+ self._text = self.png.im_text
+ if not self.is_animated:
+ self.png.close()
+ self.png = None
+ else:
+ if self._prev_im and self.blend_op == Blend.OP_OVER:
+ updated = self._crop(self.im, self.dispose_extent)
+ if self.im.mode == "RGB" and "transparency" in self.info:
+ mask = updated.convert_transparent(
+ "RGBA", self.info["transparency"]
+ )
+ else:
+ mask = updated.convert("RGBA")
+ self._prev_im.paste(updated, self.dispose_extent, mask)
+ self.im = self._prev_im
+ if self.pyaccess:
+ self.pyaccess = None
+
+ def _getexif(self):
+ if "exif" not in self.info:
+ self.load()
+ if "exif" not in self.info and "Raw profile type exif" not in self.info:
+ return None
+ return self.getexif()._get_merged_dict()
+
+ def getexif(self):
+ if "exif" not in self.info:
+ self.load()
+
+ return super().getexif()
+
+ def getxmp(self):
+ """
+ Returns a dictionary containing the XMP tags.
+ Requires defusedxml to be installed.
+
+ :returns: XMP tags in a dictionary.
+ """
+ return (
+ self._getxmp(self.info["XML:com.adobe.xmp"])
+ if "XML:com.adobe.xmp" in self.info
+ else {}
+ )
+
+
+# --------------------------------------------------------------------
+# PNG writer
+
+_OUTMODES = {
+ # supported PIL modes, and corresponding rawmodes/bits/color combinations
+ "1": ("1", b"\x01\x00"),
+ "L;1": ("L;1", b"\x01\x00"),
+ "L;2": ("L;2", b"\x02\x00"),
+ "L;4": ("L;4", b"\x04\x00"),
+ "L": ("L", b"\x08\x00"),
+ "LA": ("LA", b"\x08\x04"),
+ "I": ("I;16B", b"\x10\x00"),
+ "I;16": ("I;16B", b"\x10\x00"),
+ "I;16B": ("I;16B", b"\x10\x00"),
+ "P;1": ("P;1", b"\x01\x03"),
+ "P;2": ("P;2", b"\x02\x03"),
+ "P;4": ("P;4", b"\x04\x03"),
+ "P": ("P", b"\x08\x03"),
+ "RGB": ("RGB", b"\x08\x02"),
+ "RGBA": ("RGBA", b"\x08\x06"),
+}
+
+
+def putchunk(fp, cid, *data):
+ """Write a PNG chunk (including CRC field)"""
+
+ data = b"".join(data)
+
+ fp.write(o32(len(data)) + cid)
+ fp.write(data)
+ crc = _crc32(data, _crc32(cid))
+ fp.write(o32(crc))
+
+
+class _idat:
+ # wrap output from the encoder in IDAT chunks
+
+ def __init__(self, fp, chunk):
+ self.fp = fp
+ self.chunk = chunk
+
+ def write(self, data):
+ self.chunk(self.fp, b"IDAT", data)
+
+
+class _fdat:
+ # wrap encoder output in fdAT chunks
+
+ def __init__(self, fp, chunk, seq_num):
+ self.fp = fp
+ self.chunk = chunk
+ self.seq_num = seq_num
+
+ def write(self, data):
+ self.chunk(self.fp, b"fdAT", o32(self.seq_num), data)
+ self.seq_num += 1
+
+
+def _write_multiple_frames(im, fp, chunk, rawmode, default_image, append_images):
+ duration = im.encoderinfo.get("duration", im.info.get("duration", 0))
+ loop = im.encoderinfo.get("loop", im.info.get("loop", 0))
+ disposal = im.encoderinfo.get("disposal", im.info.get("disposal", Disposal.OP_NONE))
+ blend = im.encoderinfo.get("blend", im.info.get("blend", Blend.OP_SOURCE))
+
+ if default_image:
+ chain = itertools.chain(append_images)
+ else:
+ chain = itertools.chain([im], append_images)
+
+ im_frames = []
+ frame_count = 0
+ for im_seq in chain:
+ for im_frame in ImageSequence.Iterator(im_seq):
+ if im_frame.mode == rawmode:
+ im_frame = im_frame.copy()
+ else:
+ im_frame = im_frame.convert(rawmode)
+ encoderinfo = im.encoderinfo.copy()
+ if isinstance(duration, (list, tuple)):
+ encoderinfo["duration"] = duration[frame_count]
+ if isinstance(disposal, (list, tuple)):
+ encoderinfo["disposal"] = disposal[frame_count]
+ if isinstance(blend, (list, tuple)):
+ encoderinfo["blend"] = blend[frame_count]
+ frame_count += 1
+
+ if im_frames:
+ previous = im_frames[-1]
+ prev_disposal = previous["encoderinfo"].get("disposal")
+ prev_blend = previous["encoderinfo"].get("blend")
+ if prev_disposal == Disposal.OP_PREVIOUS and len(im_frames) < 2:
+ prev_disposal = Disposal.OP_BACKGROUND
+
+ if prev_disposal == Disposal.OP_BACKGROUND:
+ base_im = previous["im"].copy()
+ dispose = Image.core.fill("RGBA", im.size, (0, 0, 0, 0))
+ bbox = previous["bbox"]
+ if bbox:
+ dispose = dispose.crop(bbox)
+ else:
+ bbox = (0, 0) + im.size
+ base_im.paste(dispose, bbox)
+ elif prev_disposal == Disposal.OP_PREVIOUS:
+ base_im = im_frames[-2]["im"]
+ else:
+ base_im = previous["im"]
+ delta = ImageChops.subtract_modulo(
+ im_frame.convert("RGBA"), base_im.convert("RGBA")
+ )
+ bbox = delta.getbbox(alpha_only=False)
+ if (
+ not bbox
+ and prev_disposal == encoderinfo.get("disposal")
+ and prev_blend == encoderinfo.get("blend")
+ ):
+ previous["encoderinfo"]["duration"] += encoderinfo.get(
+ "duration", duration
+ )
+ continue
+ else:
+ bbox = None
+ if "duration" not in encoderinfo:
+ encoderinfo["duration"] = duration
+ im_frames.append({"im": im_frame, "bbox": bbox, "encoderinfo": encoderinfo})
+
+ # animation control
+ chunk(
+ fp,
+ b"acTL",
+ o32(len(im_frames)), # 0: num_frames
+ o32(loop), # 4: num_plays
+ )
+
+ # default image IDAT (if it exists)
+ if default_image:
+ if im.mode != rawmode:
+ im = im.convert(rawmode)
+ ImageFile._save(im, _idat(fp, chunk), [("zip", (0, 0) + im.size, 0, rawmode)])
+
+ seq_num = 0
+ for frame, frame_data in enumerate(im_frames):
+ im_frame = frame_data["im"]
+ if not frame_data["bbox"]:
+ bbox = (0, 0) + im_frame.size
+ else:
+ bbox = frame_data["bbox"]
+ im_frame = im_frame.crop(bbox)
+ size = im_frame.size
+ encoderinfo = frame_data["encoderinfo"]
+ frame_duration = int(round(encoderinfo["duration"]))
+ frame_disposal = encoderinfo.get("disposal", disposal)
+ frame_blend = encoderinfo.get("blend", blend)
+ # frame control
+ chunk(
+ fp,
+ b"fcTL",
+ o32(seq_num), # sequence_number
+ o32(size[0]), # width
+ o32(size[1]), # height
+ o32(bbox[0]), # x_offset
+ o32(bbox[1]), # y_offset
+ o16(frame_duration), # delay_numerator
+ o16(1000), # delay_denominator
+ o8(frame_disposal), # dispose_op
+ o8(frame_blend), # blend_op
+ )
+ seq_num += 1
+ # frame data
+ if frame == 0 and not default_image:
+ # first frame must be in IDAT chunks for backwards compatibility
+ ImageFile._save(
+ im_frame,
+ _idat(fp, chunk),
+ [("zip", (0, 0) + im_frame.size, 0, rawmode)],
+ )
+ else:
+ fdat_chunks = _fdat(fp, chunk, seq_num)
+ ImageFile._save(
+ im_frame,
+ fdat_chunks,
+ [("zip", (0, 0) + im_frame.size, 0, rawmode)],
+ )
+ seq_num = fdat_chunks.seq_num
+
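+# Usage sketch for the APNG writer above (the file name is hypothetical):
+# save_all plus append_images routes through _write_multiple_frames, which
+# emits acTL, then an fcTL per frame and fdAT chunks for every frame after
+# the first (the first frame stays in IDAT for compatibility).
+def _demo_save_apng():
+    from PIL import Image
+
+    frames = [Image.new("RGBA", (16, 16), color) for color in ("red", "blue")]
+    frames[0].save(
+        "anim.png",
+        save_all=True,
+        append_images=frames[1:],
+        duration=500,  # ms per frame; the delay denominator is fixed at 1000
+        loop=0,  # 0 = loop forever
+        disposal=Disposal.OP_BACKGROUND,
+    )
+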
+
+def _save_all(im, fp, filename):
+ _save(im, fp, filename, save_all=True)
+
+
+def _save(im, fp, filename, chunk=putchunk, save_all=False):
+ # save an image to disk (called by the save method)
+
+ if save_all:
+ default_image = im.encoderinfo.get(
+ "default_image", im.info.get("default_image")
+ )
+ modes = set()
+ append_images = im.encoderinfo.get("append_images", [])
+ for im_seq in itertools.chain([im], append_images):
+ for im_frame in ImageSequence.Iterator(im_seq):
+ modes.add(im_frame.mode)
+ for mode in ("RGBA", "RGB", "P"):
+ if mode in modes:
+ break
+ else:
+ mode = modes.pop()
+ else:
+ mode = im.mode
+
+ if mode == "P":
+ #
+ # attempt to minimize storage requirements for palette images
+ if "bits" in im.encoderinfo:
+ # number of bits specified by user
+ colors = min(1 << im.encoderinfo["bits"], 256)
+ else:
+ # check palette contents
+ if im.palette:
+ colors = max(min(len(im.palette.getdata()[1]) // 3, 256), 1)
+ else:
+ colors = 256
+
+ if colors <= 16:
+ if colors <= 2:
+ bits = 1
+ elif colors <= 4:
+ bits = 2
+ else:
+ bits = 4
+ mode = f"{mode};{bits}"
+
+ # encoder options
+ im.encoderconfig = (
+ im.encoderinfo.get("optimize", False),
+ im.encoderinfo.get("compress_level", -1),
+ im.encoderinfo.get("compress_type", -1),
+ im.encoderinfo.get("dictionary", b""),
+ )
+
+ # get the corresponding PNG mode
+ try:
+ rawmode, mode = _OUTMODES[mode]
+ except KeyError as e:
+ msg = f"cannot write mode {mode} as PNG"
+ raise OSError(msg) from e
+
+ #
+ # write minimal PNG file
+
+ fp.write(_MAGIC)
+
+ chunk(
+ fp,
+ b"IHDR",
+ o32(im.size[0]), # 0: size
+ o32(im.size[1]),
+ mode, # 8: depth/type
+ b"\0", # 10: compression
+ b"\0", # 11: filter category
+ b"\0", # 12: interlace flag
+ )
+
+ chunks = [b"cHRM", b"gAMA", b"sBIT", b"sRGB", b"tIME"]
+
+ icc = im.encoderinfo.get("icc_profile", im.info.get("icc_profile"))
+ if icc:
+ # ICC profile
+ # according to PNG spec, the iCCP chunk contains:
+ # Profile name 1-79 bytes (character string)
+ # Null separator 1 byte (null character)
+ # Compression method 1 byte (0)
+ # Compressed profile n bytes (zlib with deflate compression)
+ name = b"ICC Profile"
+ data = name + b"\0\0" + zlib.compress(icc)
+ chunk(fp, b"iCCP", data)
+
+        # An image should carry either an sRGB chunk or an iCCP chunk, not both.
+        # Drop sRGB from the allowed list once an iCCP chunk has been emitted.
+ chunks.remove(b"sRGB")
+
+ info = im.encoderinfo.get("pnginfo")
+ if info:
+ chunks_multiple_allowed = [b"sPLT", b"iTXt", b"tEXt", b"zTXt"]
+ for info_chunk in info.chunks:
+ cid, data = info_chunk[:2]
+ if cid in chunks:
+ chunks.remove(cid)
+ chunk(fp, cid, data)
+ elif cid in chunks_multiple_allowed:
+ chunk(fp, cid, data)
+ elif cid[1:2].islower():
+ # Private chunk
+ after_idat = info_chunk[2:3]
+ if not after_idat:
+ chunk(fp, cid, data)
+
+ if im.mode == "P":
+ palette_byte_number = colors * 3
+ palette_bytes = im.im.getpalette("RGB")[:palette_byte_number]
+ while len(palette_bytes) < palette_byte_number:
+ palette_bytes += b"\0"
+ chunk(fp, b"PLTE", palette_bytes)
+
+ transparency = im.encoderinfo.get("transparency", im.info.get("transparency", None))
+
+ if transparency or transparency == 0:
+ if im.mode == "P":
+ # limit to actual palette size
+ alpha_bytes = colors
+ if isinstance(transparency, bytes):
+ chunk(fp, b"tRNS", transparency[:alpha_bytes])
+ else:
+ transparency = max(0, min(255, transparency))
+ alpha = b"\xFF" * transparency + b"\0"
+ chunk(fp, b"tRNS", alpha[:alpha_bytes])
+ elif im.mode in ("1", "L", "I"):
+ transparency = max(0, min(65535, transparency))
+ chunk(fp, b"tRNS", o16(transparency))
+ elif im.mode == "RGB":
+ red, green, blue = transparency
+ chunk(fp, b"tRNS", o16(red) + o16(green) + o16(blue))
+ else:
+ if "transparency" in im.encoderinfo:
+ # don't bother with transparency if it's an RGBA
+ # and it's in the info dict. It's probably just stale.
+ msg = "cannot use transparency for this mode"
+ raise OSError(msg)
+ else:
+ if im.mode == "P" and im.im.getpalettemode() == "RGBA":
+ alpha = im.im.getpalette("RGBA", "A")
+ alpha_bytes = colors
+ chunk(fp, b"tRNS", alpha[:alpha_bytes])
+
+ dpi = im.encoderinfo.get("dpi")
+ if dpi:
+ chunk(
+ fp,
+ b"pHYs",
+ o32(int(dpi[0] / 0.0254 + 0.5)),
+ o32(int(dpi[1] / 0.0254 + 0.5)),
+ b"\x01",
+ )
+
+ if info:
+ chunks = [b"bKGD", b"hIST"]
+ for info_chunk in info.chunks:
+ cid, data = info_chunk[:2]
+ if cid in chunks:
+ chunks.remove(cid)
+ chunk(fp, cid, data)
+
+ exif = im.encoderinfo.get("exif")
+ if exif:
+ if isinstance(exif, Image.Exif):
+ exif = exif.tobytes(8)
+ if exif.startswith(b"Exif\x00\x00"):
+ exif = exif[6:]
+ chunk(fp, b"eXIf", exif)
+
+ if save_all:
+ _write_multiple_frames(im, fp, chunk, rawmode, default_image, append_images)
+ else:
+ ImageFile._save(im, _idat(fp, chunk), [("zip", (0, 0) + im.size, 0, rawmode)])
+
+ if info:
+ for info_chunk in info.chunks:
+ cid, data = info_chunk[:2]
+ if cid[1:2].islower():
+ # Private chunk
+ after_idat = info_chunk[2:3]
+ if after_idat:
+ chunk(fp, cid, data)
+
+ chunk(fp, b"IEND", b"")
+
+ if hasattr(fp, "flush"):
+ fp.flush()
+
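+# Usage sketch for _save (normally reached via Image.save; the file name is
+# hypothetical): optimize/compress_level feed the encoderconfig tuple above,
+# and bits=4 triggers the palette size minimization, writing mode "P;4".
+def _demo_save_png():
+    from PIL import Image
+
+    im = Image.new("P", (8, 8))
+    im.save("small.png", optimize=True, compress_level=9, bits=4)
+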
+
+# --------------------------------------------------------------------
+# PNG chunk converter
+
+
+def getchunks(im, **params):
+ """Return a list of PNG chunks representing this image."""
+
+ class collector:
+ data = []
+
+ def write(self, data):
+ pass
+
+ def append(self, chunk):
+ self.data.append(chunk)
+
+ def append(fp, cid, *data):
+ data = b"".join(data)
+ crc = o32(_crc32(data, _crc32(cid)))
+ fp.append((cid, data, crc))
+
+ fp = collector()
+
+ try:
+ im.encoderinfo = params
+ _save(im, fp, None, append)
+ finally:
+ del im.encoderinfo
+
+ return fp.data
+
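+# Usage sketch for getchunks: it drives the normal writer against an
+# in-memory collector, yielding (cid, data, crc) tuples instead of a file.
+def _demo_getchunks():
+    from PIL import Image
+
+    im = Image.new("RGB", (4, 4))
+    for cid, data, crc in getchunks(im):
+        print(cid, len(data))
+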
+
+# --------------------------------------------------------------------
+# Registry
+
+Image.register_open(PngImageFile.format, PngImageFile, _accept)
+Image.register_save(PngImageFile.format, _save)
+Image.register_save_all(PngImageFile.format, _save_all)
+
+Image.register_extensions(PngImageFile.format, [".png", ".apng"])
+
+Image.register_mime(PngImageFile.format, "image/png")
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/PpmImagePlugin.py b/Backend/venv/lib/python3.12/site-packages/PIL/PpmImagePlugin.py
new file mode 100644
index 00000000..e480ab05
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/PIL/PpmImagePlugin.py
@@ -0,0 +1,347 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# PPM support for PIL
+#
+# History:
+# 96-03-24 fl Created
+# 98-03-06 fl Write RGBA images (as RGB, that is)
+#
+# Copyright (c) Secret Labs AB 1997-98.
+# Copyright (c) Fredrik Lundh 1996.
+#
+# See the README file for information on usage and redistribution.
+#
+
+
+from . import Image, ImageFile
+from ._binary import i16be as i16
+from ._binary import o8
+from ._binary import o32le as o32
+
+#
+# --------------------------------------------------------------------
+
+b_whitespace = b"\x20\x09\x0a\x0b\x0c\x0d"
+
+MODES = {
+ # standard
+ b"P1": "1",
+ b"P2": "L",
+ b"P3": "RGB",
+ b"P4": "1",
+ b"P5": "L",
+ b"P6": "RGB",
+ # extensions
+ b"P0CMYK": "CMYK",
+ # PIL extensions (for test purposes only)
+ b"PyP": "P",
+ b"PyRGBA": "RGBA",
+ b"PyCMYK": "CMYK",
+}
+
+
+def _accept(prefix):
+ return prefix[0:1] == b"P" and prefix[1] in b"0123456y"
+
+
+##
+# Image plugin for PBM, PGM, and PPM images.
+
+
+class PpmImageFile(ImageFile.ImageFile):
+ format = "PPM"
+ format_description = "Pbmplus image"
+
+ def _read_magic(self):
+ magic = b""
+ # read until whitespace or longest available magic number
+ for _ in range(6):
+ c = self.fp.read(1)
+ if not c or c in b_whitespace:
+ break
+ magic += c
+ return magic
+
+ def _read_token(self):
+ token = b""
+ while len(token) <= 10: # read until next whitespace or limit of 10 characters
+ c = self.fp.read(1)
+ if not c:
+ break
+ elif c in b_whitespace: # token ended
+ if not token:
+ # skip whitespace at start
+ continue
+ break
+ elif c == b"#":
+ # ignores rest of the line; stops at CR, LF or EOF
+ while self.fp.read(1) not in b"\r\n":
+ pass
+ continue
+ token += c
+ if not token:
+ # Token was not even 1 byte
+ msg = "Reached EOF while reading header"
+ raise ValueError(msg)
+ elif len(token) > 10:
+ msg = f"Token too long in file header: {token.decode()}"
+ raise ValueError(msg)
+ return token
+
+ def _open(self):
+ magic_number = self._read_magic()
+ try:
+ mode = MODES[magic_number]
+ except KeyError:
+ msg = "not a PPM file"
+ raise SyntaxError(msg)
+
+ if magic_number in (b"P1", b"P4"):
+ self.custom_mimetype = "image/x-portable-bitmap"
+ elif magic_number in (b"P2", b"P5"):
+ self.custom_mimetype = "image/x-portable-graymap"
+ elif magic_number in (b"P3", b"P6"):
+ self.custom_mimetype = "image/x-portable-pixmap"
+
+ maxval = None
+ decoder_name = "raw"
+ if magic_number in (b"P1", b"P2", b"P3"):
+ decoder_name = "ppm_plain"
+ for ix in range(3):
+ token = int(self._read_token())
+ if ix == 0: # token is the x size
+ xsize = token
+ elif ix == 1: # token is the y size
+ ysize = token
+ if mode == "1":
+ self._mode = "1"
+ rawmode = "1;I"
+ break
+ else:
+ self._mode = rawmode = mode
+ elif ix == 2: # token is maxval
+ maxval = token
+ if not 0 < maxval < 65536:
+ msg = "maxval must be greater than 0 and less than 65536"
+ raise ValueError(msg)
+ if maxval > 255 and mode == "L":
+ self._mode = "I"
+
+ if decoder_name != "ppm_plain":
+ # If maxval matches a bit depth, use the raw decoder directly
+ if maxval == 65535 and mode == "L":
+ rawmode = "I;16B"
+ elif maxval != 255:
+ decoder_name = "ppm"
+
+ args = (rawmode, 0, 1) if decoder_name == "raw" else (rawmode, maxval)
+ self._size = xsize, ysize
+ self.tile = [(decoder_name, (0, 0, xsize, ysize), self.fp.tell(), args)]
+
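+# Header sketch: _read_magic and _read_token above parse a whitespace-
+# separated header of magic number, width, height and (except for PBM)
+# maxval, with "#" comments allowed between tokens. A minimal binary P6
+# file therefore starts like this, followed by 4 * 4 * 3 raw sample bytes:
+_DEMO_PPM_HEADER = b"P6\n# a comment\n4 4\n255\n"
+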
+
+#
+# --------------------------------------------------------------------
+
+
+class PpmPlainDecoder(ImageFile.PyDecoder):
+ _pulls_fd = True
+
+ def _read_block(self):
+ return self.fd.read(ImageFile.SAFEBLOCK)
+
+ def _find_comment_end(self, block, start=0):
+ a = block.find(b"\n", start)
+ b = block.find(b"\r", start)
+ return min(a, b) if a * b > 0 else max(a, b) # lowest nonnegative index (or -1)
+
+ def _ignore_comments(self, block):
+ if self._comment_spans:
+ # Finish current comment
+ while block:
+ comment_end = self._find_comment_end(block)
+ if comment_end != -1:
+ # Comment ends in this block
+ # Delete tail of comment
+ block = block[comment_end + 1 :]
+ break
+ else:
+ # Comment spans whole block
+ # So read the next block, looking for the end
+ block = self._read_block()
+
+ # Search for any further comments
+ self._comment_spans = False
+ while True:
+ comment_start = block.find(b"#")
+ if comment_start == -1:
+ # No comment found
+ break
+ comment_end = self._find_comment_end(block, comment_start)
+ if comment_end != -1:
+ # Comment ends in this block
+ # Delete comment
+ block = block[:comment_start] + block[comment_end + 1 :]
+ else:
+ # Comment continues to next block(s)
+ block = block[:comment_start]
+ self._comment_spans = True
+ break
+ return block
+
+ def _decode_bitonal(self):
+ """
+ This is a separate method because in the plain PBM format, all data tokens are
+ exactly one byte, so the inter-token whitespace is optional.
+ """
+ data = bytearray()
+ total_bytes = self.state.xsize * self.state.ysize
+
+ while len(data) != total_bytes:
+ block = self._read_block() # read next block
+ if not block:
+ # eof
+ break
+
+ block = self._ignore_comments(block)
+
+ tokens = b"".join(block.split())
+ for token in tokens:
+ if token not in (48, 49):
+ msg = b"Invalid token for this mode: %s" % bytes([token])
+ raise ValueError(msg)
+ data = (data + tokens)[:total_bytes]
+ invert = bytes.maketrans(b"01", b"\xFF\x00")
+ return data.translate(invert)
+
+ def _decode_blocks(self, maxval):
+ data = bytearray()
+ max_len = 10
+ out_byte_count = 4 if self.mode == "I" else 1
+ out_max = 65535 if self.mode == "I" else 255
+ bands = Image.getmodebands(self.mode)
+ total_bytes = self.state.xsize * self.state.ysize * bands * out_byte_count
+
+ half_token = False
+ while len(data) != total_bytes:
+ block = self._read_block() # read next block
+ if not block:
+ if half_token:
+ block = bytearray(b" ") # flush half_token
+ else:
+ # eof
+ break
+
+ block = self._ignore_comments(block)
+
+ if half_token:
+ block = half_token + block # stitch half_token to new block
+ half_token = False
+
+ tokens = block.split()
+
+ if block and not block[-1:].isspace(): # block might split token
+ half_token = tokens.pop() # save half token for later
+ if len(half_token) > max_len: # prevent buildup of half_token
+ msg = (
+ b"Token too long found in data: %s" % half_token[: max_len + 1]
+ )
+ raise ValueError(msg)
+
+ for token in tokens:
+ if len(token) > max_len:
+ msg = b"Token too long found in data: %s" % token[: max_len + 1]
+ raise ValueError(msg)
+ value = int(token)
+ if value > maxval:
+ msg = f"Channel value too large for this mode: {value}"
+ raise ValueError(msg)
+ value = round(value / maxval * out_max)
+ data += o32(value) if self.mode == "I" else o8(value)
+ if len(data) == total_bytes: # finished!
+ break
+ return data
+
+ def decode(self, buffer):
+ self._comment_spans = False
+ if self.mode == "1":
+ data = self._decode_bitonal()
+ rawmode = "1;8"
+ else:
+ maxval = self.args[-1]
+ data = self._decode_blocks(maxval)
+ rawmode = "I;32" if self.mode == "I" else self.mode
+ self.set_as_raw(bytes(data), rawmode)
+ return -1, 0
+
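+# Scaling sketch: the plain decoder above and the raw decoder below both
+# normalise samples from the header's maxval to the output range, so for
+# maxval 31 in mode "L" a sample of 15 becomes round(15 / 31 * 255) == 123.
+def _demo_scale(value, maxval, out_max=255):
+    return round(value / maxval * out_max)
+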
+
+class PpmDecoder(ImageFile.PyDecoder):
+ _pulls_fd = True
+
+ def decode(self, buffer):
+ data = bytearray()
+ maxval = self.args[-1]
+ in_byte_count = 1 if maxval < 256 else 2
+ out_byte_count = 4 if self.mode == "I" else 1
+ out_max = 65535 if self.mode == "I" else 255
+ bands = Image.getmodebands(self.mode)
+ while len(data) < self.state.xsize * self.state.ysize * bands * out_byte_count:
+ pixels = self.fd.read(in_byte_count * bands)
+ if len(pixels) < in_byte_count * bands:
+ # eof
+ break
+ for b in range(bands):
+ value = (
+ pixels[b] if in_byte_count == 1 else i16(pixels, b * in_byte_count)
+ )
+ value = min(out_max, round(value / maxval * out_max))
+ data += o32(value) if self.mode == "I" else o8(value)
+ rawmode = "I;32" if self.mode == "I" else self.mode
+ self.set_as_raw(bytes(data), rawmode)
+ return -1, 0
+
+
+#
+# --------------------------------------------------------------------
+
+
+def _save(im, fp, filename):
+ if im.mode == "1":
+ rawmode, head = "1;I", b"P4"
+ elif im.mode == "L":
+ rawmode, head = "L", b"P5"
+ elif im.mode == "I":
+ rawmode, head = "I;16B", b"P5"
+ elif im.mode in ("RGB", "RGBA"):
+ rawmode, head = "RGB", b"P6"
+ else:
+ msg = f"cannot write mode {im.mode} as PPM"
+ raise OSError(msg)
+ fp.write(head + b"\n%d %d\n" % im.size)
+ if head == b"P6":
+ fp.write(b"255\n")
+ elif head == b"P5":
+ if rawmode == "L":
+ fp.write(b"255\n")
+ else:
+ fp.write(b"65535\n")
+ ImageFile._save(im, fp, [("raw", (0, 0) + im.size, 0, (rawmode, 0, 1))])
+
+ # ALTERNATIVE: save via builtin debug function
+ # im._dump(filename)
+
+
+#
+# --------------------------------------------------------------------
+
+
+Image.register_open(PpmImageFile.format, PpmImageFile, _accept)
+Image.register_save(PpmImageFile.format, _save)
+
+Image.register_decoder("ppm", PpmDecoder)
+Image.register_decoder("ppm_plain", PpmPlainDecoder)
+
+Image.register_extensions(PpmImageFile.format, [".pbm", ".pgm", ".ppm", ".pnm"])
+
+Image.register_mime(PpmImageFile.format, "image/x-portable-anymap")
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/PsdImagePlugin.py b/Backend/venv/lib/python3.12/site-packages/PIL/PsdImagePlugin.py
new file mode 100644
index 00000000..2f019bb8
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/PIL/PsdImagePlugin.py
@@ -0,0 +1,303 @@
+#
+# The Python Imaging Library
+# $Id$
+#
+# Adobe PSD 2.5/3.0 file handling
+#
+# History:
+# 1995-09-01 fl Created
+# 1997-01-03 fl Read most PSD images
+# 1997-01-18 fl Fixed P and CMYK support
+# 2001-10-21 fl Added seek/tell support (for layers)
+#
+# Copyright (c) 1997-2001 by Secret Labs AB.
+# Copyright (c) 1995-2001 by Fredrik Lundh
+#
+# See the README file for information on usage and redistribution.
+#
+
+import io
+
+from . import Image, ImageFile, ImagePalette
+from ._binary import i8
+from ._binary import i16be as i16
+from ._binary import i32be as i32
+from ._binary import si16be as si16
+
+MODES = {
+ # (photoshop mode, bits) -> (pil mode, required channels)
+ (0, 1): ("1", 1),
+ (0, 8): ("L", 1),
+ (1, 8): ("L", 1),
+ (2, 8): ("P", 1),
+ (3, 8): ("RGB", 3),
+ (4, 8): ("CMYK", 4),
+ (7, 8): ("L", 1), # FIXME: multilayer
+ (8, 8): ("L", 1), # duotone
+ (9, 8): ("LAB", 3),
+}
+
+
+# --------------------------------------------------------------------
+# read PSD images
+
+
+def _accept(prefix):
+ return prefix[:4] == b"8BPS"
+
+
+##
+# Image plugin for Photoshop images.
+
+
+class PsdImageFile(ImageFile.ImageFile):
+ format = "PSD"
+ format_description = "Adobe Photoshop"
+ _close_exclusive_fp_after_loading = False
+
+ def _open(self):
+ read = self.fp.read
+
+ #
+ # header
+
+ s = read(26)
+ if not _accept(s) or i16(s, 4) != 1:
+ msg = "not a PSD file"
+ raise SyntaxError(msg)
+
+ psd_bits = i16(s, 22)
+ psd_channels = i16(s, 12)
+ psd_mode = i16(s, 24)
+
+ mode, channels = MODES[(psd_mode, psd_bits)]
+
+ if channels > psd_channels:
+ msg = "not enough channels"
+ raise OSError(msg)
+ if mode == "RGB" and psd_channels == 4:
+ mode = "RGBA"
+ channels = 4
+
+ self._mode = mode
+ self._size = i32(s, 18), i32(s, 14)
+
+ #
+ # color mode data
+
+ size = i32(read(4))
+ if size:
+ data = read(size)
+ if mode == "P" and size == 768:
+ self.palette = ImagePalette.raw("RGB;L", data)
+
+ #
+ # image resources
+
+ self.resources = []
+
+ size = i32(read(4))
+ if size:
+ # load resources
+ end = self.fp.tell() + size
+ while self.fp.tell() < end:
+ read(4) # signature
+ id = i16(read(2))
+ name = read(i8(read(1)))
+ if not (len(name) & 1):
+ read(1) # padding
+ data = read(i32(read(4)))
+ if len(data) & 1:
+ read(1) # padding
+ self.resources.append((id, name, data))
+ if id == 1039: # ICC profile
+ self.info["icc_profile"] = data
+
+ #
+ # layer and mask information
+
+ self.layers = []
+
+ size = i32(read(4))
+ if size:
+ end = self.fp.tell() + size
+ size = i32(read(4))
+ if size:
+ _layer_data = io.BytesIO(ImageFile._safe_read(self.fp, size))
+ self.layers = _layerinfo(_layer_data, size)
+ self.fp.seek(end)
+ self.n_frames = len(self.layers)
+ self.is_animated = self.n_frames > 1
+
+ #
+ # image descriptor
+
+ self.tile = _maketile(self.fp, mode, (0, 0) + self.size, channels)
+
+ # keep the file open
+ self._fp = self.fp
+ self.frame = 1
+ self._min_frame = 1
+
+ def seek(self, layer):
+ if not self._seek_check(layer):
+ return
+
+ # seek to given layer (1..max)
+ try:
+ name, mode, bbox, tile = self.layers[layer - 1]
+ self._mode = mode
+ self.tile = tile
+ self.frame = layer
+ self.fp = self._fp
+ return name, bbox
+ except IndexError as e:
+ msg = "no such layer"
+ raise EOFError(msg) from e
+
+ def tell(self):
+ # return layer number (0=image, 1..max=layers)
+ return self.frame
+
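+# Usage sketch for the layer API above (the file name is hypothetical): after
+# _open, im.layers holds one (name, mode, bbox, tile) entry per layer, and
+# seek() selects a layer by its 1-based index.
+def _demo_psd_layers():
+    from PIL import Image
+
+    im = Image.open("design.psd")
+    print(im.n_frames, "layers")
+    for name, mode, bbox, tile in im.layers:
+        print(name, mode, bbox)
+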
+
+def _layerinfo(fp, ct_bytes):
+ # read layerinfo block
+ layers = []
+
+ def read(size):
+ return ImageFile._safe_read(fp, size)
+
+ ct = si16(read(2))
+
+ # sanity check
+ if ct_bytes < (abs(ct) * 20):
+ msg = "Layer block too short for number of layers requested"
+ raise SyntaxError(msg)
+
+ for _ in range(abs(ct)):
+ # bounding box
+ y0 = i32(read(4))
+ x0 = i32(read(4))
+ y1 = i32(read(4))
+ x1 = i32(read(4))
+
+ # image info
+ mode = []
+ ct_types = i16(read(2))
+ types = list(range(ct_types))
+ if len(types) > 4:
+ continue
+
+ for _ in types:
+ type = i16(read(2))
+
+ if type == 65535:
+ m = "A"
+ else:
+ m = "RGBA"[type]
+
+ mode.append(m)
+ read(4) # size
+
+ # figure out the image mode
+ mode.sort()
+ if mode == ["R"]:
+ mode = "L"
+ elif mode == ["B", "G", "R"]:
+ mode = "RGB"
+ elif mode == ["A", "B", "G", "R"]:
+ mode = "RGBA"
+ else:
+ mode = None # unknown
+
+ # skip over blend flags and extra information
+ read(12) # filler
+ name = ""
+ size = i32(read(4)) # length of the extra data field
+ if size:
+ data_end = fp.tell() + size
+
+ length = i32(read(4))
+ if length:
+ fp.seek(length - 16, io.SEEK_CUR)
+
+ length = i32(read(4))
+ if length:
+ fp.seek(length, io.SEEK_CUR)
+
+ length = i8(read(1))
+ if length:
+                # The proper encoding is unknown;
+                # Latin-1 should be a good guess
+ name = read(length).decode("latin-1", "replace")
+
+ fp.seek(data_end)
+ layers.append((name, mode, (x0, y0, x1, y1)))
+
+ # get tiles
+ for i, (name, mode, bbox) in enumerate(layers):
+ tile = []
+ for m in mode:
+ t = _maketile(fp, m, bbox, 1)
+ if t:
+ tile.extend(t)
+ layers[i] = name, mode, bbox, tile
+
+ return layers
+
+
+def _maketile(file, mode, bbox, channels):
+ tile = None
+ read = file.read
+
+ compression = i16(read(2))
+
+ xsize = bbox[2] - bbox[0]
+ ysize = bbox[3] - bbox[1]
+
+ offset = file.tell()
+
+ if compression == 0:
+ #
+ # raw compression
+ tile = []
+ for channel in range(channels):
+ layer = mode[channel]
+ if mode == "CMYK":
+ layer += ";I"
+ tile.append(("raw", bbox, offset, layer))
+ offset = offset + xsize * ysize
+
+ elif compression == 1:
+ #
+ # packbits compression
+ i = 0
+ tile = []
+ bytecount = read(channels * ysize * 2)
+ offset = file.tell()
+ for channel in range(channels):
+ layer = mode[channel]
+ if mode == "CMYK":
+ layer += ";I"
+ tile.append(("packbits", bbox, offset, layer))
+ for y in range(ysize):
+ offset = offset + i16(bytecount, i)
+ i += 2
+
+ file.seek(offset)
+
+ if offset & 1:
+ read(1) # padding
+
+ return tile
+
+
+# --------------------------------------------------------------------
+# registry
+
+
+Image.register_open(PsdImageFile.format, PsdImageFile, _accept)
+
+Image.register_extension(PsdImageFile.format, ".psd")
+
+Image.register_mime(PsdImageFile.format, "image/vnd.adobe.photoshop")
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/PyAccess.py b/Backend/venv/lib/python3.12/site-packages/PIL/PyAccess.py
new file mode 100644
index 00000000..99b46a4a
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/PIL/PyAccess.py
@@ -0,0 +1,363 @@
+#
+# The Python Imaging Library
+# Pillow fork
+#
+# Python implementation of the PixelAccess Object
+#
+# Copyright (c) 1997-2009 by Secret Labs AB. All rights reserved.
+# Copyright (c) 1995-2009 by Fredrik Lundh.
+# Copyright (c) 2013 Eric Soroos
+#
+# See the README file for information on usage and redistribution
+#
+
+# Notes:
+#
+# * Implements the pixel access object following Access.c
+# * Only the tuple form is implemented, since that is what is used from Python.
+# * Fill.c uses the integer form, but it still goes through the old
+#   Access.c implementation.
+#
+
+import logging
+import sys
+
+from ._deprecate import deprecate
+
+try:
+ from cffi import FFI
+
+ defs = """
+ struct Pixel_RGBA {
+ unsigned char r,g,b,a;
+ };
+ struct Pixel_I16 {
+ unsigned char l,r;
+ };
+ """
+ ffi = FFI()
+ ffi.cdef(defs)
+except ImportError as ex:
+    # Allow the module to import without cffi (e.g. for documentation builds),
+    # but raise the original error as soon as anything here is actually used.
+ from ._util import DeferredError
+
+ FFI = ffi = DeferredError(ex)
+
+logger = logging.getLogger(__name__)
+
+
+class PyAccess:
+ def __init__(self, img, readonly=False):
+ deprecate("PyAccess", 11)
+ vals = dict(img.im.unsafe_ptrs)
+ self.readonly = readonly
+ self.image8 = ffi.cast("unsigned char **", vals["image8"])
+ self.image32 = ffi.cast("int **", vals["image32"])
+ self.image = ffi.cast("unsigned char **", vals["image"])
+ self.xsize, self.ysize = img.im.size
+ self._img = img
+
+ # Keep pointer to im object to prevent dereferencing.
+ self._im = img.im
+ if self._im.mode in ("P", "PA"):
+ self._palette = img.palette
+
+        # Debug logging pollutes test traces; it is only useful here
+        # when hacking on PyAccess
+ # logger.debug("%s", vals)
+ self._post_init()
+
+ def _post_init(self):
+ pass
+
+ def __setitem__(self, xy, color):
+ """
+ Modifies the pixel at x,y. The color is given as a single
+ numerical value for single band images, and a tuple for
+ multi-band images
+
+ :param xy: The pixel coordinate, given as (x, y). See
+ :ref:`coordinate-system`.
+ :param color: The pixel value.
+ """
+ if self.readonly:
+ msg = "Attempt to putpixel a read only image"
+ raise ValueError(msg)
+ (x, y) = xy
+ if x < 0:
+ x = self.xsize + x
+ if y < 0:
+ y = self.ysize + y
+ (x, y) = self.check_xy((x, y))
+
+ if (
+ self._im.mode in ("P", "PA")
+ and isinstance(color, (list, tuple))
+ and len(color) in [3, 4]
+ ):
+ # RGB or RGBA value for a P or PA image
+ if self._im.mode == "PA":
+ alpha = color[3] if len(color) == 4 else 255
+ color = color[:3]
+ color = self._palette.getcolor(color, self._img)
+ if self._im.mode == "PA":
+ color = (color, alpha)
+
+ return self.set_pixel(x, y, color)
+
+ def __getitem__(self, xy):
+ """
+ Returns the pixel at x,y. The pixel is returned as a single
+ value for single band images or a tuple for multiple band
+ images
+
+ :param xy: The pixel coordinate, given as (x, y). See
+ :ref:`coordinate-system`.
+ :returns: a pixel value for single band images, a tuple of
+ pixel values for multiband images.
+ """
+ (x, y) = xy
+ if x < 0:
+ x = self.xsize + x
+ if y < 0:
+ y = self.ysize + y
+ (x, y) = self.check_xy((x, y))
+ return self.get_pixel(x, y)
+
+ putpixel = __setitem__
+ getpixel = __getitem__
+
+ def check_xy(self, xy):
+ (x, y) = xy
+ if not (0 <= x < self.xsize and 0 <= y < self.ysize):
+ msg = "pixel location out of range"
+ raise ValueError(msg)
+ return xy
+
+
+class _PyAccess32_2(PyAccess):
+ """PA, LA, stored in first and last bytes of a 32 bit word"""
+
+ def _post_init(self, *args, **kwargs):
+ self.pixels = ffi.cast("struct Pixel_RGBA **", self.image32)
+
+ def get_pixel(self, x, y):
+ pixel = self.pixels[y][x]
+ return pixel.r, pixel.a
+
+ def set_pixel(self, x, y, color):
+ pixel = self.pixels[y][x]
+ # tuple
+ pixel.r = min(color[0], 255)
+ pixel.a = min(color[1], 255)
+
+
+class _PyAccess32_3(PyAccess):
+ """RGB and friends, stored in the first three bytes of a 32 bit word"""
+
+ def _post_init(self, *args, **kwargs):
+ self.pixels = ffi.cast("struct Pixel_RGBA **", self.image32)
+
+ def get_pixel(self, x, y):
+ pixel = self.pixels[y][x]
+ return pixel.r, pixel.g, pixel.b
+
+ def set_pixel(self, x, y, color):
+ pixel = self.pixels[y][x]
+ # tuple
+ pixel.r = min(color[0], 255)
+ pixel.g = min(color[1], 255)
+ pixel.b = min(color[2], 255)
+ pixel.a = 255
+
+
+class _PyAccess32_4(PyAccess):
+ """RGBA etc, all 4 bytes of a 32 bit word"""
+
+ def _post_init(self, *args, **kwargs):
+ self.pixels = ffi.cast("struct Pixel_RGBA **", self.image32)
+
+ def get_pixel(self, x, y):
+ pixel = self.pixels[y][x]
+ return pixel.r, pixel.g, pixel.b, pixel.a
+
+ def set_pixel(self, x, y, color):
+ pixel = self.pixels[y][x]
+ # tuple
+ pixel.r = min(color[0], 255)
+ pixel.g = min(color[1], 255)
+ pixel.b = min(color[2], 255)
+ pixel.a = min(color[3], 255)
+
+
+class _PyAccess8(PyAccess):
+ """1, L, P, 8 bit images stored as uint8"""
+
+ def _post_init(self, *args, **kwargs):
+ self.pixels = self.image8
+
+ def get_pixel(self, x, y):
+ return self.pixels[y][x]
+
+ def set_pixel(self, x, y, color):
+ try:
+ # integer
+ self.pixels[y][x] = min(color, 255)
+ except TypeError:
+ # tuple
+ self.pixels[y][x] = min(color[0], 255)
+
+
+class _PyAccessI16_N(PyAccess):
+ """I;16 access, native bitendian without conversion"""
+
+ def _post_init(self, *args, **kwargs):
+ self.pixels = ffi.cast("unsigned short **", self.image)
+
+ def get_pixel(self, x, y):
+ return self.pixels[y][x]
+
+ def set_pixel(self, x, y, color):
+ try:
+ # integer
+ self.pixels[y][x] = min(color, 65535)
+ except TypeError:
+ # tuple
+ self.pixels[y][x] = min(color[0], 65535)
+
+
+class _PyAccessI16_L(PyAccess):
+ """I;16L access, with conversion"""
+
+ def _post_init(self, *args, **kwargs):
+ self.pixels = ffi.cast("struct Pixel_I16 **", self.image)
+
+ def get_pixel(self, x, y):
+ pixel = self.pixels[y][x]
+ return pixel.l + pixel.r * 256
+
+ def set_pixel(self, x, y, color):
+ pixel = self.pixels[y][x]
+ try:
+ color = min(color, 65535)
+ except TypeError:
+ color = min(color[0], 65535)
+
+ pixel.l = color & 0xFF # noqa: E741
+ pixel.r = color >> 8
+
+
+class _PyAccessI16_B(PyAccess):
+ """I;16B access, with conversion"""
+
+ def _post_init(self, *args, **kwargs):
+ self.pixels = ffi.cast("struct Pixel_I16 **", self.image)
+
+ def get_pixel(self, x, y):
+ pixel = self.pixels[y][x]
+ return pixel.l * 256 + pixel.r
+
+ def set_pixel(self, x, y, color):
+ pixel = self.pixels[y][x]
+ try:
+ color = min(color, 65535)
+ except Exception:
+ color = min(color[0], 65535)
+
+ pixel.l = color >> 8 # noqa: E741
+ pixel.r = color & 0xFF
+
+
+class _PyAccessI32_N(PyAccess):
+ """Signed Int32 access, native endian"""
+
+ def _post_init(self, *args, **kwargs):
+ self.pixels = self.image32
+
+ def get_pixel(self, x, y):
+ return self.pixels[y][x]
+
+ def set_pixel(self, x, y, color):
+ self.pixels[y][x] = color
+
+
+class _PyAccessI32_Swap(PyAccess):
+ """I;32L/B access, with byteswapping conversion"""
+
+ def _post_init(self, *args, **kwargs):
+ self.pixels = self.image32
+
+ def reverse(self, i):
+ orig = ffi.new("int *", i)
+ chars = ffi.cast("unsigned char *", orig)
+ chars[0], chars[1], chars[2], chars[3] = chars[3], chars[2], chars[1], chars[0]
+ return ffi.cast("int *", chars)[0]
+
+ def get_pixel(self, x, y):
+ return self.reverse(self.pixels[y][x])
+
+ def set_pixel(self, x, y, color):
+ self.pixels[y][x] = self.reverse(color)
+
+
+class _PyAccessF(PyAccess):
+ """32 bit float access"""
+
+ def _post_init(self, *args, **kwargs):
+ self.pixels = ffi.cast("float **", self.image32)
+
+ def get_pixel(self, x, y):
+ return self.pixels[y][x]
+
+ def set_pixel(self, x, y, color):
+ try:
+ # not a tuple
+ self.pixels[y][x] = color
+ except TypeError:
+ # tuple
+ self.pixels[y][x] = color[0]
+
+
+mode_map = {
+ "1": _PyAccess8,
+ "L": _PyAccess8,
+ "P": _PyAccess8,
+ "I;16N": _PyAccessI16_N,
+ "LA": _PyAccess32_2,
+ "La": _PyAccess32_2,
+ "PA": _PyAccess32_2,
+ "RGB": _PyAccess32_3,
+ "LAB": _PyAccess32_3,
+ "HSV": _PyAccess32_3,
+ "YCbCr": _PyAccess32_3,
+ "RGBA": _PyAccess32_4,
+ "RGBa": _PyAccess32_4,
+ "RGBX": _PyAccess32_4,
+ "CMYK": _PyAccess32_4,
+ "F": _PyAccessF,
+ "I": _PyAccessI32_N,
+}
+
+if sys.byteorder == "little":
+ mode_map["I;16"] = _PyAccessI16_N
+ mode_map["I;16L"] = _PyAccessI16_N
+ mode_map["I;16B"] = _PyAccessI16_B
+
+ mode_map["I;32L"] = _PyAccessI32_N
+ mode_map["I;32B"] = _PyAccessI32_Swap
+else:
+ mode_map["I;16"] = _PyAccessI16_L
+ mode_map["I;16L"] = _PyAccessI16_L
+ mode_map["I;16B"] = _PyAccessI16_N
+
+ mode_map["I;32L"] = _PyAccessI32_Swap
+ mode_map["I;32B"] = _PyAccessI32_N
+
+
+def new(img, readonly=False):
+ access_type = mode_map.get(img.mode, None)
+ if not access_type:
+ logger.debug("PyAccess Not Implemented: %s", img.mode)
+ return None
+ return access_type(img, readonly)
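+
+# Usage sketch: when the cffi path is active, Image.load() may return a
+# PyAccess instance built by new() above (deprecated, per the deprecate()
+# call in __init__), so pixels are read and written by (x, y) coordinate:
+def _demo_pyaccess():
+    from PIL import Image
+
+    im = Image.new("RGB", (4, 4))
+    px = im.load()
+    px[0, 0] = (255, 0, 0)
+    return px[0, 0]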
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/QoiImagePlugin.py b/Backend/venv/lib/python3.12/site-packages/PIL/QoiImagePlugin.py
new file mode 100644
index 00000000..66344faa
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/PIL/QoiImagePlugin.py
@@ -0,0 +1,105 @@
+#
+# The Python Imaging Library.
+#
+# QOI support for PIL
+#
+# See the README file for information on usage and redistribution.
+#
+
+import os
+
+from . import Image, ImageFile
+from ._binary import i32be as i32
+from ._binary import o8
+
+
+def _accept(prefix):
+ return prefix[:4] == b"qoif"
+
+
+class QoiImageFile(ImageFile.ImageFile):
+ format = "QOI"
+ format_description = "Quite OK Image"
+
+ def _open(self):
+ if not _accept(self.fp.read(4)):
+ msg = "not a QOI file"
+ raise SyntaxError(msg)
+
+ self._size = tuple(i32(self.fp.read(4)) for i in range(2))
+
+ channels = self.fp.read(1)[0]
+ self._mode = "RGB" if channels == 3 else "RGBA"
+
+ self.fp.seek(1, os.SEEK_CUR) # colorspace
+ self.tile = [("qoi", (0, 0) + self._size, self.fp.tell(), None)]
+
+
+class QoiDecoder(ImageFile.PyDecoder):
+ _pulls_fd = True
+
+ def _add_to_previous_pixels(self, value):
+ self._previous_pixel = value
+
+ r, g, b, a = value
+ hash_value = (r * 3 + g * 5 + b * 7 + a * 11) % 64
+ self._previously_seen_pixels[hash_value] = value
+
+ def decode(self, buffer):
+ self._previously_seen_pixels = {}
+ self._previous_pixel = None
+ self._add_to_previous_pixels(b"".join(o8(i) for i in (0, 0, 0, 255)))
+
+ data = bytearray()
+ bands = Image.getmodebands(self.mode)
+ while len(data) < self.state.xsize * self.state.ysize * bands:
+ byte = self.fd.read(1)[0]
+ if byte == 0b11111110: # QOI_OP_RGB
+ value = self.fd.read(3) + self._previous_pixel[3:]
+ elif byte == 0b11111111: # QOI_OP_RGBA
+ value = self.fd.read(4)
+ else:
+ op = byte >> 6
+ if op == 0: # QOI_OP_INDEX
+ op_index = byte & 0b00111111
+ value = self._previously_seen_pixels.get(op_index, (0, 0, 0, 0))
+ elif op == 1: # QOI_OP_DIFF
+ value = (
+ (self._previous_pixel[0] + ((byte & 0b00110000) >> 4) - 2)
+ % 256,
+ (self._previous_pixel[1] + ((byte & 0b00001100) >> 2) - 2)
+ % 256,
+ (self._previous_pixel[2] + (byte & 0b00000011) - 2) % 256,
+ )
+ value += (self._previous_pixel[3],)
+ elif op == 2: # QOI_OP_LUMA
+ second_byte = self.fd.read(1)[0]
+ diff_green = (byte & 0b00111111) - 32
+ diff_red = ((second_byte & 0b11110000) >> 4) - 8
+ diff_blue = (second_byte & 0b00001111) - 8
+
+ value = tuple(
+ (self._previous_pixel[i] + diff_green + diff) % 256
+ for i, diff in enumerate((diff_red, 0, diff_blue))
+ )
+ value += (self._previous_pixel[3],)
+ elif op == 3: # QOI_OP_RUN
+ run_length = (byte & 0b00111111) + 1
+ value = self._previous_pixel
+ if bands == 3:
+ value = value[:3]
+ data += value * run_length
+ continue
+ value = b"".join(o8(i) for i in value)
+ self._add_to_previous_pixels(value)
+
+ if bands == 3:
+ value = value[:3]
+ data += value
+ self.set_as_raw(bytes(data))
+ return -1, 0
+
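+# Index sketch: QOI_OP_INDEX refers back to a 64-slot table keyed by the same
+# pixel hash _add_to_previous_pixels computes above.
+def _demo_qoi_hash(r, g, b, a=255):
+    return (r * 3 + g * 5 + b * 7 + a * 11) % 64
+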
+
+Image.register_open(QoiImageFile.format, QoiImageFile, _accept)
+Image.register_decoder("qoi", QoiDecoder)
+Image.register_extension(QoiImageFile.format, ".qoi")
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/SgiImagePlugin.py b/Backend/venv/lib/python3.12/site-packages/PIL/SgiImagePlugin.py
new file mode 100644
index 00000000..acb9ce5a
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/PIL/SgiImagePlugin.py
@@ -0,0 +1,231 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# SGI image file handling
+#
+# See "The SGI Image File Format (Draft version 0.97)", Paul Haeberli.
+#
+#
+#
+# History:
+# 2017-07-22 mb Add RLE decompression
+# 2016-10-16 mb Add save method without compression
+# 1995-09-10 fl Created
+#
+# Copyright (c) 2016 by Mickael Bonfill.
+# Copyright (c) 2008 by Karsten Hiddemann.
+# Copyright (c) 1997 by Secret Labs AB.
+# Copyright (c) 1995 by Fredrik Lundh.
+#
+# See the README file for information on usage and redistribution.
+#
+
+
+import os
+import struct
+
+from . import Image, ImageFile
+from ._binary import i16be as i16
+from ._binary import o8
+
+
+def _accept(prefix):
+ return len(prefix) >= 2 and i16(prefix) == 474
+
+
+MODES = {
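+    # (bpc, dimension, zsize) -> rawmode; e.g. (1, 3, 3) is 8-bit RGB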
+ (1, 1, 1): "L",
+ (1, 2, 1): "L",
+ (2, 1, 1): "L;16B",
+ (2, 2, 1): "L;16B",
+ (1, 3, 3): "RGB",
+ (2, 3, 3): "RGB;16B",
+ (1, 3, 4): "RGBA",
+ (2, 3, 4): "RGBA;16B",
+}
+
+
+##
+# Image plugin for SGI images.
+class SgiImageFile(ImageFile.ImageFile):
+ format = "SGI"
+ format_description = "SGI Image File Format"
+
+ def _open(self):
+ # HEAD
+ headlen = 512
+ s = self.fp.read(headlen)
+
+ if not _accept(s):
+ msg = "Not an SGI image file"
+ raise ValueError(msg)
+
+ # compression : verbatim or RLE
+ compression = s[2]
+
+ # bpc : 1 or 2 bytes (8bits or 16bits)
+ bpc = s[3]
+
+ # dimension : 1, 2 or 3 (depending on xsize, ysize and zsize)
+ dimension = i16(s, 4)
+
+ # xsize : width
+ xsize = i16(s, 6)
+
+ # ysize : height
+ ysize = i16(s, 8)
+
+ # zsize : channels count
+ zsize = i16(s, 10)
+
+ # layout
+ layout = bpc, dimension, zsize
+
+ # determine mode from bits/zsize
+ rawmode = ""
+ try:
+ rawmode = MODES[layout]
+ except KeyError:
+ pass
+
+ if rawmode == "":
+ msg = "Unsupported SGI image mode"
+ raise ValueError(msg)
+
+ self._size = xsize, ysize
+ self._mode = rawmode.split(";")[0]
+ if self.mode == "RGB":
+ self.custom_mimetype = "image/rgb"
+
+ # orientation -1 : scanlines begins at the bottom-left corner
+ orientation = -1
+
+ # decoder info
+ if compression == 0:
+ pagesize = xsize * ysize * bpc
+ if bpc == 2:
+ self.tile = [
+ ("SGI16", (0, 0) + self.size, headlen, (self.mode, 0, orientation))
+ ]
+ else:
+ self.tile = []
+ offset = headlen
+ for layer in self.mode:
+ self.tile.append(
+ ("raw", (0, 0) + self.size, offset, (layer, 0, orientation))
+ )
+ offset += pagesize
+ elif compression == 1:
+ self.tile = [
+ ("sgi_rle", (0, 0) + self.size, headlen, (rawmode, orientation, bpc))
+ ]
+
+
+def _save(im, fp, filename):
+ if im.mode != "RGB" and im.mode != "RGBA" and im.mode != "L":
+ msg = "Unsupported SGI image mode"
+ raise ValueError(msg)
+
+ # Get the keyword arguments
+ info = im.encoderinfo
+
+ # Byte-per-pixel precision, 1 = 8bits per pixel
+ bpc = info.get("bpc", 1)
+
+ if bpc not in (1, 2):
+ msg = "Unsupported number of bytes per pixel"
+ raise ValueError(msg)
+
+ # Flip the image, since the origin of SGI file is the bottom-left corner
+ orientation = -1
+ # Define the file as SGI File Format
+ magic_number = 474
+ # Run-Length Encoding Compression - Unsupported at this time
+ rle = 0
+
+ # Number of dimensions (x,y,z)
+ dim = 3
+ # X Dimension = width / Y Dimension = height
+ x, y = im.size
+ if im.mode == "L" and y == 1:
+ dim = 1
+ elif im.mode == "L":
+ dim = 2
+ # Z Dimension: Number of channels
+ z = len(im.mode)
+
+ if dim == 1 or dim == 2:
+ z = 1
+
+ # assert we've got the right number of bands.
+ if len(im.getbands()) != z:
+ msg = f"incorrect number of bands in SGI write: {z} vs {len(im.getbands())}"
+ raise ValueError(msg)
+
+ # Minimum Byte value
+ pinmin = 0
+ # Maximum Byte value (255 = 8bits per pixel)
+ pinmax = 255
+ # Image name (79 characters max, truncated below in write)
+ img_name = os.path.splitext(os.path.basename(filename))[0]
+ img_name = img_name.encode("ascii", "ignore")
+ # Standard representation of pixel in the file
+ colormap = 0
+ fp.write(struct.pack(">h", magic_number))
+ fp.write(o8(rle))
+ fp.write(o8(bpc))
+ fp.write(struct.pack(">H", dim))
+ fp.write(struct.pack(">H", x))
+ fp.write(struct.pack(">H", y))
+ fp.write(struct.pack(">H", z))
+ fp.write(struct.pack(">l", pinmin))
+ fp.write(struct.pack(">l", pinmax))
+ fp.write(struct.pack("4s", b"")) # dummy
+ fp.write(struct.pack("79s", img_name)) # truncates to 79 chars
+ fp.write(struct.pack("s", b"")) # force null byte after img_name
+ fp.write(struct.pack(">l", colormap))
+ fp.write(struct.pack("404s", b"")) # dummy
+
+ rawmode = "L"
+ if bpc == 2:
+ rawmode = "L;16B"
+
+ for channel in im.split():
+ fp.write(channel.tobytes("raw", rawmode, 0, orientation))
+
+ if hasattr(fp, "flush"):
+ fp.flush()
+
+
+class SGI16Decoder(ImageFile.PyDecoder):
+ _pulls_fd = True
+
+ def decode(self, buffer):
+ rawmode, stride, orientation = self.args
+ pagesize = self.state.xsize * self.state.ysize
+ zsize = len(self.mode)
+ self.fd.seek(512)
+
+ for band in range(zsize):
+ channel = Image.new("L", (self.state.xsize, self.state.ysize))
+ channel.frombytes(
+ self.fd.read(2 * pagesize), "raw", "L;16B", stride, orientation
+ )
+ self.im.putband(channel.im, band)
+
+ return -1, 0
+
+
+#
+# registry
+
+
+Image.register_decoder("SGI16", SGI16Decoder)
+Image.register_open(SgiImageFile.format, SgiImageFile, _accept)
+Image.register_save(SgiImageFile.format, _save)
+Image.register_mime(SgiImageFile.format, "image/sgi")
+
+Image.register_extensions(SgiImageFile.format, [".bw", ".rgb", ".rgba", ".sgi"])
+
+# End of file
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/SpiderImagePlugin.py b/Backend/venv/lib/python3.12/site-packages/PIL/SpiderImagePlugin.py
new file mode 100644
index 00000000..408b982b
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/PIL/SpiderImagePlugin.py
@@ -0,0 +1,318 @@
+#
+# The Python Imaging Library.
+#
+# SPIDER image file handling
+#
+# History:
+# 2004-08-02 Created BB
+# 2006-03-02 added save method
+# 2006-03-13 added support for stack images
+#
+# Copyright (c) 2004 by Health Research Inc. (HRI) RENSSELAER, NY 12144.
+# Copyright (c) 2004 by William Baxter.
+# Copyright (c) 2004 by Secret Labs AB.
+# Copyright (c) 2004 by Fredrik Lundh.
+#
+
+##
+# Image plugin for the Spider image format. This format is used
+# by the SPIDER software, in processing image data from electron
+# microscopy and tomography.
+##
+
+#
+# SpiderImagePlugin.py
+#
+# The Spider image format is used by SPIDER software, in processing
+# image data from electron microscopy and tomography.
+#
+# Spider home page:
+# https://spider.wadsworth.org/spider_doc/spider/docs/spider.html
+#
+# Details about the Spider image format:
+# https://spider.wadsworth.org/spider_doc/spider/docs/image_doc.html
+#
+import os
+import struct
+import sys
+
+from . import Image, ImageFile
+
+
+def isInt(f):
+ try:
+ i = int(f)
+ if f - i == 0:
+ return 1
+ else:
+ return 0
+ except (ValueError, OverflowError):
+ return 0
+
+
+iforms = [1, 3, -11, -12, -21, -22]
+
+
+# There is no magic number to identify Spider files, so just check a
+# series of header locations to see if they have reasonable values.
+# Returns the number of bytes in the header if it is a valid Spider
+# header; otherwise returns 0.
+
+
+def isSpiderHeader(t):
+    h = (99,) + t  # prepend one value so Spider's 1-based header indexing works
+ # header values 1,2,5,12,13,22,23 should be integers
+ for i in [1, 2, 5, 12, 13, 22, 23]:
+ if not isInt(h[i]):
+ return 0
+ # check iform
+ iform = int(h[5])
+ if iform not in iforms:
+ return 0
+ # check other header values
+ labrec = int(h[13]) # no. records in file header
+ labbyt = int(h[22]) # total no. of bytes in header
+ lenbyt = int(h[23]) # record length in bytes
+ if labbyt != (labrec * lenbyt):
+ return 0
+ # looks like a valid header
+ return labbyt
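+
+# Worked example (illustrative): a header reporting labrec=1, lenbyt=1024
+# and labbyt=1024 is self-consistent (1024 == 1 * 1024), so isSpiderHeader
+# returns 1024 -- the byte offset at which pixel data begins.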
+
+
+def isSpiderImage(filename):
+ with open(filename, "rb") as fp:
+ f = fp.read(92) # read 23 * 4 bytes
+ t = struct.unpack(">23f", f) # try big-endian first
+ hdrlen = isSpiderHeader(t)
+ if hdrlen == 0:
+ t = struct.unpack("<23f", f) # little-endian
+ hdrlen = isSpiderHeader(t)
+ return hdrlen
+
+
+class SpiderImageFile(ImageFile.ImageFile):
+ format = "SPIDER"
+ format_description = "Spider 2D image"
+ _close_exclusive_fp_after_loading = False
+
+ def _open(self):
+ # check header
+ n = 27 * 4 # read 27 float values
+ f = self.fp.read(n)
+
+ try:
+ self.bigendian = 1
+ t = struct.unpack(">27f", f) # try big-endian first
+ hdrlen = isSpiderHeader(t)
+ if hdrlen == 0:
+ self.bigendian = 0
+ t = struct.unpack("<27f", f) # little-endian
+ hdrlen = isSpiderHeader(t)
+ if hdrlen == 0:
+ msg = "not a valid Spider file"
+ raise SyntaxError(msg)
+ except struct.error as e:
+ msg = "not a valid Spider file"
+ raise SyntaxError(msg) from e
+
+ h = (99,) + t # add 1 value : spider header index starts at 1
+ iform = int(h[5])
+ if iform != 1:
+ msg = "not a Spider 2D image"
+ raise SyntaxError(msg)
+
+ self._size = int(h[12]), int(h[2]) # size in pixels (width, height)
+ self.istack = int(h[24])
+ self.imgnumber = int(h[27])
+
+ if self.istack == 0 and self.imgnumber == 0:
+ # stk=0, img=0: a regular 2D image
+ offset = hdrlen
+ self._nimages = 1
+ elif self.istack > 0 and self.imgnumber == 0:
+ # stk>0, img=0: Opening the stack for the first time
+ self.imgbytes = int(h[12]) * int(h[2]) * 4
+ self.hdrlen = hdrlen
+ self._nimages = int(h[26])
+ # Point to the first image in the stack
+ offset = hdrlen * 2
+ self.imgnumber = 1
+ elif self.istack == 0 and self.imgnumber > 0:
+ # stk=0, img>0: an image within the stack
+ offset = hdrlen + self.stkoffset
+ self.istack = 2 # So Image knows it's still a stack
+ else:
+ msg = "inconsistent stack header values"
+ raise SyntaxError(msg)
+
+ if self.bigendian:
+ self.rawmode = "F;32BF"
+ else:
+ self.rawmode = "F;32F"
+ self._mode = "F"
+
+ self.tile = [("raw", (0, 0) + self.size, offset, (self.rawmode, 0, 1))]
+ self._fp = self.fp # FIXME: hack
+
+ @property
+ def n_frames(self):
+ return self._nimages
+
+ @property
+ def is_animated(self):
+ return self._nimages > 1
+
+ # 1st image index is zero (although SPIDER imgnumber starts at 1)
+ def tell(self):
+ if self.imgnumber < 1:
+ return 0
+ else:
+ return self.imgnumber - 1
+
+ def seek(self, frame):
+ if self.istack == 0:
+ msg = "attempt to seek in a non-stack file"
+ raise EOFError(msg)
+ if not self._seek_check(frame):
+ return
+ self.stkoffset = self.hdrlen + frame * (self.hdrlen + self.imgbytes)
+ self.fp = self._fp
+ self.fp.seek(self.stkoffset)
+ self._open()
+
+ # returns a byte image after rescaling to 0..255
+ def convert2byte(self, depth=255):
+ (minimum, maximum) = self.getextrema()
+ m = 1
+ if maximum != minimum:
+ m = depth / (maximum - minimum)
+ b = -m * minimum
+ return self.point(lambda i, m=m, b=b: i * m + b).convert("L")
+
+ # returns a ImageTk.PhotoImage object, after rescaling to 0..255
+ def tkPhotoImage(self):
+ from . import ImageTk
+
+ return ImageTk.PhotoImage(self.convert2byte(), palette=256)
+
+
+# --------------------------------------------------------------------
+# Image series
+
+
+# given a list of filenames, return a list of images
+def loadImageSeries(filelist=None):
+ """create a list of :py:class:`~PIL.Image.Image` objects for use in a montage"""
+ if filelist is None or len(filelist) < 1:
+ return
+
+ imglist = []
+ for img in filelist:
+ if not os.path.exists(img):
+ print(f"unable to find {img}")
+ continue
+ try:
+ with Image.open(img) as im:
+ im = im.convert2byte()
+ except Exception:
+ if not isSpiderImage(img):
+ print(img + " is not a Spider image file")
+ continue
+ im.info["filename"] = img
+ imglist.append(im)
+ return imglist
+
+
+# --------------------------------------------------------------------
+# For saving images in Spider format
+
+
+def makeSpiderHeader(im):
+ nsam, nrow = im.size
+    lenbyt = nsam * 4  # record length in bytes; the header holds labrec such records
+ labrec = int(1024 / lenbyt)
+ if 1024 % lenbyt != 0:
+ labrec += 1
+ labbyt = labrec * lenbyt
+ nvalues = int(labbyt / 4)
+ if nvalues < 23:
+ return []
+
+ hdr = []
+ for i in range(nvalues):
+ hdr.append(0.0)
+
+ # NB these are Fortran indices
+ hdr[1] = 1.0 # nslice (=1 for an image)
+ hdr[2] = float(nrow) # number of rows per slice
+ hdr[3] = float(nrow) # number of records in the image
+ hdr[5] = 1.0 # iform for 2D image
+ hdr[12] = float(nsam) # number of pixels per line
+ hdr[13] = float(labrec) # number of records in file header
+ hdr[22] = float(labbyt) # total number of bytes in header
+ hdr[23] = float(lenbyt) # record length in bytes
+
+ # adjust for Fortran indexing
+ hdr = hdr[1:]
+ hdr.append(0.0)
+ # pack binary data into a string
+ return [struct.pack("f", v) for v in hdr]
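+
+# Worked example (illustrative): a 100x100 image gives lenbyt = 400,
+# labrec = 3 (1024 / 400 rounds up), labbyt = 1200 and nvalues = 300, so
+# makeSpiderHeader returns 300 packed floats -- a 1200-byte header.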
+
+
+def _save(im, fp, filename):
+ if im.mode[0] != "F":
+ im = im.convert("F")
+
+ hdr = makeSpiderHeader(im)
+ if len(hdr) < 256:
+ msg = "Error creating Spider header"
+ raise OSError(msg)
+
+ # write the SPIDER header
+ fp.writelines(hdr)
+
+ rawmode = "F;32NF" # 32-bit native floating point
+ ImageFile._save(im, fp, [("raw", (0, 0) + im.size, 0, (rawmode, 0, 1))])
+
+
+def _save_spider(im, fp, filename):
+ # get the filename extension and register it with Image
+ ext = os.path.splitext(filename)[1]
+ Image.register_extension(SpiderImageFile.format, ext)
+ _save(im, fp, filename)
+
+
+# --------------------------------------------------------------------
+
+
+Image.register_open(SpiderImageFile.format, SpiderImageFile)
+Image.register_save(SpiderImageFile.format, _save_spider)
+
+if __name__ == "__main__":
+ if len(sys.argv) < 2:
+ print("Syntax: python3 SpiderImagePlugin.py [infile] [outfile]")
+ sys.exit()
+
+ filename = sys.argv[1]
+ if not isSpiderImage(filename):
+ print("input image must be in Spider format")
+ sys.exit()
+
+ with Image.open(filename) as im:
+ print("image: " + str(im))
+ print("format: " + str(im.format))
+ print("size: " + str(im.size))
+ print("mode: " + str(im.mode))
+ print("max, min: ", end=" ")
+ print(im.getextrema())
+
+ if len(sys.argv) > 2:
+ outfile = sys.argv[2]
+
+ # perform some image operation
+ im = im.transpose(Image.Transpose.FLIP_LEFT_RIGHT)
+ print(
+ f"saving a flipped version of {os.path.basename(filename)} "
+ f"as {outfile} "
+ )
+ im.save(outfile, SpiderImageFile.format)
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/SunImagePlugin.py b/Backend/venv/lib/python3.12/site-packages/PIL/SunImagePlugin.py
new file mode 100644
index 00000000..6a8d5d86
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/PIL/SunImagePlugin.py
@@ -0,0 +1,139 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# Sun image file handling
+#
+# History:
+# 1995-09-10 fl Created
+# 1996-05-28 fl Fixed 32-bit alignment
+# 1998-12-29 fl Import ImagePalette module
+# 2001-12-18 fl Fixed palette loading (from Jean-Claude Rimbault)
+#
+# Copyright (c) 1997-2001 by Secret Labs AB
+# Copyright (c) 1995-1996 by Fredrik Lundh
+#
+# See the README file for information on usage and redistribution.
+#
+
+
+from . import Image, ImageFile, ImagePalette
+from ._binary import i32be as i32
+
+
+def _accept(prefix):
+ return len(prefix) >= 4 and i32(prefix) == 0x59A66A95
+
+
+##
+# Image plugin for Sun raster files.
+
+
+class SunImageFile(ImageFile.ImageFile):
+ format = "SUN"
+ format_description = "Sun Raster File"
+
+ def _open(self):
+ # The Sun Raster file header is 32 bytes in length
+ # and has the following format:
+
+ # typedef struct _SunRaster
+ # {
+ # DWORD MagicNumber; /* Magic (identification) number */
+ # DWORD Width; /* Width of image in pixels */
+ # DWORD Height; /* Height of image in pixels */
+ # DWORD Depth; /* Number of bits per pixel */
+ # DWORD Length; /* Size of image data in bytes */
+ # DWORD Type; /* Type of raster file */
+ # DWORD ColorMapType; /* Type of color map */
+ # DWORD ColorMapLength; /* Size of the color map in bytes */
+ # } SUNRASTER;
+
+ # HEAD
+ s = self.fp.read(32)
+ if not _accept(s):
+ msg = "not an SUN raster file"
+ raise SyntaxError(msg)
+
+ offset = 32
+
+ self._size = i32(s, 4), i32(s, 8)
+
+ depth = i32(s, 12)
+ # data_length = i32(s, 16) # unreliable, ignore.
+ file_type = i32(s, 20)
+ palette_type = i32(s, 24) # 0: None, 1: RGB, 2: Raw/arbitrary
+ palette_length = i32(s, 28)
+
+ if depth == 1:
+ self._mode, rawmode = "1", "1;I"
+ elif depth == 4:
+ self._mode, rawmode = "L", "L;4"
+ elif depth == 8:
+ self._mode = rawmode = "L"
+ elif depth == 24:
+ if file_type == 3:
+ self._mode, rawmode = "RGB", "RGB"
+ else:
+ self._mode, rawmode = "RGB", "BGR"
+ elif depth == 32:
+ if file_type == 3:
+ self._mode, rawmode = "RGB", "RGBX"
+ else:
+ self._mode, rawmode = "RGB", "BGRX"
+ else:
+ msg = "Unsupported Mode/Bit Depth"
+ raise SyntaxError(msg)
+
+ if palette_length:
+ if palette_length > 1024:
+ msg = "Unsupported Color Palette Length"
+ raise SyntaxError(msg)
+
+ if palette_type != 1:
+ msg = "Unsupported Palette Type"
+ raise SyntaxError(msg)
+
+ offset = offset + palette_length
+ self.palette = ImagePalette.raw("RGB;L", self.fp.read(palette_length))
+ if self.mode == "L":
+ self._mode = "P"
+ rawmode = rawmode.replace("L", "P")
+
+ # 16 bit boundaries on stride
+ stride = ((self.size[0] * depth + 15) // 16) * 2
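+        # e.g. a 10-pixel-wide 1-bit image: (10 * 1 + 15) // 16 * 2 = 2
+        # bytes per row, since each row is padded to a 16-bit boundary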
+
+ # file type: Type is the version (or flavor) of the bitmap
+ # file. The following values are typically found in the Type
+ # field:
+ # 0000h Old
+ # 0001h Standard
+ # 0002h Byte-encoded
+ # 0003h RGB format
+ # 0004h TIFF format
+ # 0005h IFF format
+ # FFFFh Experimental
+
+ # Old and standard are the same, except for the length tag.
+ # byte-encoded is run-length-encoded
+ # RGB looks similar to standard, but RGB byte order
+ # TIFF and IFF mean that they were converted from T/IFF
+ # Experimental means that it's something else.
+ # (https://www.fileformat.info/format/sunraster/egff.htm)
+
+ if file_type in (0, 1, 3, 4, 5):
+ self.tile = [("raw", (0, 0) + self.size, offset, (rawmode, stride))]
+ elif file_type == 2:
+ self.tile = [("sun_rle", (0, 0) + self.size, offset, rawmode)]
+ else:
+ msg = "Unsupported Sun Raster file type"
+ raise SyntaxError(msg)
+
+
+#
+# registry
+
+
+Image.register_open(SunImageFile.format, SunImageFile, _accept)
+
+Image.register_extension(SunImageFile.format, ".ras")
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/TarIO.py b/Backend/venv/lib/python3.12/site-packages/PIL/TarIO.py
new file mode 100644
index 00000000..32928f6a
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/PIL/TarIO.py
@@ -0,0 +1,66 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# read files from within a tar file
+#
+# History:
+# 95-06-18 fl Created
+# 96-05-28 fl Open files in binary mode
+#
+# Copyright (c) Secret Labs AB 1997.
+# Copyright (c) Fredrik Lundh 1995-96.
+#
+# See the README file for information on usage and redistribution.
+#
+
+import io
+
+from . import ContainerIO
+
+
+class TarIO(ContainerIO.ContainerIO):
+ """A file object that provides read access to a given member of a TAR file."""
+
+ def __init__(self, tarfile, file):
+ """
+ Create file object.
+
+ :param tarfile: Name of TAR file.
+ :param file: Name of member file.
+ """
+ self.fh = open(tarfile, "rb")
+
+ while True:
+ s = self.fh.read(512)
+ if len(s) != 512:
+ msg = "unexpected end of tar file"
+ raise OSError(msg)
+
+ name = s[:100].decode("utf-8")
+ i = name.find("\0")
+ if i == 0:
+ msg = "cannot find subfile"
+ raise OSError(msg)
+ if i > 0:
+ name = name[:i]
+
+ size = int(s[124:135], 8)
+
+ if file == name:
+ break
+
+ self.fh.seek((size + 511) & (~511), io.SEEK_CUR)
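+            # tar data is stored in 512-byte blocks: (size + 511) & ~511
+            # rounds up, e.g. a 100-byte member is followed by 412 pad bytes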
+
+ # Open region
+ super().__init__(self.fh, self.fh.tell(), size)
+
+ # Context manager support
+ def __enter__(self):
+ return self
+
+ def __exit__(self, *args):
+ self.close()
+
+ def close(self):
+ self.fh.close()
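+
+
+# Usage sketch (assumption; file and member names are hypothetical): a
+# TarIO object can be handed straight to Image.open() to read a member
+# image without extracting the archive:
+#
+#     from PIL import Image, TarIO
+#
+#     fp = TarIO.TarIO("images.tar", "hopper.jpg")
+#     with Image.open(fp) as im:
+#         im.load()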
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/TgaImagePlugin.py b/Backend/venv/lib/python3.12/site-packages/PIL/TgaImagePlugin.py
new file mode 100644
index 00000000..f24ee4f5
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/PIL/TgaImagePlugin.py
@@ -0,0 +1,255 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# TGA file handling
+#
+# History:
+# 95-09-01 fl created (reads 24-bit files only)
+# 97-01-04 fl support more TGA versions, including compressed images
+# 98-07-04 fl fixed orientation and alpha layer bugs
+# 98-09-11 fl fixed orientation for runlength decoder
+#
+# Copyright (c) Secret Labs AB 1997-98.
+# Copyright (c) Fredrik Lundh 1995-97.
+#
+# See the README file for information on usage and redistribution.
+#
+
+
+import warnings
+
+from . import Image, ImageFile, ImagePalette
+from ._binary import i16le as i16
+from ._binary import o8
+from ._binary import o16le as o16
+
+#
+# --------------------------------------------------------------------
+# Read TGA file
+
+
+MODES = {
+ # map imagetype/depth to rawmode
+ (1, 8): "P",
+ (3, 1): "1",
+ (3, 8): "L",
+ (3, 16): "LA",
+ (2, 16): "BGR;5",
+ (2, 24): "BGR",
+ (2, 32): "BGRA",
+}
+
+
+##
+# Image plugin for Targa files.
+
+
+class TgaImageFile(ImageFile.ImageFile):
+ format = "TGA"
+ format_description = "Targa"
+
+ def _open(self):
+ # process header
+ s = self.fp.read(18)
+
+ id_len = s[0]
+
+ colormaptype = s[1]
+ imagetype = s[2]
+
+ depth = s[16]
+
+ flags = s[17]
+
+ self._size = i16(s, 12), i16(s, 14)
+
+ # validate header fields
+ if (
+ colormaptype not in (0, 1)
+ or self.size[0] <= 0
+ or self.size[1] <= 0
+ or depth not in (1, 8, 16, 24, 32)
+ ):
+ msg = "not a TGA file"
+ raise SyntaxError(msg)
+
+ # image mode
+ if imagetype in (3, 11):
+ self._mode = "L"
+ if depth == 1:
+ self._mode = "1" # ???
+ elif depth == 16:
+ self._mode = "LA"
+ elif imagetype in (1, 9):
+ self._mode = "P"
+ elif imagetype in (2, 10):
+ self._mode = "RGB"
+ if depth == 32:
+ self._mode = "RGBA"
+ else:
+ msg = "unknown TGA mode"
+ raise SyntaxError(msg)
+
+ # orientation
+ orientation = flags & 0x30
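+        # bit 4 (0x10) selects right-to-left pixel order; bit 5 (0x20)
+        # selects top-down row order (otherwise rows run bottom-up)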
+ self._flip_horizontally = orientation in [0x10, 0x30]
+ if orientation in [0x20, 0x30]:
+ orientation = 1
+ elif orientation in [0, 0x10]:
+ orientation = -1
+ else:
+ msg = "unknown TGA orientation"
+ raise SyntaxError(msg)
+
+ self.info["orientation"] = orientation
+
+ if imagetype & 8:
+ self.info["compression"] = "tga_rle"
+
+ if id_len:
+ self.info["id_section"] = self.fp.read(id_len)
+
+ if colormaptype:
+ # read palette
+ start, size, mapdepth = i16(s, 3), i16(s, 5), s[7]
+ if mapdepth == 16:
+ self.palette = ImagePalette.raw(
+ "BGR;15", b"\0" * 2 * start + self.fp.read(2 * size)
+ )
+ elif mapdepth == 24:
+ self.palette = ImagePalette.raw(
+ "BGR", b"\0" * 3 * start + self.fp.read(3 * size)
+ )
+ elif mapdepth == 32:
+ self.palette = ImagePalette.raw(
+ "BGRA", b"\0" * 4 * start + self.fp.read(4 * size)
+ )
+
+ # setup tile descriptor
+ try:
+ rawmode = MODES[(imagetype & 7, depth)]
+ if imagetype & 8:
+ # compressed
+ self.tile = [
+ (
+ "tga_rle",
+ (0, 0) + self.size,
+ self.fp.tell(),
+ (rawmode, orientation, depth),
+ )
+ ]
+ else:
+ self.tile = [
+ (
+ "raw",
+ (0, 0) + self.size,
+ self.fp.tell(),
+ (rawmode, 0, orientation),
+ )
+ ]
+ except KeyError:
+ pass # cannot decode
+
+ def load_end(self):
+ if self._flip_horizontally:
+ self.im = self.im.transpose(Image.Transpose.FLIP_LEFT_RIGHT)
+
+
+#
+# --------------------------------------------------------------------
+# Write TGA file
+
+
+SAVE = {
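+    # mode -> (rawmode, bits per pixel, colormaptype, imagetype)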
+ "1": ("1", 1, 0, 3),
+ "L": ("L", 8, 0, 3),
+ "LA": ("LA", 16, 0, 3),
+ "P": ("P", 8, 1, 1),
+ "RGB": ("BGR", 24, 0, 2),
+ "RGBA": ("BGRA", 32, 0, 2),
+}
+
+
+def _save(im, fp, filename):
+ try:
+ rawmode, bits, colormaptype, imagetype = SAVE[im.mode]
+ except KeyError as e:
+ msg = f"cannot write mode {im.mode} as TGA"
+ raise OSError(msg) from e
+
+ if "rle" in im.encoderinfo:
+ rle = im.encoderinfo["rle"]
+ else:
+ compression = im.encoderinfo.get("compression", im.info.get("compression"))
+ rle = compression == "tga_rle"
+ if rle:
+ imagetype += 8
+
+ id_section = im.encoderinfo.get("id_section", im.info.get("id_section", ""))
+ id_len = len(id_section)
+ if id_len > 255:
+ id_len = 255
+ id_section = id_section[:255]
+ warnings.warn("id_section has been trimmed to 255 characters")
+
+ if colormaptype:
+ palette = im.im.getpalette("RGB", "BGR")
+ colormaplength, colormapentry = len(palette) // 3, 24
+ else:
+ colormaplength, colormapentry = 0, 0
+
+ if im.mode in ("LA", "RGBA"):
+ flags = 8
+ else:
+ flags = 0
+
+ orientation = im.encoderinfo.get("orientation", im.info.get("orientation", -1))
+ if orientation > 0:
+ flags = flags | 0x20
+
+ fp.write(
+ o8(id_len)
+ + o8(colormaptype)
+ + o8(imagetype)
+ + o16(0) # colormapfirst
+ + o16(colormaplength)
+ + o8(colormapentry)
+ + o16(0)
+ + o16(0)
+ + o16(im.size[0])
+ + o16(im.size[1])
+ + o8(bits)
+ + o8(flags)
+ )
+
+ if id_section:
+ fp.write(id_section)
+
+ if colormaptype:
+ fp.write(palette)
+
+ if rle:
+ ImageFile._save(
+ im, fp, [("tga_rle", (0, 0) + im.size, 0, (rawmode, orientation))]
+ )
+ else:
+ ImageFile._save(
+ im, fp, [("raw", (0, 0) + im.size, 0, (rawmode, 0, orientation))]
+ )
+
+ # write targa version 2 footer
+ fp.write(b"\000" * 8 + b"TRUEVISION-XFILE." + b"\000")
+
+
+#
+# --------------------------------------------------------------------
+# Registry
+
+
+Image.register_open(TgaImageFile.format, TgaImageFile)
+Image.register_save(TgaImageFile.format, _save)
+
+Image.register_extensions(TgaImageFile.format, [".tga", ".icb", ".vda", ".vst"])
+
+Image.register_mime(TgaImageFile.format, "image/x-tga")
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/TiffImagePlugin.py b/Backend/venv/lib/python3.12/site-packages/PIL/TiffImagePlugin.py
new file mode 100644
index 00000000..dabf8dbf
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/PIL/TiffImagePlugin.py
@@ -0,0 +1,2156 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# TIFF file handling
+#
+# TIFF is a flexible, if somewhat aged, image file format originally
+# defined by Aldus. Although TIFF supports a wide variety of pixel
+# layouts and compression methods, the name doesn't really stand for
+# "thousands of incompatible file formats," it just feels that way.
+#
+# To read TIFF data from a stream, the stream must be seekable. For
+# progressive decoding, make sure to use TIFF files where the tag
+# directory is placed first in the file.
+#
+# History:
+# 1995-09-01 fl Created
+# 1996-05-04 fl Handle JPEGTABLES tag
+# 1996-05-18 fl Fixed COLORMAP support
+# 1997-01-05 fl Fixed PREDICTOR support
+# 1997-08-27 fl Added support for rational tags (from Perry Stoll)
+# 1998-01-10 fl Fixed seek/tell (from Jan Blom)
+# 1998-07-15 fl Use private names for internal variables
+# 1999-06-13 fl Rewritten for PIL 1.0 (1.0)
+# 2000-10-11 fl Additional fixes for Python 2.0 (1.1)
+# 2001-04-17 fl Fixed rewind support (seek to frame 0) (1.2)
+# 2001-05-12 fl Added write support for more tags (from Greg Couch) (1.3)
+# 2001-12-18 fl Added workaround for broken Matrox library
+# 2002-01-18 fl Don't mess up if photometric tag is missing (D. Alan Stewart)
+# 2003-05-19 fl Check FILLORDER tag
+# 2003-09-26 fl Added RGBa support
+# 2004-02-24 fl Added DPI support; fixed rational write support
+# 2005-02-07 fl Added workaround for broken Corel Draw 10 files
+# 2006-01-09 fl Added support for float/double tags (from Russell Nelson)
+#
+# Copyright (c) 1997-2006 by Secret Labs AB. All rights reserved.
+# Copyright (c) 1995-1997 by Fredrik Lundh
+#
+# See the README file for information on usage and redistribution.
+#
+import io
+import itertools
+import logging
+import math
+import os
+import struct
+import warnings
+from collections.abc import MutableMapping
+from fractions import Fraction
+from numbers import Number, Rational
+
+from . import ExifTags, Image, ImageFile, ImageOps, ImagePalette, TiffTags
+from ._binary import i16be as i16
+from ._binary import i32be as i32
+from ._binary import o8
+from .TiffTags import TYPES
+
+logger = logging.getLogger(__name__)
+
+# Set these to true to force use of libtiff for reading or writing.
+READ_LIBTIFF = False
+WRITE_LIBTIFF = False
+IFD_LEGACY_API = True
+STRIP_SIZE = 65536
+
+II = b"II" # little-endian (Intel style)
+MM = b"MM" # big-endian (Motorola style)
+
+#
+# --------------------------------------------------------------------
+# Read TIFF files
+
+# a few tag names, just to make the code below a bit more readable
+IMAGEWIDTH = 256
+IMAGELENGTH = 257
+BITSPERSAMPLE = 258
+COMPRESSION = 259
+PHOTOMETRIC_INTERPRETATION = 262
+FILLORDER = 266
+IMAGEDESCRIPTION = 270
+STRIPOFFSETS = 273
+SAMPLESPERPIXEL = 277
+ROWSPERSTRIP = 278
+STRIPBYTECOUNTS = 279
+X_RESOLUTION = 282
+Y_RESOLUTION = 283
+PLANAR_CONFIGURATION = 284
+RESOLUTION_UNIT = 296
+TRANSFERFUNCTION = 301
+SOFTWARE = 305
+DATE_TIME = 306
+ARTIST = 315
+PREDICTOR = 317
+COLORMAP = 320
+TILEWIDTH = 322
+TILELENGTH = 323
+TILEOFFSETS = 324
+TILEBYTECOUNTS = 325
+SUBIFD = 330
+EXTRASAMPLES = 338
+SAMPLEFORMAT = 339
+JPEGTABLES = 347
+YCBCRSUBSAMPLING = 530
+REFERENCEBLACKWHITE = 532
+COPYRIGHT = 33432
+IPTC_NAA_CHUNK = 33723 # newsphoto properties
+PHOTOSHOP_CHUNK = 34377 # photoshop properties
+ICCPROFILE = 34675
+EXIFIFD = 34665
+XMP = 700
+JPEGQUALITY = 65537 # pseudo-tag by libtiff
+
+# https://github.com/imagej/ImageJA/blob/master/src/main/java/ij/io/TiffDecoder.java
+IMAGEJ_META_DATA_BYTE_COUNTS = 50838
+IMAGEJ_META_DATA = 50839
+
+COMPRESSION_INFO = {
+ # Compression => pil compression name
+ 1: "raw",
+ 2: "tiff_ccitt",
+ 3: "group3",
+ 4: "group4",
+ 5: "tiff_lzw",
+ 6: "tiff_jpeg", # obsolete
+ 7: "jpeg",
+ 8: "tiff_adobe_deflate",
+ 32771: "tiff_raw_16", # 16-bit padding
+ 32773: "packbits",
+ 32809: "tiff_thunderscan",
+ 32946: "tiff_deflate",
+ 34676: "tiff_sgilog",
+ 34677: "tiff_sgilog24",
+ 34925: "lzma",
+ 50000: "zstd",
+ 50001: "webp",
+}
+
+COMPRESSION_INFO_REV = {v: k for k, v in COMPRESSION_INFO.items()}
+
+OPEN_INFO = {
+ # (ByteOrder, PhotoInterpretation, SampleFormat, FillOrder, BitsPerSample,
+ # ExtraSamples) => mode, rawmode
+ (II, 0, (1,), 1, (1,), ()): ("1", "1;I"),
+ (MM, 0, (1,), 1, (1,), ()): ("1", "1;I"),
+ (II, 0, (1,), 2, (1,), ()): ("1", "1;IR"),
+ (MM, 0, (1,), 2, (1,), ()): ("1", "1;IR"),
+ (II, 1, (1,), 1, (1,), ()): ("1", "1"),
+ (MM, 1, (1,), 1, (1,), ()): ("1", "1"),
+ (II, 1, (1,), 2, (1,), ()): ("1", "1;R"),
+ (MM, 1, (1,), 2, (1,), ()): ("1", "1;R"),
+ (II, 0, (1,), 1, (2,), ()): ("L", "L;2I"),
+ (MM, 0, (1,), 1, (2,), ()): ("L", "L;2I"),
+ (II, 0, (1,), 2, (2,), ()): ("L", "L;2IR"),
+ (MM, 0, (1,), 2, (2,), ()): ("L", "L;2IR"),
+ (II, 1, (1,), 1, (2,), ()): ("L", "L;2"),
+ (MM, 1, (1,), 1, (2,), ()): ("L", "L;2"),
+ (II, 1, (1,), 2, (2,), ()): ("L", "L;2R"),
+ (MM, 1, (1,), 2, (2,), ()): ("L", "L;2R"),
+ (II, 0, (1,), 1, (4,), ()): ("L", "L;4I"),
+ (MM, 0, (1,), 1, (4,), ()): ("L", "L;4I"),
+ (II, 0, (1,), 2, (4,), ()): ("L", "L;4IR"),
+ (MM, 0, (1,), 2, (4,), ()): ("L", "L;4IR"),
+ (II, 1, (1,), 1, (4,), ()): ("L", "L;4"),
+ (MM, 1, (1,), 1, (4,), ()): ("L", "L;4"),
+ (II, 1, (1,), 2, (4,), ()): ("L", "L;4R"),
+ (MM, 1, (1,), 2, (4,), ()): ("L", "L;4R"),
+ (II, 0, (1,), 1, (8,), ()): ("L", "L;I"),
+ (MM, 0, (1,), 1, (8,), ()): ("L", "L;I"),
+ (II, 0, (1,), 2, (8,), ()): ("L", "L;IR"),
+ (MM, 0, (1,), 2, (8,), ()): ("L", "L;IR"),
+ (II, 1, (1,), 1, (8,), ()): ("L", "L"),
+ (MM, 1, (1,), 1, (8,), ()): ("L", "L"),
+ (II, 1, (2,), 1, (8,), ()): ("L", "L"),
+ (MM, 1, (2,), 1, (8,), ()): ("L", "L"),
+ (II, 1, (1,), 2, (8,), ()): ("L", "L;R"),
+ (MM, 1, (1,), 2, (8,), ()): ("L", "L;R"),
+ (II, 1, (1,), 1, (12,), ()): ("I;16", "I;12"),
+ (II, 0, (1,), 1, (16,), ()): ("I;16", "I;16"),
+ (II, 1, (1,), 1, (16,), ()): ("I;16", "I;16"),
+ (MM, 1, (1,), 1, (16,), ()): ("I;16B", "I;16B"),
+ (II, 1, (1,), 2, (16,), ()): ("I;16", "I;16R"),
+ (II, 1, (2,), 1, (16,), ()): ("I", "I;16S"),
+ (MM, 1, (2,), 1, (16,), ()): ("I", "I;16BS"),
+ (II, 0, (3,), 1, (32,), ()): ("F", "F;32F"),
+ (MM, 0, (3,), 1, (32,), ()): ("F", "F;32BF"),
+ (II, 1, (1,), 1, (32,), ()): ("I", "I;32N"),
+ (II, 1, (2,), 1, (32,), ()): ("I", "I;32S"),
+ (MM, 1, (2,), 1, (32,), ()): ("I", "I;32BS"),
+ (II, 1, (3,), 1, (32,), ()): ("F", "F;32F"),
+ (MM, 1, (3,), 1, (32,), ()): ("F", "F;32BF"),
+ (II, 1, (1,), 1, (8, 8), (2,)): ("LA", "LA"),
+ (MM, 1, (1,), 1, (8, 8), (2,)): ("LA", "LA"),
+ (II, 2, (1,), 1, (8, 8, 8), ()): ("RGB", "RGB"),
+ (MM, 2, (1,), 1, (8, 8, 8), ()): ("RGB", "RGB"),
+ (II, 2, (1,), 2, (8, 8, 8), ()): ("RGB", "RGB;R"),
+ (MM, 2, (1,), 2, (8, 8, 8), ()): ("RGB", "RGB;R"),
+ (II, 2, (1,), 1, (8, 8, 8, 8), ()): ("RGBA", "RGBA"), # missing ExtraSamples
+ (MM, 2, (1,), 1, (8, 8, 8, 8), ()): ("RGBA", "RGBA"), # missing ExtraSamples
+ (II, 2, (1,), 1, (8, 8, 8, 8), (0,)): ("RGBX", "RGBX"),
+ (MM, 2, (1,), 1, (8, 8, 8, 8), (0,)): ("RGBX", "RGBX"),
+ (II, 2, (1,), 1, (8, 8, 8, 8, 8), (0, 0)): ("RGBX", "RGBXX"),
+ (MM, 2, (1,), 1, (8, 8, 8, 8, 8), (0, 0)): ("RGBX", "RGBXX"),
+ (II, 2, (1,), 1, (8, 8, 8, 8, 8, 8), (0, 0, 0)): ("RGBX", "RGBXXX"),
+ (MM, 2, (1,), 1, (8, 8, 8, 8, 8, 8), (0, 0, 0)): ("RGBX", "RGBXXX"),
+ (II, 2, (1,), 1, (8, 8, 8, 8), (1,)): ("RGBA", "RGBa"),
+ (MM, 2, (1,), 1, (8, 8, 8, 8), (1,)): ("RGBA", "RGBa"),
+ (II, 2, (1,), 1, (8, 8, 8, 8, 8), (1, 0)): ("RGBA", "RGBaX"),
+ (MM, 2, (1,), 1, (8, 8, 8, 8, 8), (1, 0)): ("RGBA", "RGBaX"),
+ (II, 2, (1,), 1, (8, 8, 8, 8, 8, 8), (1, 0, 0)): ("RGBA", "RGBaXX"),
+ (MM, 2, (1,), 1, (8, 8, 8, 8, 8, 8), (1, 0, 0)): ("RGBA", "RGBaXX"),
+ (II, 2, (1,), 1, (8, 8, 8, 8), (2,)): ("RGBA", "RGBA"),
+ (MM, 2, (1,), 1, (8, 8, 8, 8), (2,)): ("RGBA", "RGBA"),
+ (II, 2, (1,), 1, (8, 8, 8, 8, 8), (2, 0)): ("RGBA", "RGBAX"),
+ (MM, 2, (1,), 1, (8, 8, 8, 8, 8), (2, 0)): ("RGBA", "RGBAX"),
+ (II, 2, (1,), 1, (8, 8, 8, 8, 8, 8), (2, 0, 0)): ("RGBA", "RGBAXX"),
+ (MM, 2, (1,), 1, (8, 8, 8, 8, 8, 8), (2, 0, 0)): ("RGBA", "RGBAXX"),
+ (II, 2, (1,), 1, (8, 8, 8, 8), (999,)): ("RGBA", "RGBA"), # Corel Draw 10
+ (MM, 2, (1,), 1, (8, 8, 8, 8), (999,)): ("RGBA", "RGBA"), # Corel Draw 10
+ (II, 2, (1,), 1, (16, 16, 16), ()): ("RGB", "RGB;16L"),
+ (MM, 2, (1,), 1, (16, 16, 16), ()): ("RGB", "RGB;16B"),
+ (II, 2, (1,), 1, (16, 16, 16, 16), ()): ("RGBA", "RGBA;16L"),
+ (MM, 2, (1,), 1, (16, 16, 16, 16), ()): ("RGBA", "RGBA;16B"),
+ (II, 2, (1,), 1, (16, 16, 16, 16), (0,)): ("RGBX", "RGBX;16L"),
+ (MM, 2, (1,), 1, (16, 16, 16, 16), (0,)): ("RGBX", "RGBX;16B"),
+ (II, 2, (1,), 1, (16, 16, 16, 16), (1,)): ("RGBA", "RGBa;16L"),
+ (MM, 2, (1,), 1, (16, 16, 16, 16), (1,)): ("RGBA", "RGBa;16B"),
+ (II, 2, (1,), 1, (16, 16, 16, 16), (2,)): ("RGBA", "RGBA;16L"),
+ (MM, 2, (1,), 1, (16, 16, 16, 16), (2,)): ("RGBA", "RGBA;16B"),
+ (II, 3, (1,), 1, (1,), ()): ("P", "P;1"),
+ (MM, 3, (1,), 1, (1,), ()): ("P", "P;1"),
+ (II, 3, (1,), 2, (1,), ()): ("P", "P;1R"),
+ (MM, 3, (1,), 2, (1,), ()): ("P", "P;1R"),
+ (II, 3, (1,), 1, (2,), ()): ("P", "P;2"),
+ (MM, 3, (1,), 1, (2,), ()): ("P", "P;2"),
+ (II, 3, (1,), 2, (2,), ()): ("P", "P;2R"),
+ (MM, 3, (1,), 2, (2,), ()): ("P", "P;2R"),
+ (II, 3, (1,), 1, (4,), ()): ("P", "P;4"),
+ (MM, 3, (1,), 1, (4,), ()): ("P", "P;4"),
+ (II, 3, (1,), 2, (4,), ()): ("P", "P;4R"),
+ (MM, 3, (1,), 2, (4,), ()): ("P", "P;4R"),
+ (II, 3, (1,), 1, (8,), ()): ("P", "P"),
+ (MM, 3, (1,), 1, (8,), ()): ("P", "P"),
+ (II, 3, (1,), 1, (8, 8), (2,)): ("PA", "PA"),
+ (MM, 3, (1,), 1, (8, 8), (2,)): ("PA", "PA"),
+ (II, 3, (1,), 2, (8,), ()): ("P", "P;R"),
+ (MM, 3, (1,), 2, (8,), ()): ("P", "P;R"),
+ (II, 5, (1,), 1, (8, 8, 8, 8), ()): ("CMYK", "CMYK"),
+ (MM, 5, (1,), 1, (8, 8, 8, 8), ()): ("CMYK", "CMYK"),
+ (II, 5, (1,), 1, (8, 8, 8, 8, 8), (0,)): ("CMYK", "CMYKX"),
+ (MM, 5, (1,), 1, (8, 8, 8, 8, 8), (0,)): ("CMYK", "CMYKX"),
+ (II, 5, (1,), 1, (8, 8, 8, 8, 8, 8), (0, 0)): ("CMYK", "CMYKXX"),
+ (MM, 5, (1,), 1, (8, 8, 8, 8, 8, 8), (0, 0)): ("CMYK", "CMYKXX"),
+ (II, 5, (1,), 1, (16, 16, 16, 16), ()): ("CMYK", "CMYK;16L"),
+ (II, 6, (1,), 1, (8,), ()): ("L", "L"),
+ (MM, 6, (1,), 1, (8,), ()): ("L", "L"),
+ # JPEG compressed images handled by LibTiff and auto-converted to RGBX
+ # Minimal Baseline TIFF requires YCbCr images to have 3 SamplesPerPixel
+ (II, 6, (1,), 1, (8, 8, 8), ()): ("RGB", "RGBX"),
+ (MM, 6, (1,), 1, (8, 8, 8), ()): ("RGB", "RGBX"),
+ (II, 8, (1,), 1, (8, 8, 8), ()): ("LAB", "LAB"),
+ (MM, 8, (1,), 1, (8, 8, 8), ()): ("LAB", "LAB"),
+}
+
+MAX_SAMPLESPERPIXEL = max(len(key_tp[4]) for key_tp in OPEN_INFO)
+
+PREFIXES = [
+ b"MM\x00\x2A", # Valid TIFF header with big-endian byte order
+ b"II\x2A\x00", # Valid TIFF header with little-endian byte order
+ b"MM\x2A\x00", # Invalid TIFF header, assume big-endian
+ b"II\x00\x2A", # Invalid TIFF header, assume little-endian
+ b"MM\x00\x2B", # BigTIFF with big-endian byte order
+ b"II\x2B\x00", # BigTIFF with little-endian byte order
+]
+
+
+def _accept(prefix):
+ return prefix[:4] in PREFIXES
+
+
+def _limit_rational(val, max_val):
+ inv = abs(val) > 1
+ n_d = IFDRational(1 / val if inv else val).limit_rational(max_val)
+ return n_d[::-1] if inv else n_d
+
+
+def _limit_signed_rational(val, max_val, min_val):
+ frac = Fraction(val)
+ n_d = frac.numerator, frac.denominator
+
+ if min(n_d) < min_val:
+ n_d = _limit_rational(val, abs(min_val))
+
+ if max(n_d) > max_val:
+ val = Fraction(*n_d)
+ n_d = _limit_rational(val, max_val)
+
+ return n_d
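+
+# Example (illustrative): _limit_rational(2.5, 2**32 - 1) returns (5, 2)
+# and _limit_signed_rational(-2.5, 2**31 - 1, -(2**31)) returns (-5, 2);
+# pairs are only re-approximated when they overflow the target range.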
+
+
+##
+# Wrapper for TIFF IFDs.
+
+_load_dispatch = {}
+_write_dispatch = {}
+
+
+class IFDRational(Rational):
+ """Implements a rational class where 0/0 is a legal value to match
+ the in the wild use of exif rationals.
+
+ e.g., DigitalZoomRatio - 0.00/0.00 indicates that no digital zoom was used
+ """
+
+ """ If the denominator is 0, store this as a float('nan'), otherwise store
+ as a fractions.Fraction(). Delegate as appropriate
+
+ """
+
+ __slots__ = ("_numerator", "_denominator", "_val")
+
+ def __init__(self, value, denominator=1):
+ """
+ :param value: either an integer numerator, a
+ float/rational/other number, or an IFDRational
+ :param denominator: Optional integer denominator
+ """
+ if isinstance(value, IFDRational):
+ self._numerator = value.numerator
+ self._denominator = value.denominator
+ self._val = value._val
+ return
+
+ if isinstance(value, Fraction):
+ self._numerator = value.numerator
+ self._denominator = value.denominator
+ else:
+ self._numerator = value
+ self._denominator = denominator
+
+ if denominator == 0:
+ self._val = float("nan")
+ elif denominator == 1:
+ self._val = Fraction(value)
+ else:
+ self._val = Fraction(value, denominator)
+
+ @property
+ def numerator(self):
+ return self._numerator
+
+ @property
+ def denominator(self):
+ return self._denominator
+
+ def limit_rational(self, max_denominator):
+ """
+
+ :param max_denominator: Integer, the maximum denominator value
+ :returns: Tuple of (numerator, denominator)
+ """
+
+ if self.denominator == 0:
+ return self.numerator, self.denominator
+
+ f = self._val.limit_denominator(max_denominator)
+ return f.numerator, f.denominator
+
+ def __repr__(self):
+ return str(float(self._val))
+
+ def __hash__(self):
+ return self._val.__hash__()
+
+ def __eq__(self, other):
+ val = self._val
+ if isinstance(other, IFDRational):
+ other = other._val
+ if isinstance(other, float):
+ val = float(val)
+ return val == other
+
+ def __getstate__(self):
+ return [self._val, self._numerator, self._denominator]
+
+ def __setstate__(self, state):
+ IFDRational.__init__(self, 0)
+ _val, _numerator, _denominator = state
+ self._val = _val
+ self._numerator = _numerator
+ self._denominator = _denominator
+
+ def _delegate(op):
+ def delegate(self, *args):
+ return getattr(self._val, op)(*args)
+
+ return delegate
+
+ """ a = ['add','radd', 'sub', 'rsub', 'mul', 'rmul',
+ 'truediv', 'rtruediv', 'floordiv', 'rfloordiv',
+ 'mod','rmod', 'pow','rpow', 'pos', 'neg',
+ 'abs', 'trunc', 'lt', 'gt', 'le', 'ge', 'bool',
+ 'ceil', 'floor', 'round']
+ print("\n".join("__%s__ = _delegate('__%s__')" % (s,s) for s in a))
+ """
+
+ __add__ = _delegate("__add__")
+ __radd__ = _delegate("__radd__")
+ __sub__ = _delegate("__sub__")
+ __rsub__ = _delegate("__rsub__")
+ __mul__ = _delegate("__mul__")
+ __rmul__ = _delegate("__rmul__")
+ __truediv__ = _delegate("__truediv__")
+ __rtruediv__ = _delegate("__rtruediv__")
+ __floordiv__ = _delegate("__floordiv__")
+ __rfloordiv__ = _delegate("__rfloordiv__")
+ __mod__ = _delegate("__mod__")
+ __rmod__ = _delegate("__rmod__")
+ __pow__ = _delegate("__pow__")
+ __rpow__ = _delegate("__rpow__")
+ __pos__ = _delegate("__pos__")
+ __neg__ = _delegate("__neg__")
+ __abs__ = _delegate("__abs__")
+ __trunc__ = _delegate("__trunc__")
+ __lt__ = _delegate("__lt__")
+ __gt__ = _delegate("__gt__")
+ __le__ = _delegate("__le__")
+ __ge__ = _delegate("__ge__")
+ __bool__ = _delegate("__bool__")
+ __ceil__ = _delegate("__ceil__")
+ __floor__ = _delegate("__floor__")
+ __round__ = _delegate("__round__")
+ # Python >= 3.11
+ if hasattr(Fraction, "__int__"):
+ __int__ = _delegate("__int__")
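+
+    # Usage sketch (illustrative): arithmetic and comparisons delegate to
+    # the underlying Fraction (or float('nan') for a 0/0 value), e.g.
+    #
+    #     IFDRational(3, 4) + 1     # Fraction(7, 4)
+    #     repr(IFDRational(0, 0))   # 'nan'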
+
+
+class ImageFileDirectory_v2(MutableMapping):
+ """This class represents a TIFF tag directory. To speed things up, we
+ don't decode tags unless they're asked for.
+
+ Exposes a dictionary interface of the tags in the directory::
+
+ ifd = ImageFileDirectory_v2()
+ ifd[key] = 'Some Data'
+ ifd.tagtype[key] = TiffTags.ASCII
+ print(ifd[key])
+ 'Some Data'
+
+    Individual values are returned as strings or numbers; sequences are
+    returned as tuples of the values.
+
+ The tiff metadata type of each item is stored in a dictionary of
+ tag types in
+ :attr:`~PIL.TiffImagePlugin.ImageFileDirectory_v2.tagtype`. The types
+ are read from a tiff file, guessed from the type added, or added
+ manually.
+
+ Data Structures:
+
+ * ``self.tagtype = {}``
+
+ * Key: numerical TIFF tag number
+ * Value: integer corresponding to the data type from
+ :py:data:`.TiffTags.TYPES`
+
+ .. versionadded:: 3.0.0
+
+ 'Internal' data structures:
+
+ * ``self._tags_v2 = {}``
+
+ * Key: numerical TIFF tag number
+ * Value: decoded data, as tuple for multiple values
+
+ * ``self._tagdata = {}``
+
+ * Key: numerical TIFF tag number
+ * Value: undecoded byte string from file
+
+ * ``self._tags_v1 = {}``
+
+ * Key: numerical TIFF tag number
+ * Value: decoded data in the v1 format
+
+ Tags will be found in the private attributes ``self._tagdata``, and in
+ ``self._tags_v2`` once decoded.
+
+ ``self.legacy_api`` is a value for internal use, and shouldn't be changed
+ from outside code. In cooperation with
+ :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v1`, if ``legacy_api``
+ is true, then decoded tags will be populated into both ``_tags_v1`` and
+ ``_tags_v2``. ``_tags_v2`` will be used if this IFD is used in the TIFF
+ save routine. Tags should be read from ``_tags_v1`` if
+ ``legacy_api == true``.
+
+ """
+
+ def __init__(self, ifh=b"II\052\0\0\0\0\0", prefix=None, group=None):
+ """Initialize an ImageFileDirectory.
+
+ To construct an ImageFileDirectory from a real file, pass the 8-byte
+ magic header to the constructor. To only set the endianness, pass it
+ as the 'prefix' keyword argument.
+
+ :param ifh: One of the accepted magic headers (cf. PREFIXES); also sets
+ endianness.
+ :param prefix: Override the endianness of the file.
+ """
+ if not _accept(ifh):
+ msg = f"not a TIFF file (header {repr(ifh)} not valid)"
+ raise SyntaxError(msg)
+ self._prefix = prefix if prefix is not None else ifh[:2]
+ if self._prefix == MM:
+ self._endian = ">"
+ elif self._prefix == II:
+ self._endian = "<"
+ else:
+ msg = "not a TIFF IFD"
+ raise SyntaxError(msg)
+ self._bigtiff = ifh[2] == 43
+ self.group = group
+ self.tagtype = {}
+ """ Dictionary of tag types """
+ self.reset()
+ (self.next,) = (
+ self._unpack("Q", ifh[8:]) if self._bigtiff else self._unpack("L", ifh[4:])
+ )
+ self._legacy_api = False
+
+ prefix = property(lambda self: self._prefix)
+ offset = property(lambda self: self._offset)
+ legacy_api = property(lambda self: self._legacy_api)
+
+ @legacy_api.setter
+ def legacy_api(self, value):
+ msg = "Not allowing setting of legacy api"
+ raise Exception(msg)
+
+ def reset(self):
+ self._tags_v1 = {} # will remain empty if legacy_api is false
+ self._tags_v2 = {} # main tag storage
+ self._tagdata = {}
+ self.tagtype = {} # added 2008-06-05 by Florian Hoech
+ self._next = None
+ self._offset = None
+
+ def __str__(self):
+ return str(dict(self))
+
+ def named(self):
+ """
+ :returns: dict of name|key: value
+
+ Returns the complete tag dictionary, with named tags where possible.
+ """
+ return {
+ TiffTags.lookup(code, self.group).name: value
+ for code, value in self.items()
+ }
+
+ def __len__(self):
+ return len(set(self._tagdata) | set(self._tags_v2))
+
+ def __getitem__(self, tag):
+ if tag not in self._tags_v2: # unpack on the fly
+ data = self._tagdata[tag]
+ typ = self.tagtype[tag]
+ size, handler = self._load_dispatch[typ]
+ self[tag] = handler(self, data, self.legacy_api) # check type
+ val = self._tags_v2[tag]
+ if self.legacy_api and not isinstance(val, (tuple, bytes)):
+ val = (val,)
+ return val
+
+ def __contains__(self, tag):
+ return tag in self._tags_v2 or tag in self._tagdata
+
+ def __setitem__(self, tag, value):
+ self._setitem(tag, value, self.legacy_api)
+
+ def _setitem(self, tag, value, legacy_api):
+ basetypes = (Number, bytes, str)
+
+ info = TiffTags.lookup(tag, self.group)
+ values = [value] if isinstance(value, basetypes) else value
+
+ if tag not in self.tagtype:
+ if info.type:
+ self.tagtype[tag] = info.type
+ else:
+ self.tagtype[tag] = TiffTags.UNDEFINED
+ if all(isinstance(v, IFDRational) for v in values):
+ self.tagtype[tag] = (
+ TiffTags.RATIONAL
+ if all(v >= 0 for v in values)
+ else TiffTags.SIGNED_RATIONAL
+ )
+ elif all(isinstance(v, int) for v in values):
+ if all(0 <= v < 2**16 for v in values):
+ self.tagtype[tag] = TiffTags.SHORT
+ elif all(-(2**15) < v < 2**15 for v in values):
+ self.tagtype[tag] = TiffTags.SIGNED_SHORT
+ else:
+ self.tagtype[tag] = (
+ TiffTags.LONG
+ if all(v >= 0 for v in values)
+ else TiffTags.SIGNED_LONG
+ )
+ elif all(isinstance(v, float) for v in values):
+ self.tagtype[tag] = TiffTags.DOUBLE
+ elif all(isinstance(v, str) for v in values):
+ self.tagtype[tag] = TiffTags.ASCII
+ elif all(isinstance(v, bytes) for v in values):
+ self.tagtype[tag] = TiffTags.BYTE
+
+ if self.tagtype[tag] == TiffTags.UNDEFINED:
+ values = [
+ v.encode("ascii", "replace") if isinstance(v, str) else v
+ for v in values
+ ]
+ elif self.tagtype[tag] == TiffTags.RATIONAL:
+ values = [float(v) if isinstance(v, int) else v for v in values]
+
+ is_ifd = self.tagtype[tag] == TiffTags.LONG and isinstance(values, dict)
+ if not is_ifd:
+ values = tuple(info.cvt_enum(value) for value in values)
+
+ dest = self._tags_v1 if legacy_api else self._tags_v2
+
+ # Three branches:
+ # Spec'd length == 1, Actual length 1, store as element
+ # Spec'd length == 1, Actual > 1, Warn and truncate. Formerly barfed.
+ # No Spec, Actual length 1, Formerly (<4.2) returned a 1 element tuple.
+ # Don't mess with the legacy api, since it's frozen.
+ if not is_ifd and (
+ (info.length == 1)
+ or self.tagtype[tag] == TiffTags.BYTE
+ or (info.length is None and len(values) == 1 and not legacy_api)
+ ):
+ # Don't mess with the legacy api, since it's frozen.
+ if legacy_api and self.tagtype[tag] in [
+ TiffTags.RATIONAL,
+ TiffTags.SIGNED_RATIONAL,
+ ]: # rationals
+ values = (values,)
+ try:
+ (dest[tag],) = values
+ except ValueError:
+ # We've got a builtin tag with 1 expected entry
+ warnings.warn(
+ f"Metadata Warning, tag {tag} had too many entries: "
+ f"{len(values)}, expected 1"
+ )
+ dest[tag] = values[0]
+
+ else:
+ # Spec'd length > 1 or undefined
+ # Unspec'd, and length > 1
+ dest[tag] = values
+
+ def __delitem__(self, tag):
+ self._tags_v2.pop(tag, None)
+ self._tags_v1.pop(tag, None)
+ self._tagdata.pop(tag, None)
+
+ def __iter__(self):
+ return iter(set(self._tagdata) | set(self._tags_v2))
+
+ def _unpack(self, fmt, data):
+ return struct.unpack(self._endian + fmt, data)
+
+ def _pack(self, fmt, *values):
+ return struct.pack(self._endian + fmt, *values)
+
+ def _register_loader(idx, size):
+ def decorator(func):
+ from .TiffTags import TYPES
+
+ if func.__name__.startswith("load_"):
+ TYPES[idx] = func.__name__[5:].replace("_", " ")
+ _load_dispatch[idx] = size, func # noqa: F821
+ return func
+
+ return decorator
+
+ def _register_writer(idx):
+ def decorator(func):
+ _write_dispatch[idx] = func # noqa: F821
+ return func
+
+ return decorator
+
+ def _register_basic(idx_fmt_name):
+ from .TiffTags import TYPES
+
+ idx, fmt, name = idx_fmt_name
+ TYPES[idx] = name
+ size = struct.calcsize("=" + fmt)
+ _load_dispatch[idx] = ( # noqa: F821
+ size,
+ lambda self, data, legacy_api=True: (
+ self._unpack(f"{len(data) // size}{fmt}", data)
+ ),
+ )
+ _write_dispatch[idx] = lambda self, *values: ( # noqa: F821
+ b"".join(self._pack(fmt, value) for value in values)
+ )
+
+ list(
+ map(
+ _register_basic,
+ [
+ (TiffTags.SHORT, "H", "short"),
+ (TiffTags.LONG, "L", "long"),
+ (TiffTags.SIGNED_BYTE, "b", "signed byte"),
+ (TiffTags.SIGNED_SHORT, "h", "signed short"),
+ (TiffTags.SIGNED_LONG, "l", "signed long"),
+ (TiffTags.FLOAT, "f", "float"),
+ (TiffTags.DOUBLE, "d", "double"),
+ (TiffTags.IFD, "L", "long"),
+ (TiffTags.LONG8, "Q", "long8"),
+ ],
+ )
+ )
+
+ @_register_loader(1, 1) # Basic type, except for the legacy API.
+ def load_byte(self, data, legacy_api=True):
+ return data
+
+ @_register_writer(1) # Basic type, except for the legacy API.
+ def write_byte(self, data):
+ if isinstance(data, IFDRational):
+ data = int(data)
+ if isinstance(data, int):
+ data = bytes((data,))
+ return data
+
+ @_register_loader(2, 1)
+ def load_string(self, data, legacy_api=True):
+ if data.endswith(b"\0"):
+ data = data[:-1]
+ return data.decode("latin-1", "replace")
+
+ @_register_writer(2)
+ def write_string(self, value):
+ # remerge of https://github.com/python-pillow/Pillow/pull/1416
+ if isinstance(value, int):
+ value = str(value)
+ if not isinstance(value, bytes):
+ value = value.encode("ascii", "replace")
+ return value + b"\0"
+
+ @_register_loader(5, 8)
+ def load_rational(self, data, legacy_api=True):
+ vals = self._unpack(f"{len(data) // 4}L", data)
+
+ def combine(a, b):
+ return (a, b) if legacy_api else IFDRational(a, b)
+
+ return tuple(combine(num, denom) for num, denom in zip(vals[::2], vals[1::2]))
+
+ @_register_writer(5)
+ def write_rational(self, *values):
+ return b"".join(
+ self._pack("2L", *_limit_rational(frac, 2**32 - 1)) for frac in values
+ )
+
+ @_register_loader(7, 1)
+ def load_undefined(self, data, legacy_api=True):
+ return data
+
+ @_register_writer(7)
+ def write_undefined(self, value):
+ if isinstance(value, int):
+ value = str(value).encode("ascii", "replace")
+ return value
+
+ @_register_loader(10, 8)
+ def load_signed_rational(self, data, legacy_api=True):
+ vals = self._unpack(f"{len(data) // 4}l", data)
+
+ def combine(a, b):
+ return (a, b) if legacy_api else IFDRational(a, b)
+
+ return tuple(combine(num, denom) for num, denom in zip(vals[::2], vals[1::2]))
+
+ @_register_writer(10)
+ def write_signed_rational(self, *values):
+ return b"".join(
+ self._pack("2l", *_limit_signed_rational(frac, 2**31 - 1, -(2**31)))
+ for frac in values
+ )
+
+ def _ensure_read(self, fp, size):
+ ret = fp.read(size)
+ if len(ret) != size:
+ msg = (
+ "Corrupt EXIF data. "
+ f"Expecting to read {size} bytes but only got {len(ret)}. "
+ )
+ raise OSError(msg)
+ return ret
+
+ def load(self, fp):
+ self.reset()
+ self._offset = fp.tell()
+
+ try:
+ tag_count = (
+ self._unpack("Q", self._ensure_read(fp, 8))
+ if self._bigtiff
+ else self._unpack("H", self._ensure_read(fp, 2))
+ )[0]
+ for i in range(tag_count):
+ tag, typ, count, data = (
+ self._unpack("HHQ8s", self._ensure_read(fp, 20))
+ if self._bigtiff
+ else self._unpack("HHL4s", self._ensure_read(fp, 12))
+ )
+
+ tagname = TiffTags.lookup(tag, self.group).name
+ typname = TYPES.get(typ, "unknown")
+ msg = f"tag: {tagname} ({tag}) - type: {typname} ({typ})"
+
+ try:
+ unit_size, handler = self._load_dispatch[typ]
+ except KeyError:
+ logger.debug("%s - unsupported type %s", msg, typ)
+ continue # ignore unsupported type
+ size = count * unit_size
+ if size > (8 if self._bigtiff else 4):
+ here = fp.tell()
+ (offset,) = self._unpack("Q" if self._bigtiff else "L", data)
+ msg += f" Tag Location: {here} - Data Location: {offset}"
+ fp.seek(offset)
+ data = ImageFile._safe_read(fp, size)
+ fp.seek(here)
+ else:
+ data = data[:size]
+
+ if len(data) != size:
+ warnings.warn(
+ "Possibly corrupt EXIF data. "
+ f"Expecting to read {size} bytes but only got {len(data)}."
+ f" Skipping tag {tag}"
+ )
+ logger.debug(msg)
+ continue
+
+ if not data:
+ logger.debug(msg)
+ continue
+
+ self._tagdata[tag] = data
+ self.tagtype[tag] = typ
+
+ msg += " - value: " + (
+ "" % size if size > 32 else repr(data)
+ )
+ logger.debug(msg)
+
+ (self.next,) = (
+ self._unpack("Q", self._ensure_read(fp, 8))
+ if self._bigtiff
+ else self._unpack("L", self._ensure_read(fp, 4))
+ )
+ except OSError as msg:
+ warnings.warn(str(msg))
+ return
+
+ def tobytes(self, offset=0):
+ # FIXME What about tagdata?
+ result = self._pack("H", len(self._tags_v2))
+
+ entries = []
+ offset = offset + len(result) + len(self._tags_v2) * 12 + 4
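+        # i.e. a 2-byte entry count, 12 bytes (HHL4s) per tag entry and a
+        # 4-byte next-IFD pointer; e.g. with 10 tags, out-of-line tag data
+        # starts 126 bytes past the start of this IFD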
+ stripoffsets = None
+
+ # pass 1: convert tags to binary format
+ # always write tags in ascending order
+ for tag, value in sorted(self._tags_v2.items()):
+ if tag == STRIPOFFSETS:
+ stripoffsets = len(entries)
+ typ = self.tagtype.get(tag)
+ logger.debug("Tag %s, Type: %s, Value: %s", tag, typ, repr(value))
+ is_ifd = typ == TiffTags.LONG and isinstance(value, dict)
+ if is_ifd:
+ if self._endian == "<":
+ ifh = b"II\x2A\x00\x08\x00\x00\x00"
+ else:
+ ifh = b"MM\x00\x2A\x00\x00\x00\x08"
+ ifd = ImageFileDirectory_v2(ifh, group=tag)
+ values = self._tags_v2[tag]
+ for ifd_tag, ifd_value in values.items():
+ ifd[ifd_tag] = ifd_value
+ data = ifd.tobytes(offset)
+ else:
+ values = value if isinstance(value, tuple) else (value,)
+ data = self._write_dispatch[typ](self, *values)
+
+ tagname = TiffTags.lookup(tag, self.group).name
+ typname = "ifd" if is_ifd else TYPES.get(typ, "unknown")
+ msg = f"save: {tagname} ({tag}) - type: {typname} ({typ})"
+ msg += " - value: " + (
+ "" % len(data) if len(data) >= 16 else str(values)
+ )
+ logger.debug(msg)
+
+ # count is sum of lengths for string and arbitrary data
+ if is_ifd:
+ count = 1
+ elif typ in [TiffTags.BYTE, TiffTags.ASCII, TiffTags.UNDEFINED]:
+ count = len(data)
+ else:
+ count = len(values)
+ # figure out if data fits into the entry
+ if len(data) <= 4:
+ entries.append((tag, typ, count, data.ljust(4, b"\0"), b""))
+ else:
+ entries.append((tag, typ, count, self._pack("L", offset), data))
+ offset += (len(data) + 1) // 2 * 2 # pad to word
+
+ # update strip offset data to point beyond auxiliary data
+ if stripoffsets is not None:
+ tag, typ, count, value, data = entries[stripoffsets]
+ if data:
+ msg = "multistrip support not yet implemented"
+ raise NotImplementedError(msg)
+ value = self._pack("L", self._unpack("L", value)[0] + offset)
+ entries[stripoffsets] = tag, typ, count, value, data
+
+ # pass 2: write entries to file
+ for tag, typ, count, value, data in entries:
+ logger.debug("%s %s %s %s %s", tag, typ, count, repr(value), repr(data))
+ result += self._pack("HHL4s", tag, typ, count, value)
+
+ # -- overwrite here for multi-page --
+ result += b"\0\0\0\0" # end of entries
+
+ # pass 3: write auxiliary data to file
+ for tag, typ, count, value, data in entries:
+ result += data
+ if len(data) & 1:
+ result += b"\0"
+
+ return result
+
+ def save(self, fp):
+ if fp.tell() == 0: # skip TIFF header on subsequent pages
+ # tiff header -- PIL always starts the first IFD at offset 8
+ fp.write(self._prefix + self._pack("HL", 42, 8))
+
+ offset = fp.tell()
+ result = self.tobytes(offset)
+ fp.write(result)
+ return offset + len(result)
+
+
+ImageFileDirectory_v2._load_dispatch = _load_dispatch
+ImageFileDirectory_v2._write_dispatch = _write_dispatch
+for idx, name in TYPES.items():
+ name = name.replace(" ", "_")
+ setattr(ImageFileDirectory_v2, "load_" + name, _load_dispatch[idx][1])
+ setattr(ImageFileDirectory_v2, "write_" + name, _write_dispatch[idx])
+del _load_dispatch, _write_dispatch, idx, name
+
+
+# Legacy ImageFileDirectory support.
+class ImageFileDirectory_v1(ImageFileDirectory_v2):
+ """This class represents the **legacy** interface to a TIFF tag directory.
+
+ Exposes a dictionary interface of the tags in the directory::
+
+ ifd = ImageFileDirectory_v1()
+ ifd[key] = 'Some Data'
+ ifd.tagtype[key] = TiffTags.ASCII
+ print(ifd[key])
+ ('Some Data',)
+
+ Also contains a dictionary of tag types as read from the tiff image file,
+ :attr:`~PIL.TiffImagePlugin.ImageFileDirectory_v1.tagtype`.
+
+ Values are returned as a tuple.
+
+ .. deprecated:: 3.0.0
+ """
+
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+ self._legacy_api = True
+
+ tags = property(lambda self: self._tags_v1)
+ tagdata = property(lambda self: self._tagdata)
+
+ # defined in ImageFileDirectory_v2
+ tagtype: dict
+ """Dictionary of tag types"""
+
+ @classmethod
+ def from_v2(cls, original):
+ """Returns an
+ :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v1`
+ instance with the same data as is contained in the original
+ :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v2`
+ instance.
+
+ :returns: :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v1`
+
+ """
+
+ ifd = cls(prefix=original.prefix)
+ ifd._tagdata = original._tagdata
+ ifd.tagtype = original.tagtype
+ ifd.next = original.next # an indicator for multipage tiffs
+ return ifd
+
+ def to_v2(self):
+ """Returns an
+ :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v2`
+ instance with the same data as is contained in the original
+ :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v1`
+ instance.
+
+ :returns: :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v2`
+
+ """
+
+ ifd = ImageFileDirectory_v2(prefix=self.prefix)
+ ifd._tagdata = dict(self._tagdata)
+ ifd.tagtype = dict(self.tagtype)
+ ifd._tags_v2 = dict(self._tags_v2)
+ return ifd
+
+ def __contains__(self, tag):
+ return tag in self._tags_v1 or tag in self._tagdata
+
+ def __len__(self):
+ return len(set(self._tagdata) | set(self._tags_v1))
+
+ def __iter__(self):
+ return iter(set(self._tagdata) | set(self._tags_v1))
+
+ def __setitem__(self, tag, value):
+ for legacy_api in (False, True):
+ self._setitem(tag, value, legacy_api)
+
+ def __getitem__(self, tag):
+ if tag not in self._tags_v1: # unpack on the fly
+ data = self._tagdata[tag]
+ typ = self.tagtype[tag]
+ size, handler = self._load_dispatch[typ]
+ for legacy in (False, True):
+ self._setitem(tag, handler(self, data, legacy), legacy)
+ val = self._tags_v1[tag]
+ if not isinstance(val, (tuple, bytes)):
+ val = (val,)
+ return val
+
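+# A minimal sketch of the v1/v2 round-trip (tag 270 = ImageDescription; the
+# value is illustrative):
+#
+#     ifd = ImageFileDirectory_v1()
+#     ifd[270] = "an example description"
+#     ifd.tagtype[270] = TiffTags.ASCII
+#     ifd[270]          # -> ("an example description",) -- v1 returns tuples
+#     ifd.to_v2()[270]  # -> "an example description"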
+
+# undone -- switch this pointer when IFD_LEGACY_API == False
+ImageFileDirectory = ImageFileDirectory_v1
+
+
+##
+# Image plugin for TIFF files.
+
+
+class TiffImageFile(ImageFile.ImageFile):
+ format = "TIFF"
+ format_description = "Adobe TIFF"
+ _close_exclusive_fp_after_loading = False
+
+ def __init__(self, fp=None, filename=None):
+ self.tag_v2 = None
+ """ Image file directory (tag dictionary) """
+
+ self.tag = None
+ """ Legacy tag entries """
+
+ super().__init__(fp, filename)
+
+ def _open(self):
+ """Open the first image in a TIFF file"""
+
+ # Header
+ ifh = self.fp.read(8)
+ if ifh[2] == 43:
+ ifh += self.fp.read(8)
+
+ self.tag_v2 = ImageFileDirectory_v2(ifh)
+
+ # legacy IFD entries will be filled in later
+ self.ifd = None
+
+ # setup frame pointers
+ self.__first = self.__next = self.tag_v2.next
+ self.__frame = -1
+ self._fp = self.fp
+ self._frame_pos = []
+ self._n_frames = None
+
+ logger.debug("*** TiffImageFile._open ***")
+ logger.debug("- __first: %s", self.__first)
+ logger.debug("- ifh: %s", repr(ifh)) # Use repr to avoid str(bytes)
+
+ # and load the first frame
+ self._seek(0)
+
+ @property
+ def n_frames(self):
+ if self._n_frames is None:
+ current = self.tell()
+ self._seek(len(self._frame_pos))
+ while self._n_frames is None:
+ self._seek(self.tell() + 1)
+ self.seek(current)
+ return self._n_frames
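+
+    # A quick sketch of n_frames in use (file name hypothetical): it seeks
+    # ahead internally to count pages, then restores the current frame.
+    #
+    #     with Image.open("multipage.tiff") as im:
+    #         total = im.n_frames
+    #         assert im.tell() == 0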
+
+ def seek(self, frame):
+ """Select a given frame as current image"""
+ if not self._seek_check(frame):
+ return
+ self._seek(frame)
+ # Create a new core image object on second and
+ # subsequent frames in the image. Image may be
+ # different size/mode.
+ Image._decompression_bomb_check(self.size)
+ self.im = Image.core.new(self.mode, self.size)
+
+ def _seek(self, frame):
+ self.fp = self._fp
+
+ # reset buffered io handle in case fp
+ # was passed to libtiff, invalidating the buffer
+ self.fp.tell()
+
+ while len(self._frame_pos) <= frame:
+ if not self.__next:
+ msg = "no more images in TIFF file"
+ raise EOFError(msg)
+ logger.debug(
+ "Seeking to frame %s, on frame %s, __next %s, location: %s",
+ frame,
+ self.__frame,
+ self.__next,
+ self.fp.tell(),
+ )
+ self.fp.seek(self.__next)
+ self._frame_pos.append(self.__next)
+ logger.debug("Loading tags, location: %s", self.fp.tell())
+ self.tag_v2.load(self.fp)
+ if self.tag_v2.next in self._frame_pos:
+ # This IFD has already been processed
+ # Declare this to be the end of the image
+ self.__next = 0
+ else:
+ self.__next = self.tag_v2.next
+ if self.__next == 0:
+ self._n_frames = frame + 1
+ if len(self._frame_pos) == 1:
+ self.is_animated = self.__next != 0
+ self.__frame += 1
+ self.fp.seek(self._frame_pos[frame])
+ self.tag_v2.load(self.fp)
+ self._reload_exif()
+ # fill the legacy tag/ifd entries
+ self.tag = self.ifd = ImageFileDirectory_v1.from_v2(self.tag_v2)
+ self.__frame = frame
+ self._setup()
+
+ def tell(self):
+ """Return the current frame number"""
+ return self.__frame
+
+ def getxmp(self):
+ """
+ Returns a dictionary containing the XMP tags.
+ Requires defusedxml to be installed.
+
+ :returns: XMP tags in a dictionary.
+ """
+ return self._getxmp(self.tag_v2[XMP]) if XMP in self.tag_v2 else {}
+
+ def get_photoshop_blocks(self):
+ """
+ Returns a dictionary of Photoshop "Image Resource Blocks".
+ The keys are the image resource ID. For more information, see
+ https://www.adobe.com/devnet-apps/photoshop/fileformatashtml/#50577409_pgfId-1037727
+
+ :returns: Photoshop "Image Resource Blocks" in a dictionary.
+ """
+ blocks = {}
+ val = self.tag_v2.get(ExifTags.Base.ImageResources)
+ if val:
+ while val[:4] == b"8BIM":
+ id = i16(val[4:6])
+ n = math.ceil((val[6] + 1) / 2) * 2
+ size = i32(val[6 + n : 10 + n])
+ data = val[10 + n : 10 + n + size]
+ blocks[id] = {"data": data}
+
+ val = val[math.ceil((10 + n + size) / 2) * 2 :]
+ return blocks
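+
+    # Example (sketch, assuming a TIFF carrying tag 34377 / ImageResources;
+    # the file name is hypothetical):
+    #
+    #     with Image.open("photoshop.tiff") as im:
+    #         for resource_id, block in im.get_photoshop_blocks().items():
+    #             print(hex(resource_id), len(block["data"]))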
+
+ def load(self):
+ if self.tile and self.use_load_libtiff:
+ return self._load_libtiff()
+ return super().load()
+
+ def load_end(self):
+        # allow closing if we're on the first frame and there's no next frame.
+        # This is the ImageFile.load path only; libtiff-specific handling is below.
+ if not self.is_animated:
+ self._close_exclusive_fp_after_loading = True
+
+ # reset buffered io handle in case fp
+ # was passed to libtiff, invalidating the buffer
+ self.fp.tell()
+
+ # load IFD data from fp before it is closed
+ exif = self.getexif()
+ for key in TiffTags.TAGS_V2_GROUPS:
+ if key not in exif:
+ continue
+ exif.get_ifd(key)
+
+ ImageOps.exif_transpose(self, in_place=True)
+ if ExifTags.Base.Orientation in self.tag_v2:
+ del self.tag_v2[ExifTags.Base.Orientation]
+
+ def _load_libtiff(self):
+ """Overload method triggered when we detect a compressed tiff
+ Calls out to libtiff"""
+
+ Image.Image.load(self)
+
+ self.load_prepare()
+
+        if len(self.tile) != 1:
+ msg = "Not exactly one tile"
+ raise OSError(msg)
+
+ # (self._compression, (extents tuple),
+ # 0, (rawmode, self._compression, fp))
+ extents = self.tile[0][1]
+ args = list(self.tile[0][3])
+
+ # To be nice on memory footprint, if there's a
+ # file descriptor, use that instead of reading
+ # into a string in python.
+ try:
+ fp = hasattr(self.fp, "fileno") and self.fp.fileno()
+ # flush the file descriptor, prevents error on pypy 2.4+
+ # should also eliminate the need for fp.tell
+ # in _seek
+ if hasattr(self.fp, "flush"):
+ self.fp.flush()
+ except OSError:
+            # io.BytesIO has a fileno, but raises an OSError if it isn't
+            # backed by a real file descriptor.
+ fp = False
+
+ if fp:
+ args[2] = fp
+
+ decoder = Image._getdecoder(
+ self.mode, "libtiff", tuple(args), self.decoderconfig
+ )
+ try:
+ decoder.setimage(self.im, extents)
+ except ValueError as e:
+ msg = "Couldn't set the image"
+ raise OSError(msg) from e
+
+ close_self_fp = self._exclusive_fp and not self.is_animated
+ if hasattr(self.fp, "getvalue"):
+            # We've got a BytesIO-like thing passed in. Yay for all in memory.
+            # The decoder needs the entire file in one shot, so there's not
+            # a lot we can do here other than give it the entire file,
+            # unless we could do something like get the address of the
+            # underlying buffer.
+            #
+            # Rearranged to support BytesIO objects, since they have a fileno
+            # that raises an OSError if there's no underlying file descriptor.
+            # Easier to deal with here by reordering.
+ logger.debug("have getvalue. just sending in a string from getvalue")
+ n, err = decoder.decode(self.fp.getvalue())
+ elif fp:
+            # we've got an actual file on disk, pass in the fp.
+ logger.debug("have fileno, calling fileno version of the decoder.")
+ if not close_self_fp:
+ self.fp.seek(0)
+ # 4 bytes, otherwise the trace might error out
+ n, err = decoder.decode(b"fpfp")
+ else:
+ # we have something else.
+ logger.debug("don't have fileno or getvalue. just reading")
+ self.fp.seek(0)
+ # UNDONE -- so much for that buffer size thing.
+ n, err = decoder.decode(self.fp.read())
+
+ self.tile = []
+ self.readonly = 0
+
+ self.load_end()
+
+ if close_self_fp:
+ self.fp.close()
+ self.fp = None # might be shared
+
+ if err < 0:
+ raise OSError(err)
+
+ return Image.Image.load(self)
+
+ def _setup(self):
+ """Setup this image object based on current tags"""
+
+ if 0xBC01 in self.tag_v2:
+ msg = "Windows Media Photo files not yet supported"
+ raise OSError(msg)
+
+ # extract relevant tags
+ self._compression = COMPRESSION_INFO[self.tag_v2.get(COMPRESSION, 1)]
+ self._planar_configuration = self.tag_v2.get(PLANAR_CONFIGURATION, 1)
+
+ # photometric is a required tag, but not everyone is reading
+ # the specification
+ photo = self.tag_v2.get(PHOTOMETRIC_INTERPRETATION, 0)
+
+ # old style jpeg compression images most certainly are YCbCr
+ if self._compression == "tiff_jpeg":
+ photo = 6
+
+ fillorder = self.tag_v2.get(FILLORDER, 1)
+
+ logger.debug("*** Summary ***")
+ logger.debug("- compression: %s", self._compression)
+ logger.debug("- photometric_interpretation: %s", photo)
+ logger.debug("- planar_configuration: %s", self._planar_configuration)
+ logger.debug("- fill_order: %s", fillorder)
+ logger.debug("- YCbCr subsampling: %s", self.tag.get(YCBCRSUBSAMPLING))
+
+ # size
+ xsize = int(self.tag_v2.get(IMAGEWIDTH))
+ ysize = int(self.tag_v2.get(IMAGELENGTH))
+ self._size = xsize, ysize
+
+ logger.debug("- size: %s", self.size)
+
+ sample_format = self.tag_v2.get(SAMPLEFORMAT, (1,))
+ if len(sample_format) > 1 and max(sample_format) == min(sample_format) == 1:
+ # SAMPLEFORMAT is properly per band, so an RGB image will
+ # be (1,1,1). But, we don't support per band pixel types,
+ # and anything more than one band is a uint8. So, just
+ # take the first element. Revisit this if adding support
+ # for more exotic images.
+ sample_format = (1,)
+
+ bps_tuple = self.tag_v2.get(BITSPERSAMPLE, (1,))
+ extra_tuple = self.tag_v2.get(EXTRASAMPLES, ())
+ if photo in (2, 6, 8): # RGB, YCbCr, LAB
+ bps_count = 3
+ elif photo == 5: # CMYK
+ bps_count = 4
+ else:
+ bps_count = 1
+ bps_count += len(extra_tuple)
+ bps_actual_count = len(bps_tuple)
+ samples_per_pixel = self.tag_v2.get(
+ SAMPLESPERPIXEL,
+ 3 if self._compression == "tiff_jpeg" and photo in (2, 6) else 1,
+ )
+
+ if samples_per_pixel > MAX_SAMPLESPERPIXEL:
+ # DOS check, samples_per_pixel can be a Long, and we extend the tuple below
+ logger.error(
+ "More samples per pixel than can be decoded: %s", samples_per_pixel
+ )
+ msg = "Invalid value for samples per pixel"
+ raise SyntaxError(msg)
+
+ if samples_per_pixel < bps_actual_count:
+ # If a file has more values in bps_tuple than expected,
+ # remove the excess.
+ bps_tuple = bps_tuple[:samples_per_pixel]
+ elif samples_per_pixel > bps_actual_count and bps_actual_count == 1:
+ # If a file has only one value in bps_tuple, when it should have more,
+ # presume it is the same number of bits for all of the samples.
+ bps_tuple = bps_tuple * samples_per_pixel
+
+ if len(bps_tuple) != samples_per_pixel:
+ msg = "unknown data organization"
+ raise SyntaxError(msg)
+
+ # mode: check photometric interpretation and bits per pixel
+ key = (
+ self.tag_v2.prefix,
+ photo,
+ sample_format,
+ fillorder,
+ bps_tuple,
+ extra_tuple,
+ )
+ logger.debug("format key: %s", key)
+ try:
+ self._mode, rawmode = OPEN_INFO[key]
+ except KeyError as e:
+ logger.debug("- unsupported format")
+ msg = "unknown pixel mode"
+ raise SyntaxError(msg) from e
+
+ logger.debug("- raw mode: %s", rawmode)
+ logger.debug("- pil mode: %s", self.mode)
+
+ self.info["compression"] = self._compression
+
+ xres = self.tag_v2.get(X_RESOLUTION, 1)
+ yres = self.tag_v2.get(Y_RESOLUTION, 1)
+
+ if xres and yres:
+ resunit = self.tag_v2.get(RESOLUTION_UNIT)
+ if resunit == 2: # dots per inch
+ self.info["dpi"] = (xres, yres)
+ elif resunit == 3: # dots per centimeter. convert to dpi
+ self.info["dpi"] = (xres * 2.54, yres * 2.54)
+        elif resunit is None:  # used to default to 1, but now 2
+ self.info["dpi"] = (xres, yres)
+ # For backward compatibility,
+ # we also preserve the old behavior
+ self.info["resolution"] = xres, yres
+ else: # No absolute unit of measurement
+ self.info["resolution"] = xres, yres
+
+ # build tile descriptors
+ x = y = layer = 0
+ self.tile = []
+ self.use_load_libtiff = READ_LIBTIFF or self._compression != "raw"
+ if self.use_load_libtiff:
+ # Decoder expects entire file as one tile.
+ # There's a buffer size limit in load (64k)
+ # so large g4 images will fail if we use that
+ # function.
+ #
+ # Setup the one tile for the whole image, then
+ # use the _load_libtiff function.
+
+            # libtiff handles the fillorder for us, so 1;IR should
+ # actually be 1;I. Including the R double reverses the
+ # bits, so stripes of the image are reversed. See
+ # https://github.com/python-pillow/Pillow/issues/279
+ if fillorder == 2:
+ # Replace fillorder with fillorder=1
+ key = key[:3] + (1,) + key[4:]
+ logger.debug("format key: %s", key)
+ # this should always work, since all the
+ # fillorder==2 modes have a corresponding
+ # fillorder=1 mode
+ self._mode, rawmode = OPEN_INFO[key]
+ # libtiff always returns the bytes in native order.
+ # we're expecting image byte order. So, if the rawmode
+ # contains I;16, we need to convert from native to image
+ # byte order.
+ if rawmode == "I;16":
+ rawmode = "I;16N"
+ if ";16B" in rawmode:
+ rawmode = rawmode.replace(";16B", ";16N")
+ if ";16L" in rawmode:
+ rawmode = rawmode.replace(";16L", ";16N")
+
+ # YCbCr images with new jpeg compression with pixels in one plane
+ # unpacked straight into RGB values
+ if (
+ photo == 6
+ and self._compression == "jpeg"
+ and self._planar_configuration == 1
+ ):
+ rawmode = "RGB"
+
+ # Offset in the tile tuple is 0, we go from 0,0 to
+ # w,h, and we only do this once -- eds
+ a = (rawmode, self._compression, False, self.tag_v2.offset)
+ self.tile.append(("libtiff", (0, 0, xsize, ysize), 0, a))
+
+ elif STRIPOFFSETS in self.tag_v2 or TILEOFFSETS in self.tag_v2:
+ # striped image
+ if STRIPOFFSETS in self.tag_v2:
+ offsets = self.tag_v2[STRIPOFFSETS]
+ h = self.tag_v2.get(ROWSPERSTRIP, ysize)
+ w = self.size[0]
+ else:
+ # tiled image
+ offsets = self.tag_v2[TILEOFFSETS]
+ w = self.tag_v2.get(TILEWIDTH)
+ h = self.tag_v2.get(TILELENGTH)
+
+ for offset in offsets:
+ if x + w > xsize:
+ stride = w * sum(bps_tuple) / 8 # bytes per line
+ else:
+ stride = 0
+
+ tile_rawmode = rawmode
+ if self._planar_configuration == 2:
+                    # each band on its own layer
+ tile_rawmode = rawmode[layer]
+ # adjust stride width accordingly
+ stride /= bps_count
+
+ a = (tile_rawmode, int(stride), 1)
+ self.tile.append(
+ (
+ self._compression,
+ (x, y, min(x + w, xsize), min(y + h, ysize)),
+ offset,
+ a,
+ )
+ )
+ x = x + w
+ if x >= self.size[0]:
+ x, y = 0, y + h
+ if y >= self.size[1]:
+ x = y = 0
+ layer += 1
+ else:
+ logger.debug("- unsupported data organization")
+ msg = "unknown data organization"
+ raise SyntaxError(msg)
+
+ # Fix up info.
+ if ICCPROFILE in self.tag_v2:
+ self.info["icc_profile"] = self.tag_v2[ICCPROFILE]
+
+ # fixup palette descriptor
+
+ if self.mode in ["P", "PA"]:
+ palette = [o8(b // 256) for b in self.tag_v2[COLORMAP]]
+ self.palette = ImagePalette.raw("RGB;L", b"".join(palette))
+
+
+#
+# --------------------------------------------------------------------
+# Write TIFF files
+
+# little endian is default except for image modes with
+# explicit big endian byte-order
+
+SAVE_INFO = {
+ # mode => rawmode, byteorder, photometrics,
+ # sampleformat, bitspersample, extra
+ "1": ("1", II, 1, 1, (1,), None),
+ "L": ("L", II, 1, 1, (8,), None),
+ "LA": ("LA", II, 1, 1, (8, 8), 2),
+ "P": ("P", II, 3, 1, (8,), None),
+ "PA": ("PA", II, 3, 1, (8, 8), 2),
+ "I": ("I;32S", II, 1, 2, (32,), None),
+ "I;16": ("I;16", II, 1, 1, (16,), None),
+ "I;16S": ("I;16S", II, 1, 2, (16,), None),
+ "F": ("F;32F", II, 1, 3, (32,), None),
+ "RGB": ("RGB", II, 2, 1, (8, 8, 8), None),
+ "RGBX": ("RGBX", II, 2, 1, (8, 8, 8, 8), 0),
+ "RGBA": ("RGBA", II, 2, 1, (8, 8, 8, 8), 2),
+ "CMYK": ("CMYK", II, 5, 1, (8, 8, 8, 8), None),
+ "YCbCr": ("YCbCr", II, 6, 1, (8, 8, 8), None),
+ "LAB": ("LAB", II, 8, 1, (8, 8, 8), None),
+ "I;32BS": ("I;32BS", MM, 1, 2, (32,), None),
+ "I;16B": ("I;16B", MM, 1, 1, (16,), None),
+ "I;16BS": ("I;16BS", MM, 1, 2, (16,), None),
+ "F;32BF": ("F;32BF", MM, 1, 3, (32,), None),
+}
+
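+# For example, saving an "RGB" image uses rawmode "RGB", little-endian byte
+# order, PhotometricInterpretation 2 and 8 bits per sample:
+#
+#     rawmode, prefix, photo, format, bits, extra = SAVE_INFO["RGB"]
+#     # -> ("RGB", II, 2, 1, (8, 8, 8), None)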
+
+def _save(im, fp, filename):
+ try:
+ rawmode, prefix, photo, format, bits, extra = SAVE_INFO[im.mode]
+ except KeyError as e:
+ msg = f"cannot write mode {im.mode} as TIFF"
+ raise OSError(msg) from e
+
+ ifd = ImageFileDirectory_v2(prefix=prefix)
+
+ encoderinfo = im.encoderinfo
+ encoderconfig = im.encoderconfig
+ try:
+ compression = encoderinfo["compression"]
+ except KeyError:
+ compression = im.info.get("compression")
+ if isinstance(compression, int):
+ # compression value may be from BMP. Ignore it
+ compression = None
+ if compression is None:
+ compression = "raw"
+ elif compression == "tiff_jpeg":
+ # OJPEG is obsolete, so use new-style JPEG compression instead
+ compression = "jpeg"
+ elif compression == "tiff_deflate":
+ compression = "tiff_adobe_deflate"
+
+ libtiff = WRITE_LIBTIFF or compression != "raw"
+
+ # required for color libtiff images
+ ifd[PLANAR_CONFIGURATION] = 1
+
+ ifd[IMAGEWIDTH] = im.size[0]
+ ifd[IMAGELENGTH] = im.size[1]
+
+ # write any arbitrary tags passed in as an ImageFileDirectory
+ if "tiffinfo" in encoderinfo:
+ info = encoderinfo["tiffinfo"]
+ elif "exif" in encoderinfo:
+ info = encoderinfo["exif"]
+ if isinstance(info, bytes):
+ exif = Image.Exif()
+ exif.load(info)
+ info = exif
+ else:
+ info = {}
+ logger.debug("Tiffinfo Keys: %s", list(info))
+ if isinstance(info, ImageFileDirectory_v1):
+ info = info.to_v2()
+ for key in info:
+ if isinstance(info, Image.Exif) and key in TiffTags.TAGS_V2_GROUPS:
+ ifd[key] = info.get_ifd(key)
+ else:
+ ifd[key] = info.get(key)
+ try:
+ ifd.tagtype[key] = info.tagtype[key]
+ except Exception:
+ pass # might not be an IFD. Might not have populated type
+
+ # additions written by Greg Couch, gregc@cgl.ucsf.edu
+ # inspired by image-sig posting from Kevin Cazabon, kcazabon@home.com
+ if hasattr(im, "tag_v2"):
+ # preserve tags from original TIFF image file
+ for key in (
+ RESOLUTION_UNIT,
+ X_RESOLUTION,
+ Y_RESOLUTION,
+ IPTC_NAA_CHUNK,
+ PHOTOSHOP_CHUNK,
+ XMP,
+ ):
+ if key in im.tag_v2:
+ ifd[key] = im.tag_v2[key]
+ ifd.tagtype[key] = im.tag_v2.tagtype[key]
+
+ # preserve ICC profile (should also work when saving other formats
+ # which support profiles as TIFF) -- 2008-06-06 Florian Hoech
+ icc = encoderinfo.get("icc_profile", im.info.get("icc_profile"))
+ if icc:
+ ifd[ICCPROFILE] = icc
+
+ for key, name in [
+ (IMAGEDESCRIPTION, "description"),
+ (X_RESOLUTION, "resolution"),
+ (Y_RESOLUTION, "resolution"),
+ (X_RESOLUTION, "x_resolution"),
+ (Y_RESOLUTION, "y_resolution"),
+ (RESOLUTION_UNIT, "resolution_unit"),
+ (SOFTWARE, "software"),
+ (DATE_TIME, "date_time"),
+ (ARTIST, "artist"),
+ (COPYRIGHT, "copyright"),
+ ]:
+ if name in encoderinfo:
+ ifd[key] = encoderinfo[name]
+
+ dpi = encoderinfo.get("dpi")
+ if dpi:
+ ifd[RESOLUTION_UNIT] = 2
+ ifd[X_RESOLUTION] = dpi[0]
+ ifd[Y_RESOLUTION] = dpi[1]
+
+ if bits != (1,):
+ ifd[BITSPERSAMPLE] = bits
+ if len(bits) != 1:
+ ifd[SAMPLESPERPIXEL] = len(bits)
+ if extra is not None:
+ ifd[EXTRASAMPLES] = extra
+ if format != 1:
+ ifd[SAMPLEFORMAT] = format
+
+ if PHOTOMETRIC_INTERPRETATION not in ifd:
+ ifd[PHOTOMETRIC_INTERPRETATION] = photo
+ elif im.mode in ("1", "L") and ifd[PHOTOMETRIC_INTERPRETATION] == 0:
+ if im.mode == "1":
+ inverted_im = im.copy()
+ px = inverted_im.load()
+ for y in range(inverted_im.height):
+ for x in range(inverted_im.width):
+ px[x, y] = 0 if px[x, y] == 255 else 255
+ im = inverted_im
+ else:
+ im = ImageOps.invert(im)
+
+ if im.mode in ["P", "PA"]:
+ lut = im.im.getpalette("RGB", "RGB;L")
+ colormap = []
+ colors = len(lut) // 3
+ for i in range(3):
+ colormap += [v * 256 for v in lut[colors * i : colors * (i + 1)]]
+ colormap += [0] * (256 - colors)
+ ifd[COLORMAP] = colormap
+ # data orientation
+ stride = len(bits) * ((im.size[0] * bits[0] + 7) // 8)
+ # aim for given strip size (64 KB by default) when using libtiff writer
+ if libtiff:
+ im_strip_size = encoderinfo.get("strip_size", STRIP_SIZE)
+ rows_per_strip = 1 if stride == 0 else min(im_strip_size // stride, im.size[1])
+ # JPEG encoder expects multiple of 8 rows
+ if compression == "jpeg":
+ rows_per_strip = min(((rows_per_strip + 7) // 8) * 8, im.size[1])
+ else:
+ rows_per_strip = im.size[1]
+ if rows_per_strip == 0:
+ rows_per_strip = 1
+ strip_byte_counts = 1 if stride == 0 else stride * rows_per_strip
+ strips_per_image = (im.size[1] + rows_per_strip - 1) // rows_per_strip
+ ifd[ROWSPERSTRIP] = rows_per_strip
+ if strip_byte_counts >= 2**16:
+ ifd.tagtype[STRIPBYTECOUNTS] = TiffTags.LONG
+ ifd[STRIPBYTECOUNTS] = (strip_byte_counts,) * (strips_per_image - 1) + (
+ stride * im.size[1] - strip_byte_counts * (strips_per_image - 1),
+ )
+ ifd[STRIPOFFSETS] = tuple(
+ range(0, strip_byte_counts * strips_per_image, strip_byte_counts)
+ ) # this is adjusted by IFD writer
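+    # Worked example (illustrative numbers, libtiff writer): a 1000x800 RGB
+    # image has stride = 3 * 1000 = 3000 bytes per row, so with the default
+    # 64 KB strip target rows_per_strip = min(65536 // 3000, 800) = 21 and
+    # strips_per_image = (800 + 21 - 1) // 21 = 39.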
+ # no compression by default:
+ ifd[COMPRESSION] = COMPRESSION_INFO_REV.get(compression, 1)
+
+ if im.mode == "YCbCr":
+ for tag, value in {
+ YCBCRSUBSAMPLING: (1, 1),
+ REFERENCEBLACKWHITE: (0, 255, 128, 255, 128, 255),
+ }.items():
+ ifd.setdefault(tag, value)
+
+ blocklist = [TILEWIDTH, TILELENGTH, TILEOFFSETS, TILEBYTECOUNTS]
+ if libtiff:
+ if "quality" in encoderinfo:
+ quality = encoderinfo["quality"]
+ if not isinstance(quality, int) or quality < 0 or quality > 100:
+ msg = "Invalid quality setting"
+ raise ValueError(msg)
+ if compression != "jpeg":
+ msg = "quality setting only supported for 'jpeg' compression"
+ raise ValueError(msg)
+ ifd[JPEGQUALITY] = quality
+
+ logger.debug("Saving using libtiff encoder")
+ logger.debug("Items: %s", sorted(ifd.items()))
+ _fp = 0
+ if hasattr(fp, "fileno"):
+ try:
+ fp.seek(0)
+ _fp = os.dup(fp.fileno())
+ except io.UnsupportedOperation:
+ pass
+
+ # optional types for non core tags
+ types = {}
+ # STRIPOFFSETS and STRIPBYTECOUNTS are added by the library
+ # based on the data in the strip.
+ # The other tags expect arrays with a certain length (fixed or depending on
+ # BITSPERSAMPLE, etc), passing arrays with a different length will result in
+ # segfaults. Block these tags until we add extra validation.
+ # SUBIFD may also cause a segfault.
+ blocklist += [
+ REFERENCEBLACKWHITE,
+ STRIPBYTECOUNTS,
+ STRIPOFFSETS,
+ TRANSFERFUNCTION,
+ SUBIFD,
+ ]
+
+ # bits per sample is a single short in the tiff directory, not a list.
+ atts = {BITSPERSAMPLE: bits[0]}
+ # Merge the ones that we have with (optional) more bits from
+ # the original file, e.g x,y resolution so that we can
+        # the original file, e.g. x,y resolution, so that we can
+ legacy_ifd = {}
+ if hasattr(im, "tag"):
+ legacy_ifd = im.tag.to_v2()
+
+ # SAMPLEFORMAT is determined by the image format and should not be copied
+ # from legacy_ifd.
+ supplied_tags = {**getattr(im, "tag_v2", {}), **legacy_ifd}
+ if SAMPLEFORMAT in supplied_tags:
+ del supplied_tags[SAMPLEFORMAT]
+
+ for tag, value in itertools.chain(ifd.items(), supplied_tags.items()):
+ # Libtiff can only process certain core items without adding
+ # them to the custom dictionary.
+ # Custom items are supported for int, float, unicode, string and byte
+ # values. Other types and tuples require a tagtype.
+ if tag not in TiffTags.LIBTIFF_CORE:
+ if not getattr(Image.core, "libtiff_support_custom_tags", False):
+ continue
+
+ if tag in ifd.tagtype:
+ types[tag] = ifd.tagtype[tag]
+ elif not (isinstance(value, (int, float, str, bytes))):
+ continue
+ else:
+ type = TiffTags.lookup(tag).type
+ if type:
+ types[tag] = type
+ if tag not in atts and tag not in blocklist:
+ if isinstance(value, str):
+ atts[tag] = value.encode("ascii", "replace") + b"\0"
+ elif isinstance(value, IFDRational):
+ atts[tag] = float(value)
+ else:
+ atts[tag] = value
+
+ if SAMPLEFORMAT in atts and len(atts[SAMPLEFORMAT]) == 1:
+ atts[SAMPLEFORMAT] = atts[SAMPLEFORMAT][0]
+
+ logger.debug("Converted items: %s", sorted(atts.items()))
+
+ # libtiff always expects the bytes in native order.
+ # we're storing image byte order. So, if the rawmode
+ # contains I;16, we need to convert from native to image
+ # byte order.
+ if im.mode in ("I;16B", "I;16"):
+ rawmode = "I;16N"
+
+ # Pass tags as sorted list so that the tags are set in a fixed order.
+ # This is required by libtiff for some tags. For example, the JPEGQUALITY
+ # pseudo tag requires that the COMPRESS tag was already set.
+ tags = list(atts.items())
+ tags.sort()
+ a = (rawmode, compression, _fp, filename, tags, types)
+ e = Image._getencoder(im.mode, "libtiff", a, encoderconfig)
+ e.setimage(im.im, (0, 0) + im.size)
+ while True:
+ # undone, change to self.decodermaxblock:
+ errcode, data = e.encode(16 * 1024)[1:]
+ if not _fp:
+ fp.write(data)
+ if errcode:
+ break
+ if _fp:
+ try:
+ os.close(_fp)
+ except OSError:
+ pass
+ if errcode < 0:
+ msg = f"encoder error {errcode} when writing image file"
+ raise OSError(msg)
+
+ else:
+ for tag in blocklist:
+ del ifd[tag]
+ offset = ifd.save(fp)
+
+ ImageFile._save(
+ im, fp, [("raw", (0, 0) + im.size, offset, (rawmode, stride, 1))]
+ )
+
+ # -- helper for multi-page save --
+ if "_debug_multipage" in encoderinfo:
+ # just to access o32 and o16 (using correct byte order)
+ im._debug_multipage = ifd
+
+
+class AppendingTiffWriter:
+ fieldSizes = [
+ 0, # None
+ 1, # byte
+ 1, # ascii
+ 2, # short
+ 4, # long
+ 8, # rational
+ 1, # sbyte
+ 1, # undefined
+ 2, # sshort
+ 4, # slong
+ 8, # srational
+ 4, # float
+ 8, # double
+ 4, # ifd
+ 2, # unicode
+ 4, # complex
+ 8, # long8
+ ]
+
+ # StripOffsets = 273
+ # FreeOffsets = 288
+ # TileOffsets = 324
+ # JPEGQTables = 519
+ # JPEGDCTables = 520
+ # JPEGACTables = 521
+ Tags = {273, 288, 324, 519, 520, 521}
+
+ def __init__(self, fn, new=False):
+ if hasattr(fn, "read"):
+ self.f = fn
+ self.close_fp = False
+ else:
+ self.name = fn
+ self.close_fp = True
+ try:
+ self.f = open(fn, "w+b" if new else "r+b")
+ except OSError:
+ self.f = open(fn, "w+b")
+ self.beginning = self.f.tell()
+ self.setup()
+
+ def setup(self):
+ # Reset everything.
+ self.f.seek(self.beginning, os.SEEK_SET)
+
+ self.whereToWriteNewIFDOffset = None
+ self.offsetOfNewPage = 0
+
+ self.IIMM = iimm = self.f.read(4)
+ if not iimm:
+ # empty file - first page
+ self.isFirst = True
+ return
+
+ self.isFirst = False
+ if iimm == b"II\x2a\x00":
+ self.setEndian("<")
+ elif iimm == b"MM\x00\x2a":
+ self.setEndian(">")
+ else:
+ msg = "Invalid TIFF file header"
+ raise RuntimeError(msg)
+
+ self.skipIFDs()
+ self.goToEnd()
+
+ def finalize(self):
+ if self.isFirst:
+ return
+
+ # fix offsets
+ self.f.seek(self.offsetOfNewPage)
+
+ iimm = self.f.read(4)
+ if not iimm:
+ # msg = "nothing written into new page"
+ # raise RuntimeError(msg)
+ # Make it easy to finish a frame without committing to a new one.
+ return
+
+ if iimm != self.IIMM:
+ msg = "IIMM of new page doesn't match IIMM of first page"
+ raise RuntimeError(msg)
+
+ ifd_offset = self.readLong()
+ ifd_offset += self.offsetOfNewPage
+ self.f.seek(self.whereToWriteNewIFDOffset)
+ self.writeLong(ifd_offset)
+ self.f.seek(ifd_offset)
+ self.fixIFD()
+
+ def newFrame(self):
+ # Call this to finish a frame.
+ self.finalize()
+ self.setup()
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_value, traceback):
+ if self.close_fp:
+ self.close()
+ return False
+
+ def tell(self):
+ return self.f.tell() - self.offsetOfNewPage
+
+ def seek(self, offset, whence=io.SEEK_SET):
+ if whence == os.SEEK_SET:
+ offset += self.offsetOfNewPage
+
+ self.f.seek(offset, whence)
+ return self.tell()
+
+ def goToEnd(self):
+ self.f.seek(0, os.SEEK_END)
+ pos = self.f.tell()
+
+ # pad to 16 byte boundary
+ pad_bytes = 16 - pos % 16
+ if 0 < pad_bytes < 16:
+ self.f.write(bytes(pad_bytes))
+ self.offsetOfNewPage = self.f.tell()
+
+ def setEndian(self, endian):
+ self.endian = endian
+ self.longFmt = self.endian + "L"
+ self.shortFmt = self.endian + "H"
+ self.tagFormat = self.endian + "HHL"
+
+ def skipIFDs(self):
+ while True:
+ ifd_offset = self.readLong()
+ if ifd_offset == 0:
+ self.whereToWriteNewIFDOffset = self.f.tell() - 4
+ break
+
+ self.f.seek(ifd_offset)
+ num_tags = self.readShort()
+ self.f.seek(num_tags * 12, os.SEEK_CUR)
+
+ def write(self, data):
+ return self.f.write(data)
+
+ def readShort(self):
+ (value,) = struct.unpack(self.shortFmt, self.f.read(2))
+ return value
+
+ def readLong(self):
+ (value,) = struct.unpack(self.longFmt, self.f.read(4))
+ return value
+
+ def rewriteLastShortToLong(self, value):
+ self.f.seek(-2, os.SEEK_CUR)
+ bytes_written = self.f.write(struct.pack(self.longFmt, value))
+ if bytes_written is not None and bytes_written != 4:
+ msg = f"wrote only {bytes_written} bytes but wanted 4"
+ raise RuntimeError(msg)
+
+ def rewriteLastShort(self, value):
+ self.f.seek(-2, os.SEEK_CUR)
+ bytes_written = self.f.write(struct.pack(self.shortFmt, value))
+ if bytes_written is not None and bytes_written != 2:
+ msg = f"wrote only {bytes_written} bytes but wanted 2"
+ raise RuntimeError(msg)
+
+ def rewriteLastLong(self, value):
+ self.f.seek(-4, os.SEEK_CUR)
+ bytes_written = self.f.write(struct.pack(self.longFmt, value))
+ if bytes_written is not None and bytes_written != 4:
+ msg = f"wrote only {bytes_written} bytes but wanted 4"
+ raise RuntimeError(msg)
+
+ def writeShort(self, value):
+ bytes_written = self.f.write(struct.pack(self.shortFmt, value))
+ if bytes_written is not None and bytes_written != 2:
+ msg = f"wrote only {bytes_written} bytes but wanted 2"
+ raise RuntimeError(msg)
+
+ def writeLong(self, value):
+ bytes_written = self.f.write(struct.pack(self.longFmt, value))
+ if bytes_written is not None and bytes_written != 4:
+ msg = f"wrote only {bytes_written} bytes but wanted 4"
+ raise RuntimeError(msg)
+
+ def close(self):
+ self.finalize()
+ self.f.close()
+
+ def fixIFD(self):
+ num_tags = self.readShort()
+
+ for i in range(num_tags):
+ tag, field_type, count = struct.unpack(self.tagFormat, self.f.read(8))
+
+ field_size = self.fieldSizes[field_type]
+ total_size = field_size * count
+ is_local = total_size <= 4
+ if not is_local:
+ offset = self.readLong()
+ offset += self.offsetOfNewPage
+ self.rewriteLastLong(offset)
+
+ if tag in self.Tags:
+ cur_pos = self.f.tell()
+
+ if is_local:
+ self.fixOffsets(
+ count, isShort=(field_size == 2), isLong=(field_size == 4)
+ )
+ self.f.seek(cur_pos + 4)
+ else:
+ self.f.seek(offset)
+ self.fixOffsets(
+ count, isShort=(field_size == 2), isLong=(field_size == 4)
+ )
+ self.f.seek(cur_pos)
+
+ offset = cur_pos = None
+
+ elif is_local:
+ # skip the locally stored value that is not an offset
+ self.f.seek(4, os.SEEK_CUR)
+
+ def fixOffsets(self, count, isShort=False, isLong=False):
+ if not isShort and not isLong:
+ msg = "offset is neither short nor long"
+ raise RuntimeError(msg)
+
+ for i in range(count):
+ offset = self.readShort() if isShort else self.readLong()
+ offset += self.offsetOfNewPage
+ if isShort and offset >= 65536:
+ # offset is now too large - we must convert shorts to longs
+ if count != 1:
+ msg = "not implemented"
+ raise RuntimeError(msg) # XXX TODO
+
+                # simple case - there is only one offset, so it is stored
+                # locally (not referenced via another offset)
+ self.rewriteLastShortToLong(offset)
+ self.f.seek(-10, os.SEEK_CUR)
+ self.writeShort(TiffTags.LONG) # rewrite the type to LONG
+ self.f.seek(8, os.SEEK_CUR)
+ elif isShort:
+ self.rewriteLastShort(offset)
+ else:
+ self.rewriteLastLong(offset)
+
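+# A minimal sketch of driving AppendingTiffWriter by hand (_save_all below is
+# the real call site; the file names are hypothetical):
+#
+#     with AppendingTiffWriter("out.tiff", new=True) as tf:
+#         for src in ("page1.tiff", "page2.tiff"):
+#             with Image.open(src) as im:
+#                 im.encoderinfo = {}
+#                 im.encoderconfig = ()
+#                 _save(im, tf, src)
+#             tf.newFrame()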
+
+def _save_all(im, fp, filename):
+ encoderinfo = im.encoderinfo.copy()
+ encoderconfig = im.encoderconfig
+ append_images = list(encoderinfo.get("append_images", []))
+ if not hasattr(im, "n_frames") and not append_images:
+ return _save(im, fp, filename)
+
+ cur_idx = im.tell()
+ try:
+ with AppendingTiffWriter(fp) as tf:
+ for ims in [im] + append_images:
+ ims.encoderinfo = encoderinfo
+ ims.encoderconfig = encoderconfig
+ if not hasattr(ims, "n_frames"):
+ nfr = 1
+ else:
+ nfr = ims.n_frames
+
+ for idx in range(nfr):
+ ims.seek(idx)
+ ims.load()
+ _save(ims, tf, filename)
+ tf.newFrame()
+ finally:
+ im.seek(cur_idx)
+
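+# The public entry point for the path above is Image.save with save_all=True,
+# e.g. (hypothetical file name):
+#
+#     im.save("multipage.tiff", save_all=True, append_images=[im2, im3])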
+
+#
+# --------------------------------------------------------------------
+# Register
+
+Image.register_open(TiffImageFile.format, TiffImageFile, _accept)
+Image.register_save(TiffImageFile.format, _save)
+Image.register_save_all(TiffImageFile.format, _save_all)
+
+Image.register_extensions(TiffImageFile.format, [".tif", ".tiff"])
+
+Image.register_mime(TiffImageFile.format, "image/tiff")
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/TiffTags.py b/Backend/venv/lib/python3.12/site-packages/PIL/TiffTags.py
new file mode 100644
index 00000000..30b05e4e
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/PIL/TiffTags.py
@@ -0,0 +1,560 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# TIFF tags
+#
+# This module provides clear-text names for various well-known
+# TIFF tags. The TIFF codec works just fine without it.
+#
+# Copyright (c) Secret Labs AB 1999.
+#
+# See the README file for information on usage and redistribution.
+#
+
+##
+# This module provides constants and clear-text names for various
+# well-known TIFF tags.
+##
+
+from collections import namedtuple
+
+
+class TagInfo(namedtuple("_TagInfo", "value name type length enum")):
+ __slots__ = []
+
+ def __new__(cls, value=None, name="unknown", type=None, length=None, enum=None):
+ return super().__new__(cls, value, name, type, length, enum or {})
+
+ def cvt_enum(self, value):
+ # Using get will call hash(value), which can be expensive
+ # for some types (e.g. Fraction). Since self.enum is rarely
+ # used, it's usually better to test it first.
+ return self.enum.get(value, value) if self.enum else value
+
+
+def lookup(tag, group=None):
+ """
+ :param tag: Integer tag number
+ :param group: Which :py:data:`~PIL.TiffTags.TAGS_V2_GROUPS` to look in
+
+ .. versionadded:: 8.3.0
+
+    :returns: TagInfo namedtuple, from the ``TAGS_V2`` info if possible,
+      otherwise just populating the value and name from ``TAGS``.
+      If the tag is not recognized, "unknown" is returned for the name.
+
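+    Example::
+
+        from PIL import TiffTags
+        TiffTags.lookup(257).name  # "ImageLength"
+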
+ """
+
+ if group is not None:
+ info = TAGS_V2_GROUPS[group].get(tag) if group in TAGS_V2_GROUPS else None
+ else:
+ info = TAGS_V2.get(tag)
+ return info or TagInfo(tag, TAGS.get(tag, "unknown"))
+
+
+##
+# Map tag numbers to tag info.
+#
+# id: (Name, Type, Length, enum_values)
+#
+# The length here differs from the length in the tiff spec. For
+# numbers, the tiff spec is for the number of fields returned. We
+# agree here. For string-like types, the tiff spec uses the length of
+# field in bytes. In Pillow, we are using the number of expected
+# fields, in general 1 for string-like types.
+
+
+BYTE = 1
+ASCII = 2
+SHORT = 3
+LONG = 4
+RATIONAL = 5
+SIGNED_BYTE = 6
+UNDEFINED = 7
+SIGNED_SHORT = 8
+SIGNED_LONG = 9
+SIGNED_RATIONAL = 10
+FLOAT = 11
+DOUBLE = 12
+IFD = 13
+LONG8 = 16
+
+TAGS_V2 = {
+ 254: ("NewSubfileType", LONG, 1),
+ 255: ("SubfileType", SHORT, 1),
+ 256: ("ImageWidth", LONG, 1),
+ 257: ("ImageLength", LONG, 1),
+ 258: ("BitsPerSample", SHORT, 0),
+ 259: (
+ "Compression",
+ SHORT,
+ 1,
+ {
+ "Uncompressed": 1,
+ "CCITT 1d": 2,
+ "Group 3 Fax": 3,
+ "Group 4 Fax": 4,
+ "LZW": 5,
+ "JPEG": 6,
+ "PackBits": 32773,
+ },
+ ),
+ 262: (
+ "PhotometricInterpretation",
+ SHORT,
+ 1,
+ {
+ "WhiteIsZero": 0,
+ "BlackIsZero": 1,
+ "RGB": 2,
+ "RGB Palette": 3,
+ "Transparency Mask": 4,
+ "CMYK": 5,
+ "YCbCr": 6,
+ "CieLAB": 8,
+ "CFA": 32803, # TIFF/EP, Adobe DNG
+ "LinearRaw": 32892, # Adobe DNG
+ },
+ ),
+ 263: ("Threshholding", SHORT, 1),
+ 264: ("CellWidth", SHORT, 1),
+ 265: ("CellLength", SHORT, 1),
+ 266: ("FillOrder", SHORT, 1),
+ 269: ("DocumentName", ASCII, 1),
+ 270: ("ImageDescription", ASCII, 1),
+ 271: ("Make", ASCII, 1),
+ 272: ("Model", ASCII, 1),
+ 273: ("StripOffsets", LONG, 0),
+ 274: ("Orientation", SHORT, 1),
+ 277: ("SamplesPerPixel", SHORT, 1),
+ 278: ("RowsPerStrip", LONG, 1),
+ 279: ("StripByteCounts", LONG, 0),
+ 280: ("MinSampleValue", SHORT, 0),
+ 281: ("MaxSampleValue", SHORT, 0),
+ 282: ("XResolution", RATIONAL, 1),
+ 283: ("YResolution", RATIONAL, 1),
+ 284: ("PlanarConfiguration", SHORT, 1, {"Contiguous": 1, "Separate": 2}),
+ 285: ("PageName", ASCII, 1),
+ 286: ("XPosition", RATIONAL, 1),
+ 287: ("YPosition", RATIONAL, 1),
+ 288: ("FreeOffsets", LONG, 1),
+ 289: ("FreeByteCounts", LONG, 1),
+ 290: ("GrayResponseUnit", SHORT, 1),
+ 291: ("GrayResponseCurve", SHORT, 0),
+ 292: ("T4Options", LONG, 1),
+ 293: ("T6Options", LONG, 1),
+ 296: ("ResolutionUnit", SHORT, 1, {"none": 1, "inch": 2, "cm": 3}),
+ 297: ("PageNumber", SHORT, 2),
+ 301: ("TransferFunction", SHORT, 0),
+ 305: ("Software", ASCII, 1),
+ 306: ("DateTime", ASCII, 1),
+ 315: ("Artist", ASCII, 1),
+ 316: ("HostComputer", ASCII, 1),
+ 317: ("Predictor", SHORT, 1, {"none": 1, "Horizontal Differencing": 2}),
+ 318: ("WhitePoint", RATIONAL, 2),
+ 319: ("PrimaryChromaticities", RATIONAL, 6),
+ 320: ("ColorMap", SHORT, 0),
+ 321: ("HalftoneHints", SHORT, 2),
+ 322: ("TileWidth", LONG, 1),
+ 323: ("TileLength", LONG, 1),
+ 324: ("TileOffsets", LONG, 0),
+ 325: ("TileByteCounts", LONG, 0),
+ 330: ("SubIFDs", LONG, 0),
+ 332: ("InkSet", SHORT, 1),
+ 333: ("InkNames", ASCII, 1),
+ 334: ("NumberOfInks", SHORT, 1),
+ 336: ("DotRange", SHORT, 0),
+ 337: ("TargetPrinter", ASCII, 1),
+ 338: ("ExtraSamples", SHORT, 0),
+ 339: ("SampleFormat", SHORT, 0),
+ 340: ("SMinSampleValue", DOUBLE, 0),
+ 341: ("SMaxSampleValue", DOUBLE, 0),
+ 342: ("TransferRange", SHORT, 6),
+ 347: ("JPEGTables", UNDEFINED, 1),
+ # obsolete JPEG tags
+ 512: ("JPEGProc", SHORT, 1),
+ 513: ("JPEGInterchangeFormat", LONG, 1),
+ 514: ("JPEGInterchangeFormatLength", LONG, 1),
+ 515: ("JPEGRestartInterval", SHORT, 1),
+ 517: ("JPEGLosslessPredictors", SHORT, 0),
+ 518: ("JPEGPointTransforms", SHORT, 0),
+ 519: ("JPEGQTables", LONG, 0),
+ 520: ("JPEGDCTables", LONG, 0),
+ 521: ("JPEGACTables", LONG, 0),
+ 529: ("YCbCrCoefficients", RATIONAL, 3),
+ 530: ("YCbCrSubSampling", SHORT, 2),
+ 531: ("YCbCrPositioning", SHORT, 1),
+ 532: ("ReferenceBlackWhite", RATIONAL, 6),
+ 700: ("XMP", BYTE, 0),
+ 33432: ("Copyright", ASCII, 1),
+ 33723: ("IptcNaaInfo", UNDEFINED, 1),
+ 34377: ("PhotoshopInfo", BYTE, 0),
+ # FIXME add more tags here
+ 34665: ("ExifIFD", LONG, 1),
+ 34675: ("ICCProfile", UNDEFINED, 1),
+ 34853: ("GPSInfoIFD", LONG, 1),
+ 36864: ("ExifVersion", UNDEFINED, 1),
+ 37724: ("ImageSourceData", UNDEFINED, 1),
+ 40965: ("InteroperabilityIFD", LONG, 1),
+ 41730: ("CFAPattern", UNDEFINED, 1),
+ # MPInfo
+ 45056: ("MPFVersion", UNDEFINED, 1),
+ 45057: ("NumberOfImages", LONG, 1),
+ 45058: ("MPEntry", UNDEFINED, 1),
+ 45059: ("ImageUIDList", UNDEFINED, 0), # UNDONE, check
+ 45060: ("TotalFrames", LONG, 1),
+ 45313: ("MPIndividualNum", LONG, 1),
+ 45569: ("PanOrientation", LONG, 1),
+ 45570: ("PanOverlap_H", RATIONAL, 1),
+ 45571: ("PanOverlap_V", RATIONAL, 1),
+ 45572: ("BaseViewpointNum", LONG, 1),
+ 45573: ("ConvergenceAngle", SIGNED_RATIONAL, 1),
+ 45574: ("BaselineLength", RATIONAL, 1),
+ 45575: ("VerticalDivergence", SIGNED_RATIONAL, 1),
+ 45576: ("AxisDistance_X", SIGNED_RATIONAL, 1),
+ 45577: ("AxisDistance_Y", SIGNED_RATIONAL, 1),
+ 45578: ("AxisDistance_Z", SIGNED_RATIONAL, 1),
+ 45579: ("YawAngle", SIGNED_RATIONAL, 1),
+ 45580: ("PitchAngle", SIGNED_RATIONAL, 1),
+ 45581: ("RollAngle", SIGNED_RATIONAL, 1),
+ 40960: ("FlashPixVersion", UNDEFINED, 1),
+ 50741: ("MakerNoteSafety", SHORT, 1, {"Unsafe": 0, "Safe": 1}),
+ 50780: ("BestQualityScale", RATIONAL, 1),
+ 50838: ("ImageJMetaDataByteCounts", LONG, 0), # Can be more than one
+ 50839: ("ImageJMetaData", UNDEFINED, 1), # see Issue #2006
+}
+TAGS_V2_GROUPS = {
+ # ExifIFD
+ 34665: {
+ 36864: ("ExifVersion", UNDEFINED, 1),
+ 40960: ("FlashPixVersion", UNDEFINED, 1),
+ 40965: ("InteroperabilityIFD", LONG, 1),
+ 41730: ("CFAPattern", UNDEFINED, 1),
+ },
+ # GPSInfoIFD
+ 34853: {
+ 0: ("GPSVersionID", BYTE, 4),
+ 1: ("GPSLatitudeRef", ASCII, 2),
+ 2: ("GPSLatitude", RATIONAL, 3),
+ 3: ("GPSLongitudeRef", ASCII, 2),
+ 4: ("GPSLongitude", RATIONAL, 3),
+ 5: ("GPSAltitudeRef", BYTE, 1),
+ 6: ("GPSAltitude", RATIONAL, 1),
+ 7: ("GPSTimeStamp", RATIONAL, 3),
+ 8: ("GPSSatellites", ASCII, 0),
+ 9: ("GPSStatus", ASCII, 2),
+ 10: ("GPSMeasureMode", ASCII, 2),
+ 11: ("GPSDOP", RATIONAL, 1),
+ 12: ("GPSSpeedRef", ASCII, 2),
+ 13: ("GPSSpeed", RATIONAL, 1),
+ 14: ("GPSTrackRef", ASCII, 2),
+ 15: ("GPSTrack", RATIONAL, 1),
+ 16: ("GPSImgDirectionRef", ASCII, 2),
+ 17: ("GPSImgDirection", RATIONAL, 1),
+ 18: ("GPSMapDatum", ASCII, 0),
+ 19: ("GPSDestLatitudeRef", ASCII, 2),
+ 20: ("GPSDestLatitude", RATIONAL, 3),
+ 21: ("GPSDestLongitudeRef", ASCII, 2),
+ 22: ("GPSDestLongitude", RATIONAL, 3),
+ 23: ("GPSDestBearingRef", ASCII, 2),
+ 24: ("GPSDestBearing", RATIONAL, 1),
+ 25: ("GPSDestDistanceRef", ASCII, 2),
+ 26: ("GPSDestDistance", RATIONAL, 1),
+ 27: ("GPSProcessingMethod", UNDEFINED, 0),
+ 28: ("GPSAreaInformation", UNDEFINED, 0),
+ 29: ("GPSDateStamp", ASCII, 11),
+ 30: ("GPSDifferential", SHORT, 1),
+ },
+ # InteroperabilityIFD
+ 40965: {1: ("InteropIndex", ASCII, 1), 2: ("InteropVersion", UNDEFINED, 1)},
+}
+
+# Legacy Tags structure
+# these tags aren't included above, but were in the previous versions
+TAGS = {
+ 347: "JPEGTables",
+ 700: "XMP",
+ # Additional Exif Info
+ 32932: "Wang Annotation",
+ 33434: "ExposureTime",
+ 33437: "FNumber",
+ 33445: "MD FileTag",
+ 33446: "MD ScalePixel",
+ 33447: "MD ColorTable",
+ 33448: "MD LabName",
+ 33449: "MD SampleInfo",
+ 33450: "MD PrepDate",
+ 33451: "MD PrepTime",
+ 33452: "MD FileUnits",
+ 33550: "ModelPixelScaleTag",
+ 33723: "IptcNaaInfo",
+ 33918: "INGR Packet Data Tag",
+ 33919: "INGR Flag Registers",
+ 33920: "IrasB Transformation Matrix",
+ 33922: "ModelTiepointTag",
+ 34264: "ModelTransformationTag",
+ 34377: "PhotoshopInfo",
+ 34735: "GeoKeyDirectoryTag",
+ 34736: "GeoDoubleParamsTag",
+ 34737: "GeoAsciiParamsTag",
+ 34850: "ExposureProgram",
+ 34852: "SpectralSensitivity",
+ 34855: "ISOSpeedRatings",
+ 34856: "OECF",
+ 34864: "SensitivityType",
+ 34865: "StandardOutputSensitivity",
+ 34866: "RecommendedExposureIndex",
+ 34867: "ISOSpeed",
+ 34868: "ISOSpeedLatitudeyyy",
+ 34869: "ISOSpeedLatitudezzz",
+ 34908: "HylaFAX FaxRecvParams",
+ 34909: "HylaFAX FaxSubAddress",
+ 34910: "HylaFAX FaxRecvTime",
+ 36864: "ExifVersion",
+ 36867: "DateTimeOriginal",
+ 36868: "DateTimeDigitized",
+ 37121: "ComponentsConfiguration",
+ 37122: "CompressedBitsPerPixel",
+ 37724: "ImageSourceData",
+ 37377: "ShutterSpeedValue",
+ 37378: "ApertureValue",
+ 37379: "BrightnessValue",
+ 37380: "ExposureBiasValue",
+ 37381: "MaxApertureValue",
+ 37382: "SubjectDistance",
+ 37383: "MeteringMode",
+ 37384: "LightSource",
+ 37385: "Flash",
+ 37386: "FocalLength",
+ 37396: "SubjectArea",
+ 37500: "MakerNote",
+ 37510: "UserComment",
+ 37520: "SubSec",
+ 37521: "SubSecTimeOriginal",
+ 37522: "SubsecTimeDigitized",
+ 40960: "FlashPixVersion",
+ 40961: "ColorSpace",
+ 40962: "PixelXDimension",
+ 40963: "PixelYDimension",
+ 40964: "RelatedSoundFile",
+ 40965: "InteroperabilityIFD",
+ 41483: "FlashEnergy",
+ 41484: "SpatialFrequencyResponse",
+ 41486: "FocalPlaneXResolution",
+ 41487: "FocalPlaneYResolution",
+ 41488: "FocalPlaneResolutionUnit",
+ 41492: "SubjectLocation",
+ 41493: "ExposureIndex",
+ 41495: "SensingMethod",
+ 41728: "FileSource",
+ 41729: "SceneType",
+ 41730: "CFAPattern",
+ 41985: "CustomRendered",
+ 41986: "ExposureMode",
+ 41987: "WhiteBalance",
+ 41988: "DigitalZoomRatio",
+ 41989: "FocalLengthIn35mmFilm",
+ 41990: "SceneCaptureType",
+ 41991: "GainControl",
+ 41992: "Contrast",
+ 41993: "Saturation",
+ 41994: "Sharpness",
+ 41995: "DeviceSettingDescription",
+ 41996: "SubjectDistanceRange",
+ 42016: "ImageUniqueID",
+ 42032: "CameraOwnerName",
+ 42033: "BodySerialNumber",
+ 42034: "LensSpecification",
+ 42035: "LensMake",
+ 42036: "LensModel",
+ 42037: "LensSerialNumber",
+ 42112: "GDAL_METADATA",
+ 42113: "GDAL_NODATA",
+ 42240: "Gamma",
+ 50215: "Oce Scanjob Description",
+ 50216: "Oce Application Selector",
+ 50217: "Oce Identification Number",
+ 50218: "Oce ImageLogic Characteristics",
+ # Adobe DNG
+ 50706: "DNGVersion",
+ 50707: "DNGBackwardVersion",
+ 50708: "UniqueCameraModel",
+ 50709: "LocalizedCameraModel",
+ 50710: "CFAPlaneColor",
+ 50711: "CFALayout",
+ 50712: "LinearizationTable",
+ 50713: "BlackLevelRepeatDim",
+ 50714: "BlackLevel",
+ 50715: "BlackLevelDeltaH",
+ 50716: "BlackLevelDeltaV",
+ 50717: "WhiteLevel",
+ 50718: "DefaultScale",
+ 50719: "DefaultCropOrigin",
+ 50720: "DefaultCropSize",
+ 50721: "ColorMatrix1",
+ 50722: "ColorMatrix2",
+ 50723: "CameraCalibration1",
+ 50724: "CameraCalibration2",
+ 50725: "ReductionMatrix1",
+ 50726: "ReductionMatrix2",
+ 50727: "AnalogBalance",
+ 50728: "AsShotNeutral",
+ 50729: "AsShotWhiteXY",
+ 50730: "BaselineExposure",
+ 50731: "BaselineNoise",
+ 50732: "BaselineSharpness",
+ 50733: "BayerGreenSplit",
+ 50734: "LinearResponseLimit",
+ 50735: "CameraSerialNumber",
+ 50736: "LensInfo",
+ 50737: "ChromaBlurRadius",
+ 50738: "AntiAliasStrength",
+ 50740: "DNGPrivateData",
+ 50778: "CalibrationIlluminant1",
+ 50779: "CalibrationIlluminant2",
+ 50784: "Alias Layer Metadata",
+}
+
+
+def _populate():
+ for k, v in TAGS_V2.items():
+ # Populate legacy structure.
+ TAGS[k] = v[0]
+ if len(v) == 4:
+ for sk, sv in v[3].items():
+ TAGS[(k, sv)] = sk
+
+ TAGS_V2[k] = TagInfo(k, *v)
+
+ for group, tags in TAGS_V2_GROUPS.items():
+ for k, v in tags.items():
+ tags[k] = TagInfo(k, *v)
+
+
+_populate()
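+
+# After _populate() runs, TAGS_V2 values are TagInfo namedtuples and the flat
+# TAGS dict also maps (tag, enum value) pairs back to enum names, e.g.:
+#
+#     TAGS_V2[259].name  # "Compression"
+#     TAGS[(259, 5)]     # "LZW"
+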
+##
+# Map type numbers to type names -- defined in ImageFileDirectory.
+
+TYPES = {}
+
+# was:
+# TYPES = {
+# 1: "byte",
+# 2: "ascii",
+# 3: "short",
+# 4: "long",
+# 5: "rational",
+# 6: "signed byte",
+# 7: "undefined",
+# 8: "signed short",
+# 9: "signed long",
+# 10: "signed rational",
+# 11: "float",
+# 12: "double",
+# }
+
+#
+# These tags are handled by default in libtiff, without
+# adding to the custom dictionary. From tif_dir.c, searching for
+# case TIFFTAG in the _TIFFVSetField function:
+# Line: item.
+# 148: case TIFFTAG_SUBFILETYPE:
+# 151: case TIFFTAG_IMAGEWIDTH:
+# 154: case TIFFTAG_IMAGELENGTH:
+# 157: case TIFFTAG_BITSPERSAMPLE:
+# 181: case TIFFTAG_COMPRESSION:
+# 202: case TIFFTAG_PHOTOMETRIC:
+# 205: case TIFFTAG_THRESHHOLDING:
+# 208: case TIFFTAG_FILLORDER:
+# 214: case TIFFTAG_ORIENTATION:
+# 221: case TIFFTAG_SAMPLESPERPIXEL:
+# 228: case TIFFTAG_ROWSPERSTRIP:
+# 238: case TIFFTAG_MINSAMPLEVALUE:
+# 241: case TIFFTAG_MAXSAMPLEVALUE:
+# 244: case TIFFTAG_SMINSAMPLEVALUE:
+# 247: case TIFFTAG_SMAXSAMPLEVALUE:
+# 250: case TIFFTAG_XRESOLUTION:
+# 256: case TIFFTAG_YRESOLUTION:
+# 262: case TIFFTAG_PLANARCONFIG:
+# 268: case TIFFTAG_XPOSITION:
+# 271: case TIFFTAG_YPOSITION:
+# 274: case TIFFTAG_RESOLUTIONUNIT:
+# 280: case TIFFTAG_PAGENUMBER:
+# 284: case TIFFTAG_HALFTONEHINTS:
+# 288: case TIFFTAG_COLORMAP:
+# 294: case TIFFTAG_EXTRASAMPLES:
+# 298: case TIFFTAG_MATTEING:
+# 305: case TIFFTAG_TILEWIDTH:
+# 316: case TIFFTAG_TILELENGTH:
+# 327: case TIFFTAG_TILEDEPTH:
+# 333: case TIFFTAG_DATATYPE:
+# 344: case TIFFTAG_SAMPLEFORMAT:
+# 361: case TIFFTAG_IMAGEDEPTH:
+# 364: case TIFFTAG_SUBIFD:
+# 376: case TIFFTAG_YCBCRPOSITIONING:
+# 379: case TIFFTAG_YCBCRSUBSAMPLING:
+# 383: case TIFFTAG_TRANSFERFUNCTION:
+# 389: case TIFFTAG_REFERENCEBLACKWHITE:
+# 393: case TIFFTAG_INKNAMES:
+
+# The following pseudo-tags are also handled by default in libtiff:
+# TIFFTAG_JPEGQUALITY 65537
+
+# some of these are not in our TAGS_V2 dict and were included from tiff.h
+
+# This list also exists in encode.c
+LIBTIFF_CORE = {
+ 255,
+ 256,
+ 257,
+ 258,
+ 259,
+ 262,
+ 263,
+ 266,
+ 274,
+ 277,
+ 278,
+ 280,
+ 281,
+ 340,
+ 341,
+ 282,
+ 283,
+ 284,
+ 286,
+ 287,
+ 296,
+ 297,
+ 321,
+ 320,
+ 338,
+ 32995,
+ 322,
+ 323,
+ 32998,
+ 32996,
+ 339,
+ 32997,
+ 330,
+ 531,
+ 530,
+ 301,
+ 532,
+ 333,
+ # as above
+ 269, # this has been in our tests forever, and works
+ 65537,
+}
+
+LIBTIFF_CORE.remove(255) # We don't have support for subfiletypes
+LIBTIFF_CORE.remove(322) # We don't have support for writing tiled images with libtiff
+LIBTIFF_CORE.remove(323) # Tiled images
+LIBTIFF_CORE.remove(333) # Ink Names either
+
+# Note to advanced users: There may be combinations of these
+# parameters and values that, when added properly, will work and
+# produce valid tiff images that may work in your application.
+# It is safe to add and remove tags from this set from Pillow's point
+# of view so long as you test against libtiff.
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/WalImageFile.py b/Backend/venv/lib/python3.12/site-packages/PIL/WalImageFile.py
new file mode 100644
index 00000000..3d9f97f8
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/PIL/WalImageFile.py
@@ -0,0 +1,123 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# WAL file handling
+#
+# History:
+# 2003-04-23 fl created
+#
+# Copyright (c) 2003 by Fredrik Lundh.
+#
+# See the README file for information on usage and redistribution.
+#
+
+"""
+This reader is based on the specification available from:
+https://www.flipcode.com/archives/Quake_2_BSP_File_Format.shtml
+and has been tested with a few sample files found using google.
+
+.. note::
+ This format cannot be automatically recognized, so the reader
+ is not registered for use with :py:func:`PIL.Image.open()`.
+ To open a WAL file, use the :py:func:`PIL.WalImageFile.open()` function instead.
+"""
+
+from . import Image, ImageFile
+from ._binary import i32le as i32
+
+
+class WalImageFile(ImageFile.ImageFile):
+ format = "WAL"
+ format_description = "Quake2 Texture"
+
+ def _open(self):
+ self._mode = "P"
+
+ # read header fields
+ header = self.fp.read(32 + 24 + 32 + 12)
+ self._size = i32(header, 32), i32(header, 36)
+ Image._decompression_bomb_check(self.size)
+
+ # load pixel data
+ offset = i32(header, 40)
+ self.fp.seek(offset)
+
+ # strings are null-terminated
+ self.info["name"] = header[:32].split(b"\0", 1)[0]
+ next_name = header[56 : 56 + 32].split(b"\0", 1)[0]
+ if next_name:
+ self.info["next_name"] = next_name
+
+ def load(self):
+ if not self.im:
+ self.im = Image.core.new(self.mode, self.size)
+ self.frombytes(self.fp.read(self.size[0] * self.size[1]))
+ self.putpalette(quake2palette)
+ return Image.Image.load(self)
+
+
+def open(filename):
+ """
+ Load texture from a Quake2 WAL texture file.
+
+ By default, a Quake2 standard palette is attached to the texture.
+ To override the palette, use the :py:func:`PIL.Image.Image.putpalette()` method.
+
+ :param filename: WAL file name, or an opened file handle.
+ :returns: An image instance.
+ """
+ return WalImageFile(filename)
+
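+# Example (sketch; the texture file name is hypothetical):
+#
+#     from PIL import WalImageFile
+#
+#     im = WalImageFile.open("textures/arrow0.wal")
+#     im.convert("RGB").save("arrow0.png")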
+
+quake2palette = (
+ # default palette taken from piffo 0.93 by Hans Häggström
+ b"\x01\x01\x01\x0b\x0b\x0b\x12\x12\x12\x17\x17\x17\x1b\x1b\x1b\x1e"
+ b"\x1e\x1e\x22\x22\x22\x26\x26\x26\x29\x29\x29\x2c\x2c\x2c\x2f\x2f"
+ b"\x2f\x32\x32\x32\x35\x35\x35\x37\x37\x37\x3a\x3a\x3a\x3c\x3c\x3c"
+ b"\x24\x1e\x13\x22\x1c\x12\x20\x1b\x12\x1f\x1a\x10\x1d\x19\x10\x1b"
+ b"\x17\x0f\x1a\x16\x0f\x18\x14\x0d\x17\x13\x0d\x16\x12\x0d\x14\x10"
+ b"\x0b\x13\x0f\x0b\x10\x0d\x0a\x0f\x0b\x0a\x0d\x0b\x07\x0b\x0a\x07"
+ b"\x23\x23\x26\x22\x22\x25\x22\x20\x23\x21\x1f\x22\x20\x1e\x20\x1f"
+ b"\x1d\x1e\x1d\x1b\x1c\x1b\x1a\x1a\x1a\x19\x19\x18\x17\x17\x17\x16"
+ b"\x16\x14\x14\x14\x13\x13\x13\x10\x10\x10\x0f\x0f\x0f\x0d\x0d\x0d"
+ b"\x2d\x28\x20\x29\x24\x1c\x27\x22\x1a\x25\x1f\x17\x38\x2e\x1e\x31"
+ b"\x29\x1a\x2c\x25\x17\x26\x20\x14\x3c\x30\x14\x37\x2c\x13\x33\x28"
+ b"\x12\x2d\x24\x10\x28\x1f\x0f\x22\x1a\x0b\x1b\x14\x0a\x13\x0f\x07"
+ b"\x31\x1a\x16\x30\x17\x13\x2e\x16\x10\x2c\x14\x0d\x2a\x12\x0b\x27"
+ b"\x0f\x0a\x25\x0f\x07\x21\x0d\x01\x1e\x0b\x01\x1c\x0b\x01\x1a\x0b"
+ b"\x01\x18\x0a\x01\x16\x0a\x01\x13\x0a\x01\x10\x07\x01\x0d\x07\x01"
+ b"\x29\x23\x1e\x27\x21\x1c\x26\x20\x1b\x25\x1f\x1a\x23\x1d\x19\x21"
+ b"\x1c\x18\x20\x1b\x17\x1e\x19\x16\x1c\x18\x14\x1b\x17\x13\x19\x14"
+ b"\x10\x17\x13\x0f\x14\x10\x0d\x12\x0f\x0b\x0f\x0b\x0a\x0b\x0a\x07"
+ b"\x26\x1a\x0f\x23\x19\x0f\x20\x17\x0f\x1c\x16\x0f\x19\x13\x0d\x14"
+ b"\x10\x0b\x10\x0d\x0a\x0b\x0a\x07\x33\x22\x1f\x35\x29\x26\x37\x2f"
+ b"\x2d\x39\x35\x34\x37\x39\x3a\x33\x37\x39\x30\x34\x36\x2b\x31\x34"
+ b"\x27\x2e\x31\x22\x2b\x2f\x1d\x28\x2c\x17\x25\x2a\x0f\x20\x26\x0d"
+ b"\x1e\x25\x0b\x1c\x22\x0a\x1b\x20\x07\x19\x1e\x07\x17\x1b\x07\x14"
+ b"\x18\x01\x12\x16\x01\x0f\x12\x01\x0b\x0d\x01\x07\x0a\x01\x01\x01"
+ b"\x2c\x21\x21\x2a\x1f\x1f\x29\x1d\x1d\x27\x1c\x1c\x26\x1a\x1a\x24"
+ b"\x18\x18\x22\x17\x17\x21\x16\x16\x1e\x13\x13\x1b\x12\x12\x18\x10"
+ b"\x10\x16\x0d\x0d\x12\x0b\x0b\x0d\x0a\x0a\x0a\x07\x07\x01\x01\x01"
+ b"\x2e\x30\x29\x2d\x2e\x27\x2b\x2c\x26\x2a\x2a\x24\x28\x29\x23\x27"
+ b"\x27\x21\x26\x26\x1f\x24\x24\x1d\x22\x22\x1c\x1f\x1f\x1a\x1c\x1c"
+ b"\x18\x19\x19\x16\x17\x17\x13\x13\x13\x10\x0f\x0f\x0d\x0b\x0b\x0a"
+ b"\x30\x1e\x1b\x2d\x1c\x19\x2c\x1a\x17\x2a\x19\x14\x28\x17\x13\x26"
+ b"\x16\x10\x24\x13\x0f\x21\x12\x0d\x1f\x10\x0b\x1c\x0f\x0a\x19\x0d"
+ b"\x0a\x16\x0b\x07\x12\x0a\x07\x0f\x07\x01\x0a\x01\x01\x01\x01\x01"
+ b"\x28\x29\x38\x26\x27\x36\x25\x26\x34\x24\x24\x31\x22\x22\x2f\x20"
+ b"\x21\x2d\x1e\x1f\x2a\x1d\x1d\x27\x1b\x1b\x25\x19\x19\x21\x17\x17"
+ b"\x1e\x14\x14\x1b\x13\x12\x17\x10\x0f\x13\x0d\x0b\x0f\x0a\x07\x07"
+ b"\x2f\x32\x29\x2d\x30\x26\x2b\x2e\x24\x29\x2c\x21\x27\x2a\x1e\x25"
+ b"\x28\x1c\x23\x26\x1a\x21\x25\x18\x1e\x22\x14\x1b\x1f\x10\x19\x1c"
+ b"\x0d\x17\x1a\x0a\x13\x17\x07\x10\x13\x01\x0d\x0f\x01\x0a\x0b\x01"
+ b"\x01\x3f\x01\x13\x3c\x0b\x1b\x39\x10\x20\x35\x14\x23\x31\x17\x23"
+ b"\x2d\x18\x23\x29\x18\x3f\x3f\x3f\x3f\x3f\x39\x3f\x3f\x31\x3f\x3f"
+ b"\x2a\x3f\x3f\x20\x3f\x3f\x14\x3f\x3c\x12\x3f\x39\x0f\x3f\x35\x0b"
+ b"\x3f\x32\x07\x3f\x2d\x01\x3d\x2a\x01\x3b\x26\x01\x39\x21\x01\x37"
+ b"\x1d\x01\x34\x1a\x01\x32\x16\x01\x2f\x12\x01\x2d\x0f\x01\x2a\x0b"
+ b"\x01\x27\x07\x01\x23\x01\x01\x1d\x01\x01\x17\x01\x01\x10\x01\x01"
+ b"\x3d\x01\x01\x19\x19\x3f\x3f\x01\x01\x01\x01\x3f\x16\x16\x13\x10"
+ b"\x10\x0f\x0d\x0d\x0b\x3c\x2e\x2a\x36\x27\x20\x30\x21\x18\x29\x1b"
+ b"\x10\x3c\x39\x37\x37\x32\x2f\x31\x2c\x28\x2b\x26\x21\x30\x22\x20"
+)
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/WebPImagePlugin.py b/Backend/venv/lib/python3.12/site-packages/PIL/WebPImagePlugin.py
new file mode 100644
index 00000000..612fc094
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/PIL/WebPImagePlugin.py
@@ -0,0 +1,361 @@
+from io import BytesIO
+
+from . import Image, ImageFile
+
+try:
+ from . import _webp
+
+ SUPPORTED = True
+except ImportError:
+ SUPPORTED = False
+
+
+_VALID_WEBP_MODES = {"RGBX": True, "RGBA": True, "RGB": True}
+
+_VALID_WEBP_LEGACY_MODES = {"RGB": True, "RGBA": True}
+
+_VP8_MODES_BY_IDENTIFIER = {
+ b"VP8 ": "RGB",
+ b"VP8X": "RGBA",
+ b"VP8L": "RGBA", # lossless
+}
+
+
+def _accept(prefix):
+ is_riff_file_format = prefix[:4] == b"RIFF"
+ is_webp_file = prefix[8:12] == b"WEBP"
+ is_valid_vp8_mode = prefix[12:16] in _VP8_MODES_BY_IDENTIFIER
+
+ if is_riff_file_format and is_webp_file and is_valid_vp8_mode:
+ if not SUPPORTED:
+ return (
+ "image file could not be identified because WEBP support not installed"
+ )
+ return True
+
+
+class WebPImageFile(ImageFile.ImageFile):
+ format = "WEBP"
+ format_description = "WebP image"
+ __loaded = 0
+ __logical_frame = 0
+
+ def _open(self):
+ if not _webp.HAVE_WEBPANIM:
+ # Legacy mode
+ data, width, height, self._mode, icc_profile, exif = _webp.WebPDecode(
+ self.fp.read()
+ )
+ if icc_profile:
+ self.info["icc_profile"] = icc_profile
+ if exif:
+ self.info["exif"] = exif
+ self._size = width, height
+ self.fp = BytesIO(data)
+ self.tile = [("raw", (0, 0) + self.size, 0, self.mode)]
+ self.n_frames = 1
+ self.is_animated = False
+ return
+
+ # Use the newer AnimDecoder API to parse the (possibly) animated file,
+ # and access muxed chunks like ICC/EXIF/XMP.
+ self._decoder = _webp.WebPAnimDecoder(self.fp.read())
+
+ # Get info from decoder
+ width, height, loop_count, bgcolor, frame_count, mode = self._decoder.get_info()
+ self._size = width, height
+ self.info["loop"] = loop_count
+ bg_a, bg_r, bg_g, bg_b = (
+ (bgcolor >> 24) & 0xFF,
+ (bgcolor >> 16) & 0xFF,
+ (bgcolor >> 8) & 0xFF,
+ bgcolor & 0xFF,
+ )
+ self.info["background"] = (bg_r, bg_g, bg_b, bg_a)
+ self.n_frames = frame_count
+ self.is_animated = self.n_frames > 1
+ self._mode = "RGB" if mode == "RGBX" else mode
+ self.rawmode = mode
+ self.tile = []
+
+ # Attempt to read ICC / EXIF / XMP chunks from file
+ icc_profile = self._decoder.get_chunk("ICCP")
+ exif = self._decoder.get_chunk("EXIF")
+ xmp = self._decoder.get_chunk("XMP ")
+ if icc_profile:
+ self.info["icc_profile"] = icc_profile
+ if exif:
+ self.info["exif"] = exif
+ if xmp:
+ self.info["xmp"] = xmp
+
+ # Initialize seek state
+ self._reset(reset=False)
+
+ def _getexif(self):
+ if "exif" not in self.info:
+ return None
+ return self.getexif()._get_merged_dict()
+
+ def getxmp(self):
+ """
+ Returns a dictionary containing the XMP tags.
+ Requires defusedxml to be installed.
+
+ :returns: XMP tags in a dictionary.
+ """
+ return self._getxmp(self.info["xmp"]) if "xmp" in self.info else {}
+
+ def seek(self, frame):
+ if not self._seek_check(frame):
+ return
+
+ # Set logical frame to requested position
+ self.__logical_frame = frame
+
+ def _reset(self, reset=True):
+ if reset:
+ self._decoder.reset()
+ self.__physical_frame = 0
+ self.__loaded = -1
+ self.__timestamp = 0
+
+ def _get_next(self):
+ # Get next frame
+ ret = self._decoder.get_next()
+ self.__physical_frame += 1
+
+ # Check if an error occurred
+ if ret is None:
+ self._reset() # Reset just to be safe
+ self.seek(0)
+ msg = "failed to decode next frame in WebP file"
+ raise EOFError(msg)
+
+ # Compute duration
+ data, timestamp = ret
+ duration = timestamp - self.__timestamp
+ self.__timestamp = timestamp
+
+ # libwebp gives frame end, adjust to start of frame
+ timestamp -= duration
+ return data, timestamp, duration
+
+ def _seek(self, frame):
+ if self.__physical_frame == frame:
+ return # Nothing to do
+ if frame < self.__physical_frame:
+ self._reset() # Rewind to beginning
+ while self.__physical_frame < frame:
+ self._get_next() # Advance to the requested frame
+
+ def load(self):
+ if _webp.HAVE_WEBPANIM:
+ if self.__loaded != self.__logical_frame:
+ self._seek(self.__logical_frame)
+
+ # We need to load the image data for this frame
+ data, timestamp, duration = self._get_next()
+ self.info["timestamp"] = timestamp
+ self.info["duration"] = duration
+ self.__loaded = self.__logical_frame
+
+ # Set tile
+ if self.fp and self._exclusive_fp:
+ self.fp.close()
+ self.fp = BytesIO(data)
+ self.tile = [("raw", (0, 0) + self.size, 0, self.rawmode)]
+
+ return super().load()
+
+ def tell(self):
+ if not _webp.HAVE_WEBPANIM:
+ return super().tell()
+
+ return self.__logical_frame
+
+
+def _save_all(im, fp, filename):
+ encoderinfo = im.encoderinfo.copy()
+ append_images = list(encoderinfo.get("append_images", []))
+
+ # If total frame count is 1, then save using the legacy API, which
+ # will preserve non-alpha modes
+ total = 0
+ for ims in [im] + append_images:
+ total += getattr(ims, "n_frames", 1)
+ if total == 1:
+ _save(im, fp, filename)
+ return
+
+ background = (0, 0, 0, 0)
+ if "background" in encoderinfo:
+ background = encoderinfo["background"]
+ elif "background" in im.info:
+ background = im.info["background"]
+ if isinstance(background, int):
+ # GifImagePlugin stores a global color table index in
+ # info["background"]. So it must be converted to an RGBA value
+ palette = im.getpalette()
+ if palette:
+ r, g, b = palette[background * 3 : (background + 1) * 3]
+ background = (r, g, b, 255)
+ else:
+ background = (background, background, background, 255)
+
+ duration = im.encoderinfo.get("duration", im.info.get("duration", 0))
+ loop = im.encoderinfo.get("loop", 0)
+ minimize_size = im.encoderinfo.get("minimize_size", False)
+ kmin = im.encoderinfo.get("kmin", None)
+ kmax = im.encoderinfo.get("kmax", None)
+ allow_mixed = im.encoderinfo.get("allow_mixed", False)
+ verbose = False
+ lossless = im.encoderinfo.get("lossless", False)
+ quality = im.encoderinfo.get("quality", 80)
+ method = im.encoderinfo.get("method", 0)
+ icc_profile = im.encoderinfo.get("icc_profile") or ""
+ exif = im.encoderinfo.get("exif", "")
+ if isinstance(exif, Image.Exif):
+ exif = exif.tobytes()
+ xmp = im.encoderinfo.get("xmp", "")
+ if allow_mixed:
+ lossless = False
+
+ # Sensible keyframe defaults are from gif2webp.c script
+ if kmin is None:
+ kmin = 9 if lossless else 3
+ if kmax is None:
+ kmax = 17 if lossless else 5
+
+ # Validate background color
+ if (
+ not isinstance(background, (list, tuple))
+ or len(background) != 4
+ or not all(0 <= v < 256 for v in background)
+ ):
+ msg = f"Background color is not an RGBA tuple clamped to (0-255): {background}"
+ raise OSError(msg)
+
+ # Convert to packed uint
+ bg_r, bg_g, bg_b, bg_a = background
+ background = (bg_a << 24) | (bg_r << 16) | (bg_g << 8) | (bg_b << 0)
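+ # Worked example (illustrative): an opaque red background (255, 0, 0, 255)
+ # packs to 0xFFFF0000, i.e. A=0xFF, R=0xFF, G=0x00, B=0x00.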
+
+ # Setup the WebP animation encoder
+ enc = _webp.WebPAnimEncoder(
+ im.size[0],
+ im.size[1],
+ background,
+ loop,
+ minimize_size,
+ kmin,
+ kmax,
+ allow_mixed,
+ verbose,
+ )
+
+ # Add each frame
+ frame_idx = 0
+ timestamp = 0
+ cur_idx = im.tell()
+ try:
+ for ims in [im] + append_images:
+ # Get # of frames in this image
+ nfr = getattr(ims, "n_frames", 1)
+
+ for idx in range(nfr):
+ ims.seek(idx)
+ ims.load()
+
+ # Make sure image mode is supported
+ frame = ims
+ rawmode = ims.mode
+ if ims.mode not in _VALID_WEBP_MODES:
+ alpha = (
+ "A" in ims.mode
+ or "a" in ims.mode
+ or (ims.mode == "P" and "A" in ims.im.getpalettemode())
+ )
+ rawmode = "RGBA" if alpha else "RGB"
+ frame = ims.convert(rawmode)
+
+ if rawmode == "RGB":
+ # For faster conversion, use RGBX
+ rawmode = "RGBX"
+
+ # Append the frame to the animation encoder
+ enc.add(
+ frame.tobytes("raw", rawmode),
+ round(timestamp),
+ frame.size[0],
+ frame.size[1],
+ rawmode,
+ lossless,
+ quality,
+ method,
+ )
+
+ # Update timestamp and frame index
+ if isinstance(duration, (list, tuple)):
+ timestamp += duration[frame_idx]
+ else:
+ timestamp += duration
+ frame_idx += 1
+
+ finally:
+ im.seek(cur_idx)
+
+ # Force encoder to flush frames
+ enc.add(None, round(timestamp), 0, 0, "", lossless, quality, 0)
+
+ # Get the final output from the encoder
+ data = enc.assemble(icc_profile, exif, xmp)
+ if data is None:
+ msg = "cannot write file as WebP (encoder returned None)"
+ raise OSError(msg)
+
+ fp.write(data)
+
+
+def _save(im, fp, filename):
+ lossless = im.encoderinfo.get("lossless", False)
+ quality = im.encoderinfo.get("quality", 80)
+ icc_profile = im.encoderinfo.get("icc_profile") or ""
+ exif = im.encoderinfo.get("exif", b"")
+ if isinstance(exif, Image.Exif):
+ exif = exif.tobytes()
+ if exif.startswith(b"Exif\x00\x00"):
+ exif = exif[6:]
+ xmp = im.encoderinfo.get("xmp", "")
+ method = im.encoderinfo.get("method", 4)
+ exact = 1 if im.encoderinfo.get("exact") else 0
+
+ if im.mode not in _VALID_WEBP_LEGACY_MODES:
+ im = im.convert("RGBA" if im.has_transparency_data else "RGB")
+
+ data = _webp.WebPEncode(
+ im.tobytes(),
+ im.size[0],
+ im.size[1],
+ lossless,
+ float(quality),
+ im.mode,
+ icc_profile,
+ method,
+ exact,
+ exif,
+ xmp,
+ )
+ if data is None:
+ msg = "cannot write file as WebP (encoder returned None)"
+ raise OSError(msg)
+
+ fp.write(data)
+
+
+Image.register_open(WebPImageFile.format, WebPImageFile, _accept)
+if SUPPORTED:
+ Image.register_save(WebPImageFile.format, _save)
+ if _webp.HAVE_WEBPANIM:
+ Image.register_save_all(WebPImageFile.format, _save_all)
+ Image.register_extension(WebPImageFile.format, ".webp")
+ Image.register_mime(WebPImageFile.format, "image/webp")
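+
+# Usage sketch (illustrative, not part of this module); "anim.webp" and
+# "out.webp" are hypothetical file names:
+#
+#   from PIL import Image
+#
+#   with Image.open("anim.webp") as im:
+#       im.seek(im.n_frames - 1)  # jump to the last frame
+#       im.save("out.webp", save_all=True, duration=100, loop=0)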
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/WmfImagePlugin.py b/Backend/venv/lib/python3.12/site-packages/PIL/WmfImagePlugin.py
new file mode 100644
index 00000000..3e5fb015
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/PIL/WmfImagePlugin.py
@@ -0,0 +1,178 @@
+#
+# The Python Imaging Library
+# $Id$
+#
+# WMF stub codec
+#
+# history:
+# 1996-12-14 fl Created
+# 2004-02-22 fl Turned into a stub driver
+# 2004-02-23 fl Added EMF support
+#
+# Copyright (c) Secret Labs AB 1997-2004. All rights reserved.
+# Copyright (c) Fredrik Lundh 1996.
+#
+# See the README file for information on usage and redistribution.
+#
+# WMF/EMF reference documentation:
+# https://winprotocoldoc.blob.core.windows.net/productionwindowsarchives/MS-WMF/[MS-WMF].pdf
+# http://wvware.sourceforge.net/caolan/index.html
+# http://wvware.sourceforge.net/caolan/ora-wmf.html
+
+from . import Image, ImageFile
+from ._binary import i16le as word
+from ._binary import si16le as short
+from ._binary import si32le as _long
+
+_handler = None
+
+
+def register_handler(handler):
+ """
+ Install application-specific WMF image handler.
+
+ :param handler: Handler object.
+ """
+ global _handler
+ _handler = handler
+
+
+if hasattr(Image.core, "drawwmf"):
+ # install default handler (windows only)
+
+ class WmfHandler:
+ def open(self, im):
+ im._mode = "RGB"
+ self.bbox = im.info["wmf_bbox"]
+
+ def load(self, im):
+ im.fp.seek(0) # rewind
+ return Image.frombytes(
+ "RGB",
+ im.size,
+ Image.core.drawwmf(im.fp.read(), im.size, self.bbox),
+ "raw",
+ "BGR",
+ (im.size[0] * 3 + 3) & -4,
+ -1,
+ )
+
+ register_handler(WmfHandler())
+
+#
+# --------------------------------------------------------------------
+# Read WMF file
+
+
+def _accept(prefix):
+ return (
+ prefix[:6] == b"\xd7\xcd\xc6\x9a\x00\x00" or prefix[:4] == b"\x01\x00\x00\x00"
+ )
+
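+# Note: the first signature is the placeable-WMF magic 0x9AC6CDD7 stored
+# little-endian; the second is the EMR_HEADER record type (1) that opens
+# every EMF file.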
+
+##
+# Image plugin for Windows metafiles.
+
+
+class WmfStubImageFile(ImageFile.StubImageFile):
+ format = "WMF"
+ format_description = "Windows Metafile"
+
+ def _open(self):
+ self._inch = None
+
+ # check placeable header
+ s = self.fp.read(80)
+
+ if s[:6] == b"\xd7\xcd\xc6\x9a\x00\x00":
+ # placeable windows metafile
+
+ # get units per inch
+ self._inch = word(s, 14)
+
+ # get bounding box
+ x0 = short(s, 6)
+ y0 = short(s, 8)
+ x1 = short(s, 10)
+ y1 = short(s, 12)
+
+ # normalize size to 72 dots per inch
+ self.info["dpi"] = 72
+ size = (
+ (x1 - x0) * self.info["dpi"] // self._inch,
+ (y1 - y0) * self.info["dpi"] // self._inch,
+ )
+
+ self.info["wmf_bbox"] = x0, y0, x1, y1
+
+ # sanity check (standard metafile header)
+ if s[22:26] != b"\x01\x00\t\x00":
+ msg = "Unsupported WMF file format"
+ raise SyntaxError(msg)
+
+ elif s[:4] == b"\x01\x00\x00\x00" and s[40:44] == b" EMF":
+ # enhanced metafile
+
+ # get bounding box
+ x0 = _long(s, 8)
+ y0 = _long(s, 12)
+ x1 = _long(s, 16)
+ y1 = _long(s, 20)
+
+ # get frame (in 0.01 millimeter units)
+ frame = _long(s, 24), _long(s, 28), _long(s, 32), _long(s, 36)
+
+ size = x1 - x0, y1 - y0
+
+ # calculate dots per inch from bbox and frame
+ xdpi = 2540.0 * (x1 - x0) / (frame[2] - frame[0])
+ ydpi = 2540.0 * (y1 - y0) / (frame[3] - frame[1])
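+ # frame is given in 0.01 millimeter units; 2540 of those make an inch,
+ # so dpi = 2540 * pixel_extent / frame_extent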
+
+ self.info["wmf_bbox"] = x0, y0, x1, y1
+
+ if xdpi == ydpi:
+ self.info["dpi"] = xdpi
+ else:
+ self.info["dpi"] = xdpi, ydpi
+
+ else:
+ msg = "Unsupported file format"
+ raise SyntaxError(msg)
+
+ self._mode = "RGB"
+ self._size = size
+
+ loader = self._load()
+ if loader:
+ loader.open(self)
+
+ def _load(self):
+ return _handler
+
+ def load(self, dpi=None):
+ if dpi is not None and self._inch is not None:
+ self.info["dpi"] = dpi
+ x0, y0, x1, y1 = self.info["wmf_bbox"]
+ self._size = (
+ (x1 - x0) * self.info["dpi"] // self._inch,
+ (y1 - y0) * self.info["dpi"] // self._inch,
+ )
+ return super().load()
+
+
+def _save(im, fp, filename):
+ if _handler is None or not hasattr(_handler, "save"):
+ msg = "WMF save handler not installed"
+ raise OSError(msg)
+ _handler.save(im, fp, filename)
+
+
+#
+# --------------------------------------------------------------------
+# Registry stuff
+
+
+Image.register_open(WmfStubImageFile.format, WmfStubImageFile, _accept)
+Image.register_save(WmfStubImageFile.format, _save)
+
+Image.register_extensions(WmfStubImageFile.format, [".wmf", ".emf"])
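+
+# Usage sketch (illustrative, not part of this module): outside Windows the
+# stub cannot render, so an application installs its own handler. MyHandler
+# is hypothetical; a handler needs open(im) and load(im) methods.
+#
+#   from PIL import Image, WmfImagePlugin
+#
+#   class MyHandler:
+#       def open(self, im):
+#           im._mode = "RGB"
+#
+#       def load(self, im):
+#           return Image.new("RGB", im.size)  # stand-in rendering
+#
+#   WmfImagePlugin.register_handler(MyHandler())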
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/XVThumbImagePlugin.py b/Backend/venv/lib/python3.12/site-packages/PIL/XVThumbImagePlugin.py
new file mode 100644
index 00000000..eda60c5c
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/PIL/XVThumbImagePlugin.py
@@ -0,0 +1,78 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# XV Thumbnail file handler by Charles E. "Gene" Cash
+# (gcash@magicnet.net)
+#
+# see xvcolor.c and xvbrowse.c in the sources to John Bradley's XV,
+# available from ftp://ftp.cis.upenn.edu/pub/xv/
+#
+# history:
+# 98-08-15 cec created (b/w only)
+# 98-12-09 cec added color palette
+# 98-12-28 fl added to PIL (with only a few very minor modifications)
+#
+# To do:
+# FIXME: make save work (this requires quantization support)
+#
+
+from . import Image, ImageFile, ImagePalette
+from ._binary import o8
+
+_MAGIC = b"P7 332"
+
+# standard color palette for thumbnails (RGB332)
+PALETTE = b""
+for r in range(8):
+ for g in range(8):
+ for b in range(4):
+ PALETTE = PALETTE + (
+ o8((r * 255) // 7) + o8((g * 255) // 7) + o8((b * 255) // 3)
+ )
+
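+# Illustrative note: the loop above builds the fixed RGB332 palette, where
+# index r*32 + g*4 + b maps to scaled 8-bit RGB. For example, index 227
+# (r=7, g=0, b=3) expands to (255, 0, 255), i.e. magenta.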
+
+def _accept(prefix):
+ return prefix[:6] == _MAGIC
+
+
+##
+# Image plugin for XV thumbnail images.
+
+
+class XVThumbImageFile(ImageFile.ImageFile):
+ format = "XVThumb"
+ format_description = "XV thumbnail image"
+
+ def _open(self):
+ # check magic
+ if not _accept(self.fp.read(6)):
+ msg = "not an XV thumbnail file"
+ raise SyntaxError(msg)
+
+ # Skip to beginning of next line
+ self.fp.readline()
+
+ # skip info comments
+ while True:
+ s = self.fp.readline()
+ if not s:
+ msg = "Unexpected EOF reading XV thumbnail file"
+ raise SyntaxError(msg)
+ if s[0] != 35: # ie. when not a comment: '#'
+ break
+
+ # parse header line (already read)
+ s = s.strip().split()
+
+ self._mode = "P"
+ self._size = int(s[0]), int(s[1])
+
+ self.palette = ImagePalette.raw("RGB", PALETTE)
+
+ self.tile = [("raw", (0, 0) + self.size, self.fp.tell(), (self.mode, 0, 1))]
+
+
+# --------------------------------------------------------------------
+
+Image.register_open(XVThumbImageFile.format, XVThumbImageFile, _accept)
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/XbmImagePlugin.py b/Backend/venv/lib/python3.12/site-packages/PIL/XbmImagePlugin.py
new file mode 100644
index 00000000..71cd57d7
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/PIL/XbmImagePlugin.py
@@ -0,0 +1,94 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# XBM File handling
+#
+# History:
+# 1995-09-08 fl Created
+# 1996-11-01 fl Added save support
+# 1997-07-07 fl Made header parser more tolerant
+# 1997-07-22 fl Fixed yet another parser bug
+# 2001-02-17 fl Use 're' instead of 'regex' (Python 2.1) (0.4)
+# 2001-05-13 fl Added hotspot handling (based on code from Bernhard Herzog)
+# 2004-02-24 fl Allow some whitespace before first #define
+#
+# Copyright (c) 1997-2004 by Secret Labs AB
+# Copyright (c) 1996-1997 by Fredrik Lundh
+#
+# See the README file for information on usage and redistribution.
+#
+
+import re
+
+from . import Image, ImageFile
+
+# XBM header
+xbm_head = re.compile(
+ rb"\s*#define[ \t]+.*_width[ \t]+(?P[0-9]+)[\r\n]+"
+ b"#define[ \t]+.*_height[ \t]+(?P[0-9]+)[\r\n]+"
+ b"(?P"
+ b"#define[ \t]+[^_]*_x_hot[ \t]+(?P[0-9]+)[\r\n]+"
+ b"#define[ \t]+[^_]*_y_hot[ \t]+(?P[0-9]+)[\r\n]+"
+ b")?"
+ rb"[\000-\377]*_bits\[]"
+)
+
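+# Illustrative example of a header this pattern accepts (names are
+# hypothetical; the hotspot lines are optional):
+#
+#   #define im_width 16
+#   #define im_height 16
+#   static char im_bits[] = {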
+
+def _accept(prefix):
+ return prefix.lstrip()[:7] == b"#define"
+
+
+##
+# Image plugin for X11 bitmaps.
+
+
+class XbmImageFile(ImageFile.ImageFile):
+ format = "XBM"
+ format_description = "X11 Bitmap"
+
+ def _open(self):
+ m = xbm_head.match(self.fp.read(512))
+
+ if not m:
+ msg = "not a XBM file"
+ raise SyntaxError(msg)
+
+ xsize = int(m.group("width"))
+ ysize = int(m.group("height"))
+
+ if m.group("hotspot"):
+ self.info["hotspot"] = (int(m.group("xhot")), int(m.group("yhot")))
+
+ self._mode = "1"
+ self._size = xsize, ysize
+
+ self.tile = [("xbm", (0, 0) + self.size, m.end(), None)]
+
+
+def _save(im, fp, filename):
+ if im.mode != "1":
+ msg = f"cannot write mode {im.mode} as XBM"
+ raise OSError(msg)
+
+ fp.write(f"#define im_width {im.size[0]}\n".encode("ascii"))
+ fp.write(f"#define im_height {im.size[1]}\n".encode("ascii"))
+
+ hotspot = im.encoderinfo.get("hotspot")
+ if hotspot:
+ fp.write(f"#define im_x_hot {hotspot[0]}\n".encode("ascii"))
+ fp.write(f"#define im_y_hot {hotspot[1]}\n".encode("ascii"))
+
+ fp.write(b"static char im_bits[] = {\n")
+
+ ImageFile._save(im, fp, [("xbm", (0, 0) + im.size, 0, None)])
+
+ fp.write(b"};\n")
+
+
+Image.register_open(XbmImageFile.format, XbmImageFile, _accept)
+Image.register_save(XbmImageFile.format, _save)
+
+Image.register_extension(XbmImageFile.format, ".xbm")
+
+Image.register_mime(XbmImageFile.format, "image/xbm")
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/XpmImagePlugin.py b/Backend/venv/lib/python3.12/site-packages/PIL/XpmImagePlugin.py
new file mode 100644
index 00000000..8491d3b7
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/PIL/XpmImagePlugin.py
@@ -0,0 +1,128 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# XPM File handling
+#
+# History:
+# 1996-12-29 fl Created
+# 2001-02-17 fl Use 're' instead of 'regex' (Python 2.1) (0.7)
+#
+# Copyright (c) Secret Labs AB 1997-2001.
+# Copyright (c) Fredrik Lundh 1996-2001.
+#
+# See the README file for information on usage and redistribution.
+#
+
+
+import re
+
+from . import Image, ImageFile, ImagePalette
+from ._binary import o8
+
+# XPM header
+xpm_head = re.compile(b'"([0-9]*) ([0-9]*) ([0-9]*) ([0-9]*)')
+
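+# Illustrative note: the values line this pattern captures looks like
+#
+#   "16 16 4 1",
+#
+# giving width, height, number of palette entries, and characters per pixel.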
+
+def _accept(prefix):
+ return prefix[:9] == b"/* XPM */"
+
+
+##
+# Image plugin for X11 pixel maps.
+
+
+class XpmImageFile(ImageFile.ImageFile):
+ format = "XPM"
+ format_description = "X11 Pixel Map"
+
+ def _open(self):
+ if not _accept(self.fp.read(9)):
+ msg = "not an XPM file"
+ raise SyntaxError(msg)
+
+ # skip forward to next string
+ while True:
+ s = self.fp.readline()
+ if not s:
+ msg = "broken XPM file"
+ raise SyntaxError(msg)
+ m = xpm_head.match(s)
+ if m:
+ break
+
+ self._size = int(m.group(1)), int(m.group(2))
+
+ pal = int(m.group(3))
+ bpp = int(m.group(4))
+
+ if pal > 256 or bpp != 1:
+ msg = "cannot read this XPM file"
+ raise ValueError(msg)
+
+ #
+ # load palette description
+
+ palette = [b"\0\0\0"] * 256
+
+ for _ in range(pal):
+ s = self.fp.readline()
+ if s[-2:] == b"\r\n":
+ s = s[:-2]
+ elif s[-1:] in b"\r\n":
+ s = s[:-1]
+
+ c = s[1]
+ s = s[2:-2].split()
+
+ for i in range(0, len(s), 2):
+ if s[i] == b"c":
+ # process colour key
+ rgb = s[i + 1]
+ if rgb == b"None":
+ self.info["transparency"] = c
+ elif rgb[:1] == b"#":
+ # FIXME: handle colour names (see ImagePalette.py)
+ rgb = int(rgb[1:], 16)
+ palette[c] = (
+ o8((rgb >> 16) & 255) + o8((rgb >> 8) & 255) + o8(rgb & 255)
+ )
+ else:
+ # unknown colour
+ msg = "cannot read this XPM file"
+ raise ValueError(msg)
+ break
+
+ else:
+ # missing colour key
+ msg = "cannot read this XPM file"
+ raise ValueError(msg)
+
+ self._mode = "P"
+ self.palette = ImagePalette.raw("RGB", b"".join(palette))
+
+ self.tile = [("raw", (0, 0) + self.size, self.fp.tell(), ("P", 0, 1))]
+
+ def load_read(self, read_bytes):
+ #
+ # load all image data in one chunk
+
+ xsize, ysize = self.size
+
+ s = [None] * ysize
+
+ for i in range(ysize):
+ s[i] = self.fp.readline()[1 : xsize + 1].ljust(xsize)
+
+ return b"".join(s)
+
+
+#
+# Registry
+
+
+Image.register_open(XpmImageFile.format, XpmImageFile, _accept)
+
+Image.register_extension(XpmImageFile.format, ".xpm")
+
+Image.register_mime(XpmImageFile.format, "image/xpm")
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__init__.py b/Backend/venv/lib/python3.12/site-packages/PIL/__init__.py
new file mode 100644
index 00000000..2bb8f6d7
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/PIL/__init__.py
@@ -0,0 +1,84 @@
+"""Pillow (Fork of the Python Imaging Library)
+
+Pillow is the friendly PIL fork by Jeffrey A. Clark (Alex) and contributors.
+ https://github.com/python-pillow/Pillow/
+
+Pillow is forked from PIL 1.1.7.
+
+PIL is the Python Imaging Library by Fredrik Lundh and contributors.
+Copyright (c) 1999 by Secret Labs AB.
+
+Use PIL.__version__ for this Pillow version.
+
+;-)
+"""
+
+from . import _version
+
+# VERSION was removed in Pillow 6.0.0.
+# PILLOW_VERSION was removed in Pillow 9.0.0.
+# Use __version__ instead.
+__version__ = _version.__version__
+del _version
+
+
+_plugins = [
+ "BlpImagePlugin",
+ "BmpImagePlugin",
+ "BufrStubImagePlugin",
+ "CurImagePlugin",
+ "DcxImagePlugin",
+ "DdsImagePlugin",
+ "EpsImagePlugin",
+ "FitsImagePlugin",
+ "FliImagePlugin",
+ "FpxImagePlugin",
+ "FtexImagePlugin",
+ "GbrImagePlugin",
+ "GifImagePlugin",
+ "GribStubImagePlugin",
+ "Hdf5StubImagePlugin",
+ "IcnsImagePlugin",
+ "IcoImagePlugin",
+ "ImImagePlugin",
+ "ImtImagePlugin",
+ "IptcImagePlugin",
+ "JpegImagePlugin",
+ "Jpeg2KImagePlugin",
+ "McIdasImagePlugin",
+ "MicImagePlugin",
+ "MpegImagePlugin",
+ "MpoImagePlugin",
+ "MspImagePlugin",
+ "PalmImagePlugin",
+ "PcdImagePlugin",
+ "PcxImagePlugin",
+ "PdfImagePlugin",
+ "PixarImagePlugin",
+ "PngImagePlugin",
+ "PpmImagePlugin",
+ "PsdImagePlugin",
+ "QoiImagePlugin",
+ "SgiImagePlugin",
+ "SpiderImagePlugin",
+ "SunImagePlugin",
+ "TgaImagePlugin",
+ "TiffImagePlugin",
+ "WebPImagePlugin",
+ "WmfImagePlugin",
+ "XbmImagePlugin",
+ "XpmImagePlugin",
+ "XVThumbImagePlugin",
+]
+
+
+class UnidentifiedImageError(OSError):
+ """
+ Raised in :py:meth:`PIL.Image.open` if an image cannot be opened and identified.
+
+ If a PNG image raises this error, setting :data:`.ImageFile.LOAD_TRUNCATED_IMAGES`
+ to true may allow the image to be opened after all. The setting will ignore missing
+ data and checksum failures.
+ """
+
+ pass
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__main__.py b/Backend/venv/lib/python3.12/site-packages/PIL/__main__.py
new file mode 100644
index 00000000..a05323f9
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/PIL/__main__.py
@@ -0,0 +1,3 @@
+from .features import pilinfo
+
+pilinfo()
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/BdfFontFile.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/BdfFontFile.cpython-312.pyc
new file mode 100644
index 00000000..2f7e7086
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/BdfFontFile.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/BlpImagePlugin.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/BlpImagePlugin.cpython-312.pyc
new file mode 100644
index 00000000..d63c65cb
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/BlpImagePlugin.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/BmpImagePlugin.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/BmpImagePlugin.cpython-312.pyc
new file mode 100644
index 00000000..38882149
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/BmpImagePlugin.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/BufrStubImagePlugin.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/BufrStubImagePlugin.cpython-312.pyc
new file mode 100644
index 00000000..53dfd45e
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/BufrStubImagePlugin.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/ContainerIO.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/ContainerIO.cpython-312.pyc
new file mode 100644
index 00000000..cbe9efba
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/ContainerIO.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/CurImagePlugin.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/CurImagePlugin.cpython-312.pyc
new file mode 100644
index 00000000..089af9fc
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/CurImagePlugin.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/DcxImagePlugin.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/DcxImagePlugin.cpython-312.pyc
new file mode 100644
index 00000000..b6cb3a9e
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/DcxImagePlugin.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/DdsImagePlugin.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/DdsImagePlugin.cpython-312.pyc
new file mode 100644
index 00000000..fcb17c60
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/DdsImagePlugin.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/EpsImagePlugin.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/EpsImagePlugin.cpython-312.pyc
new file mode 100644
index 00000000..cc1a5c98
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/EpsImagePlugin.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/ExifTags.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/ExifTags.cpython-312.pyc
new file mode 100644
index 00000000..fffe31f7
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/ExifTags.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/FitsImagePlugin.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/FitsImagePlugin.cpython-312.pyc
new file mode 100644
index 00000000..1aea9f4c
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/FitsImagePlugin.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/FliImagePlugin.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/FliImagePlugin.cpython-312.pyc
new file mode 100644
index 00000000..82e7070c
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/FliImagePlugin.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/FontFile.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/FontFile.cpython-312.pyc
new file mode 100644
index 00000000..ce6f3940
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/FontFile.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/FpxImagePlugin.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/FpxImagePlugin.cpython-312.pyc
new file mode 100644
index 00000000..30fb45a1
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/FpxImagePlugin.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/FtexImagePlugin.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/FtexImagePlugin.cpython-312.pyc
new file mode 100644
index 00000000..d6d8315a
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/FtexImagePlugin.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/GbrImagePlugin.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/GbrImagePlugin.cpython-312.pyc
new file mode 100644
index 00000000..bce9d9ab
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/GbrImagePlugin.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/GdImageFile.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/GdImageFile.cpython-312.pyc
new file mode 100644
index 00000000..20f6da3a
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/GdImageFile.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/GifImagePlugin.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/GifImagePlugin.cpython-312.pyc
new file mode 100644
index 00000000..d1d1fa60
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/GifImagePlugin.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/GimpGradientFile.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/GimpGradientFile.cpython-312.pyc
new file mode 100644
index 00000000..e6008548
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/GimpGradientFile.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/GimpPaletteFile.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/GimpPaletteFile.cpython-312.pyc
new file mode 100644
index 00000000..5cd446a3
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/GimpPaletteFile.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/GribStubImagePlugin.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/GribStubImagePlugin.cpython-312.pyc
new file mode 100644
index 00000000..d2a30975
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/GribStubImagePlugin.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/Hdf5StubImagePlugin.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/Hdf5StubImagePlugin.cpython-312.pyc
new file mode 100644
index 00000000..82bd578c
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/Hdf5StubImagePlugin.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/IcnsImagePlugin.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/IcnsImagePlugin.cpython-312.pyc
new file mode 100644
index 00000000..47f0192e
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/IcnsImagePlugin.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/IcoImagePlugin.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/IcoImagePlugin.cpython-312.pyc
new file mode 100644
index 00000000..d5232c34
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/IcoImagePlugin.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/ImImagePlugin.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/ImImagePlugin.cpython-312.pyc
new file mode 100644
index 00000000..975402ef
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/ImImagePlugin.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/Image.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/Image.cpython-312.pyc
new file mode 100644
index 00000000..7dad132e
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/Image.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/ImageChops.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/ImageChops.cpython-312.pyc
new file mode 100644
index 00000000..b9b9eac0
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/ImageChops.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/ImageCms.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/ImageCms.cpython-312.pyc
new file mode 100644
index 00000000..7239387d
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/ImageCms.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/ImageColor.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/ImageColor.cpython-312.pyc
new file mode 100644
index 00000000..21b3b373
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/ImageColor.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/ImageDraw.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/ImageDraw.cpython-312.pyc
new file mode 100644
index 00000000..674b75b3
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/ImageDraw.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/ImageDraw2.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/ImageDraw2.cpython-312.pyc
new file mode 100644
index 00000000..6bf3542a
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/ImageDraw2.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/ImageEnhance.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/ImageEnhance.cpython-312.pyc
new file mode 100644
index 00000000..9ba882e9
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/ImageEnhance.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/ImageFile.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/ImageFile.cpython-312.pyc
new file mode 100644
index 00000000..89c6075f
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/ImageFile.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/ImageFilter.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/ImageFilter.cpython-312.pyc
new file mode 100644
index 00000000..37e94a6f
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/ImageFilter.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/ImageFont.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/ImageFont.cpython-312.pyc
new file mode 100644
index 00000000..80c4bd6a
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/ImageFont.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/ImageGrab.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/ImageGrab.cpython-312.pyc
new file mode 100644
index 00000000..5d1b1ace
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/ImageGrab.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/ImageMath.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/ImageMath.cpython-312.pyc
new file mode 100644
index 00000000..4e91d3d1
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/ImageMath.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/ImageMode.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/ImageMode.cpython-312.pyc
new file mode 100644
index 00000000..2a4f3b80
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/ImageMode.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/ImageMorph.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/ImageMorph.cpython-312.pyc
new file mode 100644
index 00000000..47f1cdfb
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/ImageMorph.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/ImageOps.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/ImageOps.cpython-312.pyc
new file mode 100644
index 00000000..3b2263a6
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/ImageOps.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/ImagePalette.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/ImagePalette.cpython-312.pyc
new file mode 100644
index 00000000..f44f7cd6
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/ImagePalette.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/ImagePath.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/ImagePath.cpython-312.pyc
new file mode 100644
index 00000000..a031cebf
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/ImagePath.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/ImageQt.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/ImageQt.cpython-312.pyc
new file mode 100644
index 00000000..81da3b45
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/ImageQt.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/ImageSequence.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/ImageSequence.cpython-312.pyc
new file mode 100644
index 00000000..e510db4a
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/ImageSequence.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/ImageShow.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/ImageShow.cpython-312.pyc
new file mode 100644
index 00000000..968f9a6c
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/ImageShow.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/ImageStat.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/ImageStat.cpython-312.pyc
new file mode 100644
index 00000000..71ef79f5
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/ImageStat.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/ImageTk.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/ImageTk.cpython-312.pyc
new file mode 100644
index 00000000..39b04de1
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/ImageTk.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/ImageTransform.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/ImageTransform.cpython-312.pyc
new file mode 100644
index 00000000..bd6305b9
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/ImageTransform.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/ImageWin.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/ImageWin.cpython-312.pyc
new file mode 100644
index 00000000..fd5890bb
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/ImageWin.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/ImtImagePlugin.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/ImtImagePlugin.cpython-312.pyc
new file mode 100644
index 00000000..eb866cca
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/ImtImagePlugin.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/IptcImagePlugin.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/IptcImagePlugin.cpython-312.pyc
new file mode 100644
index 00000000..44ac312e
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/IptcImagePlugin.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/Jpeg2KImagePlugin.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/Jpeg2KImagePlugin.cpython-312.pyc
new file mode 100644
index 00000000..34a9e98b
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/Jpeg2KImagePlugin.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/JpegImagePlugin.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/JpegImagePlugin.cpython-312.pyc
new file mode 100644
index 00000000..c4a9e8e7
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/JpegImagePlugin.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/JpegPresets.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/JpegPresets.cpython-312.pyc
new file mode 100644
index 00000000..666f3878
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/JpegPresets.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/McIdasImagePlugin.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/McIdasImagePlugin.cpython-312.pyc
new file mode 100644
index 00000000..96f1ef37
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/McIdasImagePlugin.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/MicImagePlugin.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/MicImagePlugin.cpython-312.pyc
new file mode 100644
index 00000000..9342ba37
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/MicImagePlugin.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/MpegImagePlugin.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/MpegImagePlugin.cpython-312.pyc
new file mode 100644
index 00000000..25fa9166
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/MpegImagePlugin.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/MpoImagePlugin.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/MpoImagePlugin.cpython-312.pyc
new file mode 100644
index 00000000..35cd6b53
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/MpoImagePlugin.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/MspImagePlugin.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/MspImagePlugin.cpython-312.pyc
new file mode 100644
index 00000000..35e30d03
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/MspImagePlugin.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/PSDraw.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/PSDraw.cpython-312.pyc
new file mode 100644
index 00000000..64d63256
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/PSDraw.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/PaletteFile.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/PaletteFile.cpython-312.pyc
new file mode 100644
index 00000000..f13c4a44
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/PaletteFile.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/PalmImagePlugin.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/PalmImagePlugin.cpython-312.pyc
new file mode 100644
index 00000000..6ca4f077
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/PalmImagePlugin.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/PcdImagePlugin.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/PcdImagePlugin.cpython-312.pyc
new file mode 100644
index 00000000..fa5b3586
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/PcdImagePlugin.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/PcfFontFile.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/PcfFontFile.cpython-312.pyc
new file mode 100644
index 00000000..9fa1945c
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/PcfFontFile.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/PcxImagePlugin.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/PcxImagePlugin.cpython-312.pyc
new file mode 100644
index 00000000..7f487b3a
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/PcxImagePlugin.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/PdfImagePlugin.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/PdfImagePlugin.cpython-312.pyc
new file mode 100644
index 00000000..6f55b009
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/PdfImagePlugin.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/PdfParser.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/PdfParser.cpython-312.pyc
new file mode 100644
index 00000000..06b26a9b
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/PdfParser.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/PixarImagePlugin.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/PixarImagePlugin.cpython-312.pyc
new file mode 100644
index 00000000..30ce2374
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/PixarImagePlugin.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/PngImagePlugin.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/PngImagePlugin.cpython-312.pyc
new file mode 100644
index 00000000..f687e4f8
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/PngImagePlugin.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/PpmImagePlugin.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/PpmImagePlugin.cpython-312.pyc
new file mode 100644
index 00000000..ecad6de5
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/PpmImagePlugin.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/PsdImagePlugin.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/PsdImagePlugin.cpython-312.pyc
new file mode 100644
index 00000000..2b072921
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/PsdImagePlugin.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/PyAccess.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/PyAccess.cpython-312.pyc
new file mode 100644
index 00000000..cda1dce6
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/PyAccess.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/QoiImagePlugin.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/QoiImagePlugin.cpython-312.pyc
new file mode 100644
index 00000000..ba109003
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/QoiImagePlugin.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/SgiImagePlugin.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/SgiImagePlugin.cpython-312.pyc
new file mode 100644
index 00000000..2c82f55d
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/SgiImagePlugin.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/SpiderImagePlugin.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/SpiderImagePlugin.cpython-312.pyc
new file mode 100644
index 00000000..07871e0a
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/SpiderImagePlugin.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/SunImagePlugin.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/SunImagePlugin.cpython-312.pyc
new file mode 100644
index 00000000..82e74ef2
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/SunImagePlugin.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/TarIO.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/TarIO.cpython-312.pyc
new file mode 100644
index 00000000..a40f7ba3
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/TarIO.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/TgaImagePlugin.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/TgaImagePlugin.cpython-312.pyc
new file mode 100644
index 00000000..a9650cfc
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/TgaImagePlugin.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/TiffImagePlugin.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/TiffImagePlugin.cpython-312.pyc
new file mode 100644
index 00000000..35409252
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/TiffImagePlugin.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/TiffTags.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/TiffTags.cpython-312.pyc
new file mode 100644
index 00000000..0621cb14
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/TiffTags.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/WalImageFile.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/WalImageFile.cpython-312.pyc
new file mode 100644
index 00000000..d80c0c4c
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/WalImageFile.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/WebPImagePlugin.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/WebPImagePlugin.cpython-312.pyc
new file mode 100644
index 00000000..aae3aa74
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/WebPImagePlugin.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/WmfImagePlugin.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/WmfImagePlugin.cpython-312.pyc
new file mode 100644
index 00000000..aa506230
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/WmfImagePlugin.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/XVThumbImagePlugin.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/XVThumbImagePlugin.cpython-312.pyc
new file mode 100644
index 00000000..dbed742b
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/XVThumbImagePlugin.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/XbmImagePlugin.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/XbmImagePlugin.cpython-312.pyc
new file mode 100644
index 00000000..5cbb383e
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/XbmImagePlugin.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/XpmImagePlugin.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/XpmImagePlugin.cpython-312.pyc
new file mode 100644
index 00000000..8cd0f1a2
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/XpmImagePlugin.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/__init__.cpython-312.pyc
new file mode 100644
index 00000000..f729fb98
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/__init__.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/__main__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/__main__.cpython-312.pyc
new file mode 100644
index 00000000..881cdd96
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/__main__.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/_binary.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/_binary.cpython-312.pyc
new file mode 100644
index 00000000..21a17b6a
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/_binary.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/_deprecate.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/_deprecate.cpython-312.pyc
new file mode 100644
index 00000000..af802a17
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/_deprecate.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/_tkinter_finder.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/_tkinter_finder.cpython-312.pyc
new file mode 100644
index 00000000..8c888b9f
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/_tkinter_finder.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/_util.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/_util.cpython-312.pyc
new file mode 100644
index 00000000..81bd7b13
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/_util.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/_version.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/_version.cpython-312.pyc
new file mode 100644
index 00000000..4cac4508
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/_version.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/features.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/features.cpython-312.pyc
new file mode 100644
index 00000000..170edf57
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/__pycache__/features.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/_binary.py b/Backend/venv/lib/python3.12/site-packages/PIL/_binary.py
new file mode 100644
index 00000000..a74ee9eb
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/PIL/_binary.py
@@ -0,0 +1,102 @@
+#
+# The Python Imaging Library.
+# $Id$
+#
+# Binary input/output support routines.
+#
+# Copyright (c) 1997-2003 by Secret Labs AB
+# Copyright (c) 1995-2003 by Fredrik Lundh
+# Copyright (c) 2012 by Brian Crowell
+#
+# See the README file for information on usage and redistribution.
+#
+
+
+"""Binary input/output support routines."""
+
+
+from struct import pack, unpack_from
+
+
+def i8(c):
+ return c if c.__class__ is int else c[0]
+
+
+def o8(i):
+ return bytes((i & 255,))
+
+
+# Input, le = little endian, be = big endian
+def i16le(c, o=0):
+ """
+ Converts a 2-bytes (16 bits) string to an unsigned integer.
+
+ :param c: string containing bytes to convert
+ :param o: offset of bytes to convert in string
+ """
+ return unpack_from("<H", c, o)[0]
+
+
+def si16le(c, o=0):
+ """
+ Converts a 2-bytes (16 bits) string to a signed integer.
+ """
+ return unpack_from("<h", c, o)[0]
+
+
+def si16be(c, o=0):
+ """
+ Converts a 2-bytes (16 bits) string to a signed integer, big endian.
+ """
+ return unpack_from(">h", c, o)[0]
+
+
+def i16be(c, o=0):
+ return unpack_from(">H", c, o)[0]
+
+
+def i32le(c, o=0):
+ """
+ Converts a 4-bytes (32 bits) string to an unsigned integer.
+
+ :param c: string containing bytes to convert
+ :param o: offset of bytes to convert in string
+ """
+ return unpack_from("<I", c, o)[0]
+
+
+def si32le(c, o=0):
+ """
+ Converts a 4-bytes (32 bits) string to a signed integer.
+ """
+ return unpack_from("<i", c, o)[0]
+
+
+def i32be(c, o=0):
+ return unpack_from(">I", c, o)[0]
+
+
+# Output, le = little endian, be = big endian
+def o16le(i):
+ return pack("<H", i)
+
+
+def o32le(i):
+ return pack("<I", i)
+
+
+def o16be(i):
+ return pack(">H", i)
+
+
+def o32be(i):
+ return pack(">I", i)
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/_deprecate.py b/Backend/venv/lib/python3.12/site-packages/PIL/_deprecate.py
new file mode 100644
index 00000000..2f2a3df1
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/PIL/_deprecate.py
@@ -0,0 +1,69 @@
+from __future__ import annotations
+
+import warnings
+
+from . import __version__
+
+
+def deprecate(
+ deprecated: str,
+ when: int | None,
+ replacement: str | None = None,
+ *,
+ action: str | None = None,
+ plural: bool = False,
+) -> None:
+ """
+ Deprecations helper.
+
+ :param deprecated: Name of thing to be deprecated.
+ :param when: Pillow major version to be removed in.
+ :param replacement: Name of replacement.
+ :param action: Instead of "replacement", give a custom call to action
+ e.g. "Upgrade to new thing".
+ :param plural: if the deprecated thing is plural, needing "are" instead of "is".
+
+ Usually of the form:
+
+ "[deprecated] is deprecated and will be removed in Pillow [when] (yyyy-mm-dd).
+ Use [replacement] instead."
+
+ You can leave out the replacement sentence:
+
+ "[deprecated] is deprecated and will be removed in Pillow [when] (yyyy-mm-dd)"
+
+ Or with another call to action:
+
+ "[deprecated] is deprecated and will be removed in Pillow [when] (yyyy-mm-dd).
+ [action]."
+ """
+
+ is_ = "are" if plural else "is"
+
+ if when is None:
+ removed = "a future version"
+ elif when <= int(__version__.split(".")[0]):
+ msg = f"{deprecated} {is_} deprecated and should be removed."
+ raise RuntimeError(msg)
+ elif when == 11:
+ removed = "Pillow 11 (2024-10-15)"
+ else:
+ msg = f"Unknown removal version: {when}. Update {__name__}?"
+ raise ValueError(msg)
+
+ if replacement and action:
+ msg = "Use only one of 'replacement' and 'action'"
+ raise ValueError(msg)
+
+ if replacement:
+ action = f". Use {replacement} instead."
+ elif action:
+ action = f". {action.rstrip('.')}."
+ else:
+ action = ""
+
+ warnings.warn(
+ f"{deprecated} {is_} deprecated and will be removed in {removed}{action}",
+ DeprecationWarning,
+ stacklevel=3,
+ )
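
deprecate() formats the warning from its arguments and, once the target major
version has actually shipped, raises RuntimeError instead of warning. A minimal
sketch of a call; "old_method" and "new_method" are hypothetical names:

    import warnings

    from PIL._deprecate import deprecate

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        deprecate("old_method", 11, "new_method")  # hypothetical names

    print(caught[0].message)
    # old_method is deprecated and will be removed in Pillow 11 (2024-10-15).
    # Use new_method instead.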
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/_imaging.cpython-312-x86_64-linux-gnu.so b/Backend/venv/lib/python3.12/site-packages/PIL/_imaging.cpython-312-x86_64-linux-gnu.so
new file mode 100755
index 00000000..8d679736
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/_imaging.cpython-312-x86_64-linux-gnu.so differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/_imagingcms.cpython-312-x86_64-linux-gnu.so b/Backend/venv/lib/python3.12/site-packages/PIL/_imagingcms.cpython-312-x86_64-linux-gnu.so
new file mode 100755
index 00000000..280b8cb5
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/_imagingcms.cpython-312-x86_64-linux-gnu.so differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/_imagingft.cpython-312-x86_64-linux-gnu.so b/Backend/venv/lib/python3.12/site-packages/PIL/_imagingft.cpython-312-x86_64-linux-gnu.so
new file mode 100755
index 00000000..0b17632d
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/_imagingft.cpython-312-x86_64-linux-gnu.so differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/_imagingmath.cpython-312-x86_64-linux-gnu.so b/Backend/venv/lib/python3.12/site-packages/PIL/_imagingmath.cpython-312-x86_64-linux-gnu.so
new file mode 100755
index 00000000..aee381e6
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/_imagingmath.cpython-312-x86_64-linux-gnu.so differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/_imagingmorph.cpython-312-x86_64-linux-gnu.so b/Backend/venv/lib/python3.12/site-packages/PIL/_imagingmorph.cpython-312-x86_64-linux-gnu.so
new file mode 100755
index 00000000..1eff2e1d
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/_imagingmorph.cpython-312-x86_64-linux-gnu.so differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/_imagingtk.cpython-312-x86_64-linux-gnu.so b/Backend/venv/lib/python3.12/site-packages/PIL/_imagingtk.cpython-312-x86_64-linux-gnu.so
new file mode 100755
index 00000000..972b9f39
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/_imagingtk.cpython-312-x86_64-linux-gnu.so differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/_tkinter_finder.py b/Backend/venv/lib/python3.12/site-packages/PIL/_tkinter_finder.py
new file mode 100644
index 00000000..597c21b5
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/PIL/_tkinter_finder.py
@@ -0,0 +1,17 @@
+""" Find compiled module linking to Tcl / Tk libraries
+"""
+import sys
+import tkinter
+from tkinter import _tkinter as tk
+
+try:
+ if hasattr(sys, "pypy_find_executable"):
+ TKINTER_LIB = tk.tklib_cffi.__file__
+ else:
+ TKINTER_LIB = tk.__file__
+except AttributeError:
+ # _tkinter may be compiled directly into Python, in which case __file__ is
+ # not available. load_tkinter_funcs will check the binary first in any case.
+ TKINTER_LIB = None
+
+tk_version = str(tkinter.TkVersion)
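
_tkinter_finder only records where the compiled _tkinter module lives (the CFFI
library on PyPy, the extension's __file__ on CPython, or None when Tk is linked
directly into the interpreter) so ImageTk can load the same Tcl/Tk libraries.
A quick check, assuming a Tk-enabled Python build:

    from PIL import _tkinter_finder

    print(_tkinter_finder.tk_version)   # e.g. "8.6"
    print(_tkinter_finder.TKINTER_LIB)  # path to the _tkinter binary, or None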
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/_util.py b/Backend/venv/lib/python3.12/site-packages/PIL/_util.py
new file mode 100644
index 00000000..ba27b7e4
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/PIL/_util.py
@@ -0,0 +1,19 @@
+import os
+from pathlib import Path
+
+
+def is_path(f):
+ return isinstance(f, (bytes, str, Path))
+
+
+def is_directory(f):
+ """Checks if an object is a string, and that it points to a directory."""
+ return is_path(f) and os.path.isdir(f)
+
+
+class DeferredError:
+ def __init__(self, ex):
+ self.ex = ex
+
+ def __getattr__(self, elt):
+ raise self.ex
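
DeferredError stores an exception and re-raises it on any attribute access;
Pillow uses this pattern to poison the core of a closed image so later use fails
loudly rather than silently. A minimal sketch:

    from PIL._util import DeferredError, is_path

    closed = DeferredError(ValueError("Operation on closed image"))
    try:
        closed.size  # any attribute access re-raises the stored exception
    except ValueError as e:
        print(e)     # Operation on closed image

    print(is_path("image.png"), is_path(42))  # True False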
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/_version.py b/Backend/venv/lib/python3.12/site-packages/PIL/_version.py
new file mode 100644
index 00000000..0936d1a7
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/PIL/_version.py
@@ -0,0 +1,2 @@
+# Master version for Pillow
+__version__ = "10.1.0"
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/_webp.cpython-312-x86_64-linux-gnu.so b/Backend/venv/lib/python3.12/site-packages/PIL/_webp.cpython-312-x86_64-linux-gnu.so
new file mode 100755
index 00000000..aa453649
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/PIL/_webp.cpython-312-x86_64-linux-gnu.so differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PIL/features.py b/Backend/venv/lib/python3.12/site-packages/PIL/features.py
new file mode 100644
index 00000000..f14e60cf
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/PIL/features.py
@@ -0,0 +1,329 @@
+import collections
+import os
+import sys
+import warnings
+
+import PIL
+
+from . import Image
+
+modules = {
+ "pil": ("PIL._imaging", "PILLOW_VERSION"),
+ "tkinter": ("PIL._tkinter_finder", "tk_version"),
+ "freetype2": ("PIL._imagingft", "freetype2_version"),
+ "littlecms2": ("PIL._imagingcms", "littlecms_version"),
+ "webp": ("PIL._webp", "webpdecoder_version"),
+}
+
+
+def check_module(feature):
+ """
+ Checks if a module is available.
+
+ :param feature: The module to check for.
+ :returns: ``True`` if available, ``False`` otherwise.
+ :raises ValueError: If the module is not defined in this version of Pillow.
+ """
+ if feature not in modules:
+ msg = f"Unknown module {feature}"
+ raise ValueError(msg)
+
+ module, ver = modules[feature]
+
+ try:
+ __import__(module)
+ return True
+ except ModuleNotFoundError:
+ return False
+ except ImportError as ex:
+ warnings.warn(str(ex))
+ return False
+
+
+def version_module(feature):
+ """
+ :param feature: The module to check for.
+ :returns:
+ The loaded version number as a string, or ``None`` if unknown or not available.
+ :raises ValueError: If the module is not defined in this version of Pillow.
+ """
+ if not check_module(feature):
+ return None
+
+ module, ver = modules[feature]
+
+ if ver is None:
+ return None
+
+ return getattr(__import__(module, fromlist=[ver]), ver)
+
+
+def get_supported_modules():
+ """
+ :returns: A list of all supported modules.
+ """
+ return [f for f in modules if check_module(f)]
+
+
+codecs = {
+ "jpg": ("jpeg", "jpeglib"),
+ "jpg_2000": ("jpeg2k", "jp2klib"),
+ "zlib": ("zip", "zlib"),
+ "libtiff": ("libtiff", "libtiff"),
+}
+
+
+def check_codec(feature):
+ """
+ Checks if a codec is available.
+
+ :param feature: The codec to check for.
+ :returns: ``True`` if available, ``False`` otherwise.
+ :raises ValueError: If the codec is not defined in this version of Pillow.
+ """
+ if feature not in codecs:
+ msg = f"Unknown codec {feature}"
+ raise ValueError(msg)
+
+ codec, lib = codecs[feature]
+
+ return codec + "_encoder" in dir(Image.core)
+
+
+def version_codec(feature):
+ """
+ :param feature: The codec to check for.
+ :returns:
+ The version number as a string, or ``None`` if not available.
+ Checked at compile time for ``jpg``, run-time otherwise.
+ :raises ValueError: If the codec is not defined in this version of Pillow.
+ """
+ if not check_codec(feature):
+ return None
+
+ codec, lib = codecs[feature]
+
+ version = getattr(Image.core, lib + "_version")
+
+ if feature == "libtiff":
+ return version.split("\n")[0].split("Version ")[1]
+
+ return version
+
+
+def get_supported_codecs():
+ """
+ :returns: A list of all supported codecs.
+ """
+ return [f for f in codecs if check_codec(f)]
+
+
+features = {
+ "webp_anim": ("PIL._webp", "HAVE_WEBPANIM", None),
+ "webp_mux": ("PIL._webp", "HAVE_WEBPMUX", None),
+ "transp_webp": ("PIL._webp", "HAVE_TRANSPARENCY", None),
+ "raqm": ("PIL._imagingft", "HAVE_RAQM", "raqm_version"),
+ "fribidi": ("PIL._imagingft", "HAVE_FRIBIDI", "fribidi_version"),
+ "harfbuzz": ("PIL._imagingft", "HAVE_HARFBUZZ", "harfbuzz_version"),
+ "libjpeg_turbo": ("PIL._imaging", "HAVE_LIBJPEGTURBO", "libjpeg_turbo_version"),
+ "libimagequant": ("PIL._imaging", "HAVE_LIBIMAGEQUANT", "imagequant_version"),
+ "xcb": ("PIL._imaging", "HAVE_XCB", None),
+}
+
+
+def check_feature(feature):
+ """
+ Checks if a feature is available.
+
+ :param feature: The feature to check for.
+ :returns: ``True`` if available, ``False`` if unavailable, ``None`` if unknown.
+ :raises ValueError: If the feature is not defined in this version of Pillow.
+ """
+ if feature not in features:
+ msg = f"Unknown feature {feature}"
+ raise ValueError(msg)
+
+ module, flag, ver = features[feature]
+
+ try:
+ imported_module = __import__(module, fromlist=["PIL"])
+ return getattr(imported_module, flag)
+ except ModuleNotFoundError:
+ return None
+ except ImportError as ex:
+ warnings.warn(str(ex))
+ return None
+
+
+def version_feature(feature):
+ """
+ :param feature: The feature to check for.
+ :returns: The version number as a string, or ``None`` if not available.
+ :raises ValueError: If the feature is not defined in this version of Pillow.
+ """
+ if not check_feature(feature):
+ return None
+
+ module, flag, ver = features[feature]
+
+ if ver is None:
+ return None
+
+ return getattr(__import__(module, fromlist=[ver]), ver)
+
+
+def get_supported_features():
+ """
+ :returns: A list of all supported features.
+ """
+ return [f for f in features if check_feature(f)]
+
+
+def check(feature):
+ """
+ :param feature: A module, codec, or feature name.
+ :returns:
+ ``True`` if the module, codec, or feature is available,
+ ``False`` or ``None`` otherwise.
+ """
+
+ if feature in modules:
+ return check_module(feature)
+ if feature in codecs:
+ return check_codec(feature)
+ if feature in features:
+ return check_feature(feature)
+ warnings.warn(f"Unknown feature '{feature}'.", stacklevel=2)
+ return False
+
+
+def version(feature):
+ """
+ :param feature:
+ The module, codec, or feature to check for.
+ :returns:
+ The version number as a string, or ``None`` if unknown or not available.
+ """
+ if feature in modules:
+ return version_module(feature)
+ if feature in codecs:
+ return version_codec(feature)
+ if feature in features:
+ return version_feature(feature)
+ return None
+
+
+def get_supported():
+ """
+ :returns: A list of all supported modules, features, and codecs.
+ """
+
+ ret = get_supported_modules()
+ ret.extend(get_supported_features())
+ ret.extend(get_supported_codecs())
+ return ret
+
+
+def pilinfo(out=None, supported_formats=True):
+ """
+ Prints information about this installation of Pillow.
+ This function can be called with ``python3 -m PIL``.
+
+ :param out:
+ The output stream to print to. Defaults to ``sys.stdout`` if ``None``.
+ :param supported_formats:
+ If ``True``, a list of all supported image file formats will be printed.
+ """
+
+ if out is None:
+ out = sys.stdout
+
+ Image.init()
+
+ print("-" * 68, file=out)
+ print(f"Pillow {PIL.__version__}", file=out)
+ py_version = sys.version.splitlines()
+ print(f"Python {py_version[0].strip()}", file=out)
+ for py_version in py_version[1:]:
+ print(f" {py_version.strip()}", file=out)
+ print("-" * 68, file=out)
+ print(
+ f"Python modules loaded from {os.path.dirname(Image.__file__)}",
+ file=out,
+ )
+ print(
+ f"Binary modules loaded from {os.path.dirname(Image.core.__file__)}",
+ file=out,
+ )
+ print("-" * 68, file=out)
+
+ for name, feature in [
+ ("pil", "PIL CORE"),
+ ("tkinter", "TKINTER"),
+ ("freetype2", "FREETYPE2"),
+ ("littlecms2", "LITTLECMS2"),
+ ("webp", "WEBP"),
+ ("transp_webp", "WEBP Transparency"),
+ ("webp_mux", "WEBPMUX"),
+ ("webp_anim", "WEBP Animation"),
+ ("jpg", "JPEG"),
+ ("jpg_2000", "OPENJPEG (JPEG2000)"),
+ ("zlib", "ZLIB (PNG/ZIP)"),
+ ("libtiff", "LIBTIFF"),
+ ("raqm", "RAQM (Bidirectional Text)"),
+ ("libimagequant", "LIBIMAGEQUANT (Quantization method)"),
+ ("xcb", "XCB (X protocol)"),
+ ]:
+ if check(name):
+ if name == "jpg" and check_feature("libjpeg_turbo"):
+ v = "libjpeg-turbo " + version_feature("libjpeg_turbo")
+ else:
+ v = version(name)
+ if v is not None:
+ version_static = name in ("pil", "jpg")
+ if name == "littlecms2":
+ # this check is also in src/_imagingcms.c:setup_module()
+ version_static = tuple(int(x) for x in v.split(".")) < (2, 7)
+ t = "compiled for" if version_static else "loaded"
+ if name == "raqm":
+ for f in ("fribidi", "harfbuzz"):
+ v2 = version_feature(f)
+ if v2 is not None:
+ v += f", {f} {v2}"
+ print("---", feature, "support ok,", t, v, file=out)
+ else:
+ print("---", feature, "support ok", file=out)
+ else:
+ print("***", feature, "support not installed", file=out)
+ print("-" * 68, file=out)
+
+ if supported_formats:
+ extensions = collections.defaultdict(list)
+ for ext, i in Image.EXTENSION.items():
+ extensions[i].append(ext)
+
+ for i in sorted(Image.ID):
+ line = f"{i}"
+ if i in Image.MIME:
+ line = f"{line} {Image.MIME[i]}"
+ print(line, file=out)
+
+ if i in extensions:
+ print(
+ "Extensions: {}".format(", ".join(sorted(extensions[i]))), file=out
+ )
+
+ features = []
+ if i in Image.OPEN:
+ features.append("open")
+ if i in Image.SAVE:
+ features.append("save")
+ if i in Image.SAVE_ALL:
+ features.append("save_all")
+ if i in Image.DECODERS:
+ features.append("decode")
+ if i in Image.ENCODERS:
+ features.append("encode")
+
+ print("Features: {}".format(", ".join(features)), file=out)
+ print("-" * 68, file=out)
diff --git a/Backend/venv/lib/python3.12/site-packages/Pillow-10.1.0.dist-info/INSTALLER b/Backend/venv/lib/python3.12/site-packages/Pillow-10.1.0.dist-info/INSTALLER
new file mode 100644
index 00000000..a1b589e3
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/Pillow-10.1.0.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/Backend/venv/lib/python3.12/site-packages/Pillow-10.1.0.dist-info/LICENSE b/Backend/venv/lib/python3.12/site-packages/Pillow-10.1.0.dist-info/LICENSE
new file mode 100644
index 00000000..9eeca16f
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/Pillow-10.1.0.dist-info/LICENSE
@@ -0,0 +1,731 @@
+The Python Imaging Library (PIL) is
+
+ Copyright © 1997-2011 by Secret Labs AB
+ Copyright © 1995-2011 by Fredrik Lundh
+
+Pillow is the friendly PIL fork. It is
+
+ Copyright © 2010-2023 by Jeffrey A. Clark (Alex) and contributors.
+
+Like PIL, Pillow is licensed under the open source HPND License:
+
+By obtaining, using, and/or copying this software and/or its associated
+documentation, you agree that you have read, understood, and will comply
+with the following terms and conditions:
+
+Permission to use, copy, modify and distribute this software and its
+documentation for any purpose and without fee is hereby granted,
+provided that the above copyright notice appears in all copies, and that
+both that copyright notice and this permission notice appear in supporting
+documentation, and that the name of Secret Labs AB or the author not be
+used in advertising or publicity pertaining to distribution of the software
+without specific, written prior permission.
+
+SECRET LABS AB AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS
+SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS.
+IN NO EVENT SHALL SECRET LABS AB OR THE AUTHOR BE LIABLE FOR ANY SPECIAL,
+INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+PERFORMANCE OF THIS SOFTWARE.
+
+
+----
+
+BROTLI
+
+Copyright (c) 2009, 2010, 2013-2016 by the Brotli Authors.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
+
+
+----
+
+BZIP2
+
+
+--------------------------------------------------------------------------
+
+This program, "bzip2", the associated library "libbzip2", and all
+documentation, are copyright (C) 1996-2019 Julian R Seward. All
+rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions
+are met:
+
+1. Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+
+2. The origin of this software must not be misrepresented; you must
+ not claim that you wrote the original software. If you use this
+ software in a product, an acknowledgment in the product
+ documentation would be appreciated but is not required.
+
+3. Altered source versions must be plainly marked as such, and must
+ not be misrepresented as being the original software.
+
+4. The name of the author may not be used to endorse or promote
+ products derived from this software without specific prior written
+ permission.
+
+THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS
+OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
+DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
+GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+Julian Seward, jseward@acm.org
+bzip2/libbzip2 version 1.0.8 of 13 July 2019
+
+--------------------------------------------------------------------------
+
+
+----
+
+FREETYPE2
+
+The FreeType 2 font engine is copyrighted work and cannot be used
+legally without a software license. In order to make this project
+usable to a vast majority of developers, we distribute it under two
+mutually exclusive open-source licenses.
+
+This means that *you* must choose *one* of the two licenses described
+below, then obey all its terms and conditions when using FreeType 2 in
+any of your projects or products.
+
+ - The FreeType License, found in the file `docs/FTL.TXT`, which is
+ similar to the original BSD license *with* an advertising clause
+ that forces you to explicitly cite the FreeType project in your
+ product's documentation. All details are in the license file.
+ This license is suited to products which don't use the GNU General
+ Public License.
+
+ Note that this license is compatible to the GNU General Public
+ License version 3, but not version 2.
+
+ - The GNU General Public License version 2, found in
+ `docs/GPLv2.TXT` (any later version can be used also), for
+ programs which already use the GPL. Note that the FTL is
+ incompatible with GPLv2 due to its advertisement clause.
+
+The contributed BDF and PCF drivers come with a license similar to
+that of the X Window System. It is compatible to the above two
+licenses (see files `src/bdf/README` and `src/pcf/README`). The same
+holds for the source code files `src/base/fthash.c` and
+`include/freetype/internal/fthash.h`; they were part of the BDF driver
+in earlier FreeType versions.
+
+The gzip module uses the zlib license (see `src/gzip/zlib.h`) which
+too is compatible to the above two licenses.
+
+The files `src/autofit/ft-hb.c` and `src/autofit/ft-hb.h` contain code
+taken almost verbatim from the HarfBuzz file `hb-ft.cc`, which uses
+the 'Old MIT' license, compatible to the above two licenses.
+
+The MD5 checksum support (only used for debugging in development
+builds) is in the public domain.
+
+
+----
+
+HARFBUZZ
+
+HarfBuzz is licensed under the so-called "Old MIT" license. Details follow.
+For parts of HarfBuzz that are licensed under different licenses see individual
+files names COPYING in subdirectories where applicable.
+
+Copyright © 2010-2022 Google, Inc.
+Copyright © 2015-2020 Ebrahim Byagowi
+Copyright © 2019,2020 Facebook, Inc.
+Copyright © 2012,2015 Mozilla Foundation
+Copyright © 2011 Codethink Limited
+Copyright © 2008,2010 Nokia Corporation and/or its subsidiary(-ies)
+Copyright © 2009 Keith Stribley
+Copyright © 2011 Martin Hosken and SIL International
+Copyright © 2007 Chris Wilson
+Copyright © 2005,2006,2020,2021,2022,2023 Behdad Esfahbod
+Copyright © 2004,2007,2008,2009,2010,2013,2021,2022,2023 Red Hat, Inc.
+Copyright © 1998-2005 David Turner and Werner Lemberg
+Copyright © 2016 Igalia S.L.
+Copyright © 2022 Matthias Clasen
+Copyright © 2018,2021 Khaled Hosny
+Copyright © 2018,2019,2020 Adobe, Inc
+Copyright © 2013-2015 Alexei Podtelezhnikov
+
+For full copyright notices consult the individual files in the package.
+
+
+Permission is hereby granted, without written agreement and without
+license or royalty fees, to use, copy, modify, and distribute this
+software and its documentation for any purpose, provided that the
+above copyright notice and the following two paragraphs appear in
+all copies of this software.
+
+IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
+DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
+ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
+IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
+DAMAGE.
+
+THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
+BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
+FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
+ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
+PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
+
+
+----
+
+LCMS2
+
+Little CMS
+Copyright (c) 1998-2020 Marti Maria Saguer
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+
+----
+
+LIBJPEG
+
+1. We don't promise that this software works. (But if you find any bugs,
+ please let us know!)
+2. You can use this software for whatever you want. You don't have to pay us.
+3. You may not pretend that you wrote this software. If you use it in a
+ program, you must acknowledge somewhere in your documentation that
+ you've used the IJG code.
+
+In legalese:
+
+The authors make NO WARRANTY or representation, either express or implied,
+with respect to this software, its quality, accuracy, merchantability, or
+fitness for a particular purpose. This software is provided "AS IS", and you,
+its user, assume the entire risk as to its quality and accuracy.
+
+This software is copyright (C) 1991-2020, Thomas G. Lane, Guido Vollbeding.
+All Rights Reserved except as specified below.
+
+Permission is hereby granted to use, copy, modify, and distribute this
+software (or portions thereof) for any purpose, without fee, subject to these
+conditions:
+(1) If any part of the source code for this software is distributed, then this
+README file must be included, with this copyright and no-warranty notice
+unaltered; and any additions, deletions, or changes to the original files
+must be clearly indicated in accompanying documentation.
+(2) If only executable code is distributed, then the accompanying
+documentation must state that "this software is based in part on the work of
+the Independent JPEG Group".
+(3) Permission for use of this software is granted only if the user accepts
+full responsibility for any undesirable consequences; the authors accept
+NO LIABILITY for damages of any kind.
+
+These conditions apply to any software derived from or based on the IJG code,
+not just to the unmodified library. If you use our work, you ought to
+acknowledge us.
+
+Permission is NOT granted for the use of any IJG author's name or company name
+in advertising or publicity relating to this software or products derived from
+it. This software may be referred to only as "the Independent JPEG Group's
+software".
+
+We specifically permit and encourage the use of this software as the basis of
+commercial products, provided that all warranty or liability claims are
+assumed by the product vendor.
+
+
+----
+
+LIBLZMA
+
+XZ Utils Licensing
+==================
+
+ Different licenses apply to different files in this package. Here
+ is a rough summary of which licenses apply to which parts of this
+ package (but check the individual files to be sure!):
+
+ - liblzma is in the public domain.
+
+ - xz, xzdec, and lzmadec command line tools are in the public
+ domain unless GNU getopt_long had to be compiled and linked
+ in from the lib directory. The getopt_long code is under
+ GNU LGPLv2.1+.
+
+ - The scripts to grep, diff, and view compressed files have been
+ adapted from gzip. These scripts and their documentation are
+ under GNU GPLv2+.
+
+ - All the documentation in the doc directory and most of the
+ XZ Utils specific documentation files in other directories
+ are in the public domain.
+
+ - Translated messages are in the public domain.
+
+ - The build system contains public domain files, and files that
+ are under GNU GPLv2+ or GNU GPLv3+. None of these files end up
+ in the binaries being built.
+
+ - Test files and test code in the tests directory, and debugging
+ utilities in the debug directory are in the public domain.
+
+ - The extra directory may contain public domain files, and files
+ that are under various free software licenses.
+
+ You can do whatever you want with the files that have been put into
+ the public domain. If you find public domain legally problematic,
+ take the previous sentence as a license grant. If you still find
+ the lack of copyright legally problematic, you have too many
+ lawyers.
+
+ As usual, this software is provided "as is", without any warranty.
+
+ If you copy significant amounts of public domain code from XZ Utils
+ into your project, acknowledging this somewhere in your software is
+ polite (especially if it is proprietary, non-free software), but
+ naturally it is not legally required. Here is an example of a good
+ notice to put into "about box" or into documentation:
+
+ This software includes code from XZ Utils <https://tukaani.org/xz/>.
+
+ The following license texts are included in the following files:
+ - COPYING.LGPLv2.1: GNU Lesser General Public License version 2.1
+ - COPYING.GPLv2: GNU General Public License version 2
+ - COPYING.GPLv3: GNU General Public License version 3
+
+ Note that the toolchain (compiler, linker etc.) may add some code
+ pieces that are copyrighted. Thus, it is possible that e.g. liblzma
+ binary wouldn't actually be in the public domain in its entirety
+ even though it contains no copyrighted code from the XZ Utils source
+ package.
+
+ If you have questions, don't hesitate to ask the author(s) for more
+ information.
+
+
+----
+
+LIBPNG
+
+COPYRIGHT NOTICE, DISCLAIMER, and LICENSE
+=========================================
+
+PNG Reference Library License version 2
+---------------------------------------
+
+ * Copyright (c) 1995-2022 The PNG Reference Library Authors.
+ * Copyright (c) 2018-2022 Cosmin Truta.
+ * Copyright (c) 2000-2002, 2004, 2006-2018 Glenn Randers-Pehrson.
+ * Copyright (c) 1996-1997 Andreas Dilger.
+ * Copyright (c) 1995-1996 Guy Eric Schalnat, Group 42, Inc.
+
+The software is supplied "as is", without warranty of any kind,
+express or implied, including, without limitation, the warranties
+of merchantability, fitness for a particular purpose, title, and
+non-infringement. In no event shall the Copyright owners, or
+anyone distributing the software, be liable for any damages or
+other liability, whether in contract, tort or otherwise, arising
+from, out of, or in connection with the software, or the use or
+other dealings in the software, even if advised of the possibility
+of such damage.
+
+Permission is hereby granted to use, copy, modify, and distribute
+this software, or portions hereof, for any purpose, without fee,
+subject to the following restrictions:
+
+ 1. The origin of this software must not be misrepresented; you
+ must not claim that you wrote the original software. If you
+ use this software in a product, an acknowledgment in the product
+ documentation would be appreciated, but is not required.
+
+ 2. Altered source versions must be plainly marked as such, and must
+ not be misrepresented as being the original software.
+
+ 3. This Copyright notice may not be removed or altered from any
+ source or altered source distribution.
+
+
+PNG Reference Library License version 1 (for libpng 0.5 through 1.6.35)
+-----------------------------------------------------------------------
+
+libpng versions 1.0.7, July 1, 2000, through 1.6.35, July 15, 2018 are
+Copyright (c) 2000-2002, 2004, 2006-2018 Glenn Randers-Pehrson, are
+derived from libpng-1.0.6, and are distributed according to the same
+disclaimer and license as libpng-1.0.6 with the following individuals
+added to the list of Contributing Authors:
+
+ Simon-Pierre Cadieux
+ Eric S. Raymond
+ Mans Rullgard
+ Cosmin Truta
+ Gilles Vollant
+ James Yu
+ Mandar Sahastrabuddhe
+ Google Inc.
+ Vadim Barkov
+
+and with the following additions to the disclaimer:
+
+ There is no warranty against interference with your enjoyment of
+ the library or against infringement. There is no warranty that our
+ efforts or the library will fulfill any of your particular purposes
+ or needs. This library is provided with all faults, and the entire
+ risk of satisfactory quality, performance, accuracy, and effort is
+ with the user.
+
+Some files in the "contrib" directory and some configure-generated
+files that are distributed with libpng have other copyright owners, and
+are released under other open source licenses.
+
+libpng versions 0.97, January 1998, through 1.0.6, March 20, 2000, are
+Copyright (c) 1998-2000 Glenn Randers-Pehrson, are derived from
+libpng-0.96, and are distributed according to the same disclaimer and
+license as libpng-0.96, with the following individuals added to the
+list of Contributing Authors:
+
+ Tom Lane
+ Glenn Randers-Pehrson
+ Willem van Schaik
+
+libpng versions 0.89, June 1996, through 0.96, May 1997, are
+Copyright (c) 1996-1997 Andreas Dilger, are derived from libpng-0.88,
+and are distributed according to the same disclaimer and license as
+libpng-0.88, with the following individuals added to the list of
+Contributing Authors:
+
+ John Bowler
+ Kevin Bracey
+ Sam Bushell
+ Magnus Holmgren
+ Greg Roelofs
+ Tom Tanner
+
+Some files in the "scripts" directory have other copyright owners,
+but are released under this license.
+
+libpng versions 0.5, May 1995, through 0.88, January 1996, are
+Copyright (c) 1995-1996 Guy Eric Schalnat, Group 42, Inc.
+
+For the purposes of this copyright and license, "Contributing Authors"
+is defined as the following set of individuals:
+
+ Andreas Dilger
+ Dave Martindale
+ Guy Eric Schalnat
+ Paul Schmidt
+ Tim Wegner
+
+The PNG Reference Library is supplied "AS IS". The Contributing
+Authors and Group 42, Inc. disclaim all warranties, expressed or
+implied, including, without limitation, the warranties of
+merchantability and of fitness for any purpose. The Contributing
+Authors and Group 42, Inc. assume no liability for direct, indirect,
+incidental, special, exemplary, or consequential damages, which may
+result from the use of the PNG Reference Library, even if advised of
+the possibility of such damage.
+
+Permission is hereby granted to use, copy, modify, and distribute this
+source code, or portions hereof, for any purpose, without fee, subject
+to the following restrictions:
+
+ 1. The origin of this source code must not be misrepresented.
+
+ 2. Altered versions must be plainly marked as such and must not
+ be misrepresented as being the original source.
+
+ 3. This Copyright notice may not be removed or altered from any
+ source or altered source distribution.
+
+The Contributing Authors and Group 42, Inc. specifically permit,
+without fee, and encourage the use of this source code as a component
+to supporting the PNG file format in commercial products. If you use
+this source code in a product, acknowledgment is not required but would
+be appreciated.
+
+
+----
+
+LIBTIFF
+
+Copyright (c) 1988-1997 Sam Leffler
+Copyright (c) 1991-1997 Silicon Graphics, Inc.
+
+Permission to use, copy, modify, distribute, and sell this software and
+its documentation for any purpose is hereby granted without fee, provided
+that (i) the above copyright notices and this permission notice appear in
+all copies of the software and related documentation, and (ii) the names of
+Sam Leffler and Silicon Graphics may not be used in any advertising or
+publicity relating to the software without the specific, prior written
+permission of Sam Leffler and Silicon Graphics.
+
+THE SOFTWARE IS PROVIDED "AS-IS" AND WITHOUT WARRANTY OF ANY KIND,
+EXPRESS, IMPLIED OR OTHERWISE, INCLUDING WITHOUT LIMITATION, ANY
+WARRANTY OF MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE.
+
+IN NO EVENT SHALL SAM LEFFLER OR SILICON GRAPHICS BE LIABLE FOR
+ANY SPECIAL, INCIDENTAL, INDIRECT OR CONSEQUENTIAL DAMAGES OF ANY KIND,
+OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
+WHETHER OR NOT ADVISED OF THE POSSIBILITY OF DAMAGE, AND ON ANY THEORY OF
+LIABILITY, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
+OF THIS SOFTWARE.
+
+
+----
+
+LIBWEBP
+
+Copyright (c) 2010, Google Inc. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in
+ the documentation and/or other materials provided with the
+ distribution.
+
+ * Neither the name of Google nor the names of its contributors may
+ be used to endorse or promote products derived from this software
+ without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+----
+
+OPENJPEG
+
+/*
+ * The copyright in this software is being made available under the 2-clauses
+ * BSD License, included below. This software may be subject to other third
+ * party and contributor rights, including patent rights, and no such rights
+ * are granted under this license.
+ *
+ * Copyright (c) 2002-2014, Universite catholique de Louvain (UCL), Belgium
+ * Copyright (c) 2002-2014, Professor Benoit Macq
+ * Copyright (c) 2003-2014, Antonin Descampe
+ * Copyright (c) 2003-2009, Francois-Olivier Devaux
+ * Copyright (c) 2005, Herve Drolon, FreeImage Team
+ * Copyright (c) 2002-2003, Yannick Verschueren
+ * Copyright (c) 2001-2003, David Janssens
+ * Copyright (c) 2011-2012, Centre National d'Etudes Spatiales (CNES), France
+ * Copyright (c) 2012, CS Systemes d'Information, France
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS `AS IS'
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+ * POSSIBILITY OF SUCH DAMAGE.
+ */
+
+
+----
+
+RAQM
+
+The MIT License (MIT)
+
+Copyright © 2015 Information Technology Authority (ITA)
+Copyright © 2016 Khaled Hosny
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+
+
+----
+
+XAU
+
+Copyright 1988, 1993, 1994, 1998 The Open Group
+
+Permission to use, copy, modify, distribute, and sell this software and its
+documentation for any purpose is hereby granted without fee, provided that
+the above copyright notice appear in all copies and that both that
+copyright notice and this permission notice appear in supporting
+documentation.
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+OPEN GROUP BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN
+AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+Except as contained in this notice, the name of The Open Group shall not be
+used in advertising or otherwise to promote the sale, use or other dealings
+in this Software without prior written authorization from The Open Group.
+
+
+----
+
+XCB
+
+Copyright (C) 2001-2006 Bart Massey, Jamey Sharp, and Josh Triplett.
+All Rights Reserved.
+
+Permission is hereby granted, free of charge, to any person
+obtaining a copy of this software and associated
+documentation files (the "Software"), to deal in the
+Software without restriction, including without limitation
+the rights to use, copy, modify, merge, publish, distribute,
+sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so,
+subject to the following conditions:
+
+The above copyright notice and this permission notice shall
+be included in all copies or substantial portions of the
+Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
+PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
+BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
+IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+OTHER DEALINGS IN THE SOFTWARE.
+
+Except as contained in this notice, the names of the authors
+or their institutions shall not be used in advertising or
+otherwise to promote the sale, use or other dealings in this
+Software without prior written authorization from the
+authors.
+
+
+----
+
+XDMCP
+
+Copyright 1989, 1998 The Open Group
+
+Permission to use, copy, modify, distribute, and sell this software and its
+documentation for any purpose is hereby granted without fee, provided that
+the above copyright notice appear in all copies and that both that
+copyright notice and this permission notice appear in supporting
+documentation.
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+OPEN GROUP BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN
+AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+Except as contained in this notice, the name of The Open Group shall not be
+used in advertising or otherwise to promote the sale, use or other dealings
+in this Software without prior written authorization from The Open Group.
+
+Author: Keith Packard, MIT X Consortium
+
+
+----
+
+ZLIB
+
+ (C) 1995-2017 Jean-loup Gailly and Mark Adler
+
+ This software is provided 'as-is', without any express or implied
+ warranty. In no event will the authors be held liable for any damages
+ arising from the use of this software.
+
+ Permission is granted to anyone to use this software for any purpose,
+ including commercial applications, and to alter it and redistribute it
+ freely, subject to the following restrictions:
+
+ 1. The origin of this software must not be misrepresented; you must not
+ claim that you wrote the original software. If you use this software
+ in a product, an acknowledgment in the product documentation would be
+ appreciated but is not required.
+ 2. Altered source versions must be plainly marked as such, and must not be
+ misrepresented as being the original software.
+ 3. This notice may not be removed or altered from any source distribution.
+
+ Jean-loup Gailly Mark Adler
+ jloup@gzip.org madler@alumni.caltech.edu
+
+If you use the zlib library in a product, we would appreciate *not* receiving
+lengthy legal documents to sign. The sources are provided for free but without
+warranty of any kind. The library has been entirely written by Jean-loup
+Gailly and Mark Adler; it does not include third-party code.
+
+If you redistribute modified sources, we would appreciate that you include in
+the file ChangeLog history information documenting your changes. Please read
+the FAQ for more information on the distribution of modified source versions.
diff --git a/Backend/venv/lib/python3.12/site-packages/Pillow-10.1.0.dist-info/METADATA b/Backend/venv/lib/python3.12/site-packages/Pillow-10.1.0.dist-info/METADATA
new file mode 100644
index 00000000..77359921
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/Pillow-10.1.0.dist-info/METADATA
@@ -0,0 +1,176 @@
+Metadata-Version: 2.1
+Name: Pillow
+Version: 10.1.0
+Summary: Python Imaging Library (Fork)
+Home-page: https://python-pillow.org
+Author: Jeffrey A. Clark (Alex)
+Author-email: aclark@aclark.net
+License: HPND
+Project-URL: Documentation, https://pillow.readthedocs.io
+Project-URL: Source, https://github.com/python-pillow/Pillow
+Project-URL: Funding, https://tidelift.com/subscription/pkg/pypi-pillow?utm_source=pypi-pillow&utm_medium=pypi
+Project-URL: Release notes, https://pillow.readthedocs.io/en/stable/releasenotes/index.html
+Project-URL: Changelog, https://github.com/python-pillow/Pillow/blob/main/CHANGES.rst
+Project-URL: Twitter, https://twitter.com/PythonPillow
+Project-URL: Mastodon, https://fosstodon.org/@pillow
+Keywords: Imaging
+Classifier: Development Status :: 6 - Mature
+Classifier: License :: OSI Approved :: Historical Permission Notice and Disclaimer (HPND)
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Topic :: Multimedia :: Graphics
+Classifier: Topic :: Multimedia :: Graphics :: Capture :: Digital Camera
+Classifier: Topic :: Multimedia :: Graphics :: Capture :: Screen Capture
+Classifier: Topic :: Multimedia :: Graphics :: Graphics Conversion
+Classifier: Topic :: Multimedia :: Graphics :: Viewers
+Requires-Python: >=3.8
+Description-Content-Type: text/markdown
+License-File: LICENSE
+Provides-Extra: docs
+Requires-Dist: furo ; extra == 'docs'
+Requires-Dist: olefile ; extra == 'docs'
+Requires-Dist: sphinx >=2.4 ; extra == 'docs'
+Requires-Dist: sphinx-copybutton ; extra == 'docs'
+Requires-Dist: sphinx-inline-tabs ; extra == 'docs'
+Requires-Dist: sphinx-removed-in ; extra == 'docs'
+Requires-Dist: sphinxext-opengraph ; extra == 'docs'
+Provides-Extra: tests
+Requires-Dist: check-manifest ; extra == 'tests'
+Requires-Dist: coverage ; extra == 'tests'
+Requires-Dist: defusedxml ; extra == 'tests'
+Requires-Dist: markdown2 ; extra == 'tests'
+Requires-Dist: olefile ; extra == 'tests'
+Requires-Dist: packaging ; extra == 'tests'
+Requires-Dist: pyroma ; extra == 'tests'
+Requires-Dist: pytest ; extra == 'tests'
+Requires-Dist: pytest-cov ; extra == 'tests'
+Requires-Dist: pytest-timeout ; extra == 'tests'
+
+# Pillow
+
+## Python Imaging Library (Fork)
+
+Pillow is the friendly PIL fork by [Jeffrey A. Clark (Alex) and
+contributors](https://github.com/python-pillow/Pillow/graphs/contributors).
+PIL is the Python Imaging Library by Fredrik Lundh and Contributors.
+As of 2019, Pillow development is
+[supported by Tidelift](https://tidelift.com/subscription/pkg/pypi-pillow?utm_source=pypi-pillow&utm_medium=readme&utm_campaign=enterprise).
+
+[Status badge table omitted: docs, tests, package, and social badges.]
+
+## Overview
+
+The Python Imaging Library adds image processing capabilities to your Python interpreter.
+
+This library provides extensive file format support, an efficient internal representation, and fairly powerful image processing capabilities.
+
+The core image library is designed for fast access to data stored in a few basic pixel formats. It should provide a solid foundation for a general image processing tool.
+
+## More Information
+
+- [Documentation](https://pillow.readthedocs.io/)
+ - [Installation](https://pillow.readthedocs.io/en/latest/installation.html)
+ - [Handbook](https://pillow.readthedocs.io/en/latest/handbook/index.html)
+- [Contribute](https://github.com/python-pillow/Pillow/blob/main/.github/CONTRIBUTING.md)
+ - [Issues](https://github.com/python-pillow/Pillow/issues)
+ - [Pull requests](https://github.com/python-pillow/Pillow/pulls)
+- [Release notes](https://pillow.readthedocs.io/en/stable/releasenotes/index.html)
+- [Changelog](https://github.com/python-pillow/Pillow/blob/main/CHANGES.rst)
+ - [Pre-fork](https://github.com/python-pillow/Pillow/blob/main/CHANGES.rst#pre-fork)
+
+## Report a Vulnerability
+
+To report a security vulnerability, please follow the procedure described in the [Tidelift security policy](https://tidelift.com/docs/security).
diff --git a/Backend/venv/lib/python3.12/site-packages/Pillow-10.1.0.dist-info/RECORD b/Backend/venv/lib/python3.12/site-packages/Pillow-10.1.0.dist-info/RECORD
new file mode 100644
index 00000000..42596875
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/Pillow-10.1.0.dist-info/RECORD
@@ -0,0 +1,219 @@
+PIL/BdfFontFile.py,sha256=wI1cUhBUtYZZLv7MrNGvuEx25OYEEDvZXP_dysUUCTk,3237
+PIL/BlpImagePlugin.py,sha256=kJI8_9KmJGFg8n8ghnG3V64RQ1R0oJXjwbfuQFOgIP0,15534
+PIL/BmpImagePlugin.py,sha256=bH5LEmjo8zSfNOTI_yeuFqBWE_s9KZJekLRmaFhQNIU,17672
+PIL/BufrStubImagePlugin.py,sha256=Bz4tiUJDoVlCUKiLfciimSoi__Pv5byF1AtCIlKcQU8,1557
+PIL/ContainerIO.py,sha256=1U15zUXjWO8uWK-MyCp66Eh7djQEU-oUeCDoBqewNkA,2883
+PIL/CurImagePlugin.py,sha256=aLLyY94iXRjiaBqmSEQwuLMsCX4vTVi-FNa7gKzkzmU,1721
+PIL/DcxImagePlugin.py,sha256=HHSoW6E2wTJ21XLDm9Ueip9a5dizMXr_A9OonxvgGsY,1958
+PIL/DdsImagePlugin.py,sha256=bGAV8GNhdRZAECeB1ogIrQVmTzm6uMc6buEeFTBAES8,9590
+PIL/EpsImagePlugin.py,sha256=6NTgCw6R0femmv505FHBxioBE8Sfy7YRDeHARHin_Kw,15916
+PIL/ExifTags.py,sha256=bzD8J9y_qWVU0TDYzmpFa_TescH4tZGb0Qps8ZTJIJA,9718
+PIL/FitsImagePlugin.py,sha256=p6ChMrXPj7ew5X7SYscVr6le6HxBz--0EDZ1tRJ6bO8,2063
+PIL/FliImagePlugin.py,sha256=1HT_4ZXOHkokLpp6qXjRgJasVHOTrM536yNIsamGshY,4444
+PIL/FontFile.py,sha256=0RmRsczPgH8vKeLg6L2fGRMXobK5FqImTXAs0nfnR7I,2764
+PIL/FpxImagePlugin.py,sha256=yz8VeWEAVC_FGtC-pWZjrfZKoZjV7_2fqcD9q9l0L8s,6962
+PIL/FtexImagePlugin.py,sha256=FWALHTA09Gks6pW5LmhkxQ3hDuJLn6JJF38s4WB0_MM,3430
+PIL/GbrImagePlugin.py,sha256=R_Kf5SGstxulTpLVLS9hT4l1RlBziBIEAezmQm_cEHk,2910
+PIL/GdImageFile.py,sha256=7_SW_RM_cACDpMnTGVr2gBcIEDXOvwJT1G8JhfVkosk,2608
+PIL/GifImagePlugin.py,sha256=MT3013exWMnrCFBergtWUSxO6ONRfhbiKV7qiSu4cXI,35661
+PIL/GimpGradientFile.py,sha256=XmzOVRuhvVR1jh9pIfwvvJ6IfTau5_wG9hFfPsKNfjw,3396
+PIL/GimpPaletteFile.py,sha256=_kfSY0tbaxJO97QWKB597LhZGpVZZAfeUGPhtxmBiwc,1345
+PIL/GribStubImagePlugin.py,sha256=-6s2kPUm2FuUahf-cNWchREA0psXW20MWDHMfavddtk,1551
+PIL/Hdf5StubImagePlugin.py,sha256=6QuhJLscoFQjS6rlUFAJgCp1zjtRz7NPWtkZkSAYGMk,1554
+PIL/IcnsImagePlugin.py,sha256=BlZOJsHbUCpuifrysO_KDJUQ_B3TeZa1w_h7fI_vOFg,11928
+PIL/IcoImagePlugin.py,sha256=h0QRrbFnSCYw5JuPvwzoApcVQEEeR9OwWvYMorzxIPA,11623
+PIL/ImImagePlugin.py,sha256=8Znnc0wp4fdSIttHTWGHTBwD_WakTCwMD_Z4LDBKaPU,10870
+PIL/Image.py,sha256=4GiRSxQDhDA4bYcrYVEKvYsxoENXHv4S3wdsGTZyqC8,134251
+PIL/ImageChops.py,sha256=7mZC-G8OS3NbJxv1UzibqNmJSNBDwJ629gmpaupBcXk,7003
+PIL/ImageCms.py,sha256=WzjCn04HsHg6RyLMQK3oRf9_A6dgaCdl2iP_FTzza-M,37171
+PIL/ImageColor.py,sha256=hPgYZQnxaVYzz2TTJfhf8qoobWYanrFFNWBFsv3ypNQ,9084
+PIL/ImageDraw.py,sha256=Y5nQar5yYZ4h2Q1BorVAvPwIIdF37PxhwUxbHY0gq-s,36344
+PIL/ImageDraw2.py,sha256=0sm4-D8qolpzOhQzT1Z4CDXeWR4O7n1eDWkxSWql4RU,5501
+PIL/ImageEnhance.py,sha256=CJnCouiBmxN2fE0xW7m_uMdBqcm-Fp0S3ruHhkygal4,3190
+PIL/ImageFile.py,sha256=b0s8wpaEMOxLNyURdzRfFKd5VRdvqI2_xUuHI63JotI,23539
+PIL/ImageFilter.py,sha256=BqMHXt9Zw1kTFrdE0w37afNMug9_gxWczbgINgNmnrc,17141
+PIL/ImageFont.py,sha256=UQq4aOOyFOdfUCptABHJp5-i512d3pioocoKECIs0YM,59986
+PIL/ImageGrab.py,sha256=lQ7IvYUI3KqszsayKYdno0zSCIRN-koFezQcno9JW14,5595
+PIL/ImageMath.py,sha256=W181r_IkejDRx54jgBDCu_mNLSUpGQFNbPPGIzFzV8o,7357
+PIL/ImageMode.py,sha256=ToNN9DhhnUijbolcYpHVoqNt3wpFVas77NfhxoFzUvo,2914
+PIL/ImageMorph.py,sha256=PWS1-d4WgiWZxLJ_SyhvvgTDXIFS8DlvUpxLWlw9yUU,7977
+PIL/ImageOps.py,sha256=dkWVIm7vI2n-TxyafpwnWCyKavciCofaMpEpluhQM-I,22477
+PIL/ImagePalette.py,sha256=zqnIJFY1Eyirk2y76ocesuVtuabL8OfAo-aw6oWPmPA,7908
+PIL/ImagePath.py,sha256=lVmH1-lCd0SyrFoqyhlstAFW2iJuC14fPcW8iewvxCQ,336
+PIL/ImageQt.py,sha256=zDkvdqm3adlrV1nxF2r9uCBUsXcX6YU9tbqK4FrKylA,6366
+PIL/ImageSequence.py,sha256=4vey85AWprZthqCFEqpaH-65WIP5OSjHTS6gH2c50y4,1872
+PIL/ImageShow.py,sha256=YCHA6sP0TBnVi5iN-eyMKJERHBpq-gkWWEYcLvr8zlU,8308
+PIL/ImageStat.py,sha256=GkE2NEsd6c5QrDlvIOt5xUEqSZppHzQ_4tub66Dervg,3924
+PIL/ImageTk.py,sha256=Hkepk-26IFTdm2U7MNWMs-HqD0AUjTW6HI9oKS63uN0,8461
+PIL/ImageTransform.py,sha256=oO7Ir7j_5r4DeoZ-ZgqW9FO099cP2gHdE32SQdfmW_s,2883
+PIL/ImageWin.py,sha256=1MQBJS7tVrQzI9jN0nmeNeFpIaq8fXra9kQocHkiFxM,7191
+PIL/ImtImagePlugin.py,sha256=lwNHVEPNhT_xeW0QtrvWFOJwNhJCDAnTxgzD23MeHcQ,2580
+PIL/IptcImagePlugin.py,sha256=w2Xjyk_0rZZ-vOJHwnJ9kVN53ZRxrQs4oUI-S3wF_d4,5812
+PIL/Jpeg2KImagePlugin.py,sha256=H1CxbxXdxv5GX5F7FmJp6QO8GEM5-KU2xcALHaNc1vA,11585
+PIL/JpegImagePlugin.py,sha256=T6DFt7vRhTF5xMUEAFQeOLpzDso_EmMbCu5ZDWGEAMM,29347
+PIL/JpegPresets.py,sha256=ICr_9Xeh2FoY5vMpNbPbs3HdmQfhr3C7uyL1PE7BShk,12343
+PIL/McIdasImagePlugin.py,sha256=DXZFGd9h2RmaCHJKz-rmBx7n5myuCd0-sh7rM586qa4,1797
+PIL/MicImagePlugin.py,sha256=HP_LCGqEiFKF-Km6-_VHJINtA6NleUrg78RCwnLE9Xc,2514
+PIL/MpegImagePlugin.py,sha256=hauuK6YMJXhQS9K-hgsjRN_WGdt9WXT2TtjYaj-Od94,1824
+PIL/MpoImagePlugin.py,sha256=ivSlGji16r7D7lx6nFpdtdu7LnkSj9XeDKEuIOs9AwE,6289
+PIL/MspImagePlugin.py,sha256=vWBPIUqN9B1_XHXU9r2SMS7fcsdIM0zHA9HW2juiH_4,5613
+PIL/PSDraw.py,sha256=96uX-it9Ey3Vm5Okhiv7ScgU0G4AtVKIlTAEMAszp1E,6525
+PIL/PaletteFile.py,sha256=EoUcTJ8iwiSVNE03skj5y5qpZhXlFi6mTu3bqxQorMg,1128
+PIL/PalmImagePlugin.py,sha256=-dAS8BfzSCQfEhrBBFdGQdNB6pF68QVpfMGRgyssDmU,9144
+PIL/PcdImagePlugin.py,sha256=XyqyHTEDObNF1bRjLPdCu0Gi1wSZ0lzE8VQ583145LY,1497
+PIL/PcfFontFile.py,sha256=YS0xp_pKvfBhvcEsOljPbwsFDUwSf6ZU9JmuSwY8OrY,6757
+PIL/PcxImagePlugin.py,sha256=9ztYFAvLwixbBD6HRhIPus7-Hgyu7aHicZ1041OAhLI,6022
+PIL/PdfImagePlugin.py,sha256=f86-CMKZKb3xX6eMH-l247QbwGkTSOXhvKZZ3f59k8o,8788
+PIL/PdfParser.py,sha256=qITSjToIONKh7j3LvsQ2-iCsgb_d2de0LgeahwNOmEY,34401
+PIL/PixarImagePlugin.py,sha256=uvgJrwDSRHxpYN_WaDyjkWrwKV03UBodjUWjQK5cCCQ,1652
+PIL/PngImagePlugin.py,sha256=p5psQesMpo-GQND-d6snrcUFVZhfK438z691B6YLYQ0,46225
+PIL/PpmImagePlugin.py,sha256=a5p5Y-6Ho__t5MkOsCjRFXGwnxbYouuJNqcvNx12NU0,11402
+PIL/PsdImagePlugin.py,sha256=06SCYlVINjw1Zl2Meyi_Z3ZqUahIzulJ9pdggUwqnA8,7537
+PIL/PyAccess.py,sha256=WHfPT4Rs_rrcGTZ8PlBcNNy3MclkbtYXNFObjvr-XIU,9898
+PIL/QoiImagePlugin.py,sha256=1R2ygHdxARrdgEo-LtZ_e_ttulyRl3wM_dBwTha_8dY,3635
+PIL/SgiImagePlugin.py,sha256=kdY_yFYWBdcUHT9WH4b_2RRYp83FLpoMBu8R6Xxx_AY,6179
+PIL/SpiderImagePlugin.py,sha256=mBscfD6-M_zT-ZBP1x3MaYsQ6G4ES2IaJpJqmn3JtfA,9471
+PIL/SunImagePlugin.py,sha256=3c8HAw2CwbbL2x7sXyxjehXHdT03jx5LO-cQFdkDCg8,4406
+PIL/TarIO.py,sha256=1zNNZmPtgI7ZQ3yCSJufh9SkUTfJ5msEf6qNdyi1PKQ,1491
+PIL/TgaImagePlugin.py,sha256=_v_7HNqVfK35m101Ai-mR7wIileJESt7cqCHJASZxFI,6581
+PIL/TiffImagePlugin.py,sha256=4lBabX0HtnL3ObZKKFAqLbfYxKeQzGGxz_z8L3rouaI,76692
+PIL/TiffTags.py,sha256=d7b3bnEhSkiG2W9DzReGE4zbcdAogznonqLmD3HEJkc,16814
+PIL/WalImageFile.py,sha256=zi4BLE0yJCqXhz-HmEAqZW1zQdOHiwXz5aaqxQ0oBes,5520
+PIL/WebPImagePlugin.py,sha256=gTPDE2QpedvjnGLov4XfDgHUFlaz01rlTFZcVBaG9pA,11240
+PIL/WmfImagePlugin.py,sha256=CDjxEzGxrYU25FTGiUpxQ0MyAsTGih-mrYVV4RYR4gE,4691
+PIL/XVThumbImagePlugin.py,sha256=otsgVWmu8pQl6hmt-FMlwqqSReGbB8xP2sJCbHC4R58,1987
+PIL/XbmImagePlugin.py,sha256=wWk0nIAjU78QRUsdT0V6rCWvNrFQdH30D_FF5iTypxQ,2488
+PIL/XpmImagePlugin.py,sha256=z9bjqacJ32C5V1gicEm2gHmjI-zxsg8FF82qok12pcs,3185
+PIL/__init__.py,sha256=I4crowTvU4ZZLI15VbrhhxVJhSSKJqn1MHcLLXPbLns,1979
+PIL/__main__.py,sha256=axR7PO-HtXp-o0rBhKIxs0wark0rBfaDIhAIWqtWUo4,41
+PIL/__pycache__/BdfFontFile.cpython-312.pyc,,
+PIL/__pycache__/BlpImagePlugin.cpython-312.pyc,,
+PIL/__pycache__/BmpImagePlugin.cpython-312.pyc,,
+PIL/__pycache__/BufrStubImagePlugin.cpython-312.pyc,,
+PIL/__pycache__/ContainerIO.cpython-312.pyc,,
+PIL/__pycache__/CurImagePlugin.cpython-312.pyc,,
+PIL/__pycache__/DcxImagePlugin.cpython-312.pyc,,
+PIL/__pycache__/DdsImagePlugin.cpython-312.pyc,,
+PIL/__pycache__/EpsImagePlugin.cpython-312.pyc,,
+PIL/__pycache__/ExifTags.cpython-312.pyc,,
+PIL/__pycache__/FitsImagePlugin.cpython-312.pyc,,
+PIL/__pycache__/FliImagePlugin.cpython-312.pyc,,
+PIL/__pycache__/FontFile.cpython-312.pyc,,
+PIL/__pycache__/FpxImagePlugin.cpython-312.pyc,,
+PIL/__pycache__/FtexImagePlugin.cpython-312.pyc,,
+PIL/__pycache__/GbrImagePlugin.cpython-312.pyc,,
+PIL/__pycache__/GdImageFile.cpython-312.pyc,,
+PIL/__pycache__/GifImagePlugin.cpython-312.pyc,,
+PIL/__pycache__/GimpGradientFile.cpython-312.pyc,,
+PIL/__pycache__/GimpPaletteFile.cpython-312.pyc,,
+PIL/__pycache__/GribStubImagePlugin.cpython-312.pyc,,
+PIL/__pycache__/Hdf5StubImagePlugin.cpython-312.pyc,,
+PIL/__pycache__/IcnsImagePlugin.cpython-312.pyc,,
+PIL/__pycache__/IcoImagePlugin.cpython-312.pyc,,
+PIL/__pycache__/ImImagePlugin.cpython-312.pyc,,
+PIL/__pycache__/Image.cpython-312.pyc,,
+PIL/__pycache__/ImageChops.cpython-312.pyc,,
+PIL/__pycache__/ImageCms.cpython-312.pyc,,
+PIL/__pycache__/ImageColor.cpython-312.pyc,,
+PIL/__pycache__/ImageDraw.cpython-312.pyc,,
+PIL/__pycache__/ImageDraw2.cpython-312.pyc,,
+PIL/__pycache__/ImageEnhance.cpython-312.pyc,,
+PIL/__pycache__/ImageFile.cpython-312.pyc,,
+PIL/__pycache__/ImageFilter.cpython-312.pyc,,
+PIL/__pycache__/ImageFont.cpython-312.pyc,,
+PIL/__pycache__/ImageGrab.cpython-312.pyc,,
+PIL/__pycache__/ImageMath.cpython-312.pyc,,
+PIL/__pycache__/ImageMode.cpython-312.pyc,,
+PIL/__pycache__/ImageMorph.cpython-312.pyc,,
+PIL/__pycache__/ImageOps.cpython-312.pyc,,
+PIL/__pycache__/ImagePalette.cpython-312.pyc,,
+PIL/__pycache__/ImagePath.cpython-312.pyc,,
+PIL/__pycache__/ImageQt.cpython-312.pyc,,
+PIL/__pycache__/ImageSequence.cpython-312.pyc,,
+PIL/__pycache__/ImageShow.cpython-312.pyc,,
+PIL/__pycache__/ImageStat.cpython-312.pyc,,
+PIL/__pycache__/ImageTk.cpython-312.pyc,,
+PIL/__pycache__/ImageTransform.cpython-312.pyc,,
+PIL/__pycache__/ImageWin.cpython-312.pyc,,
+PIL/__pycache__/ImtImagePlugin.cpython-312.pyc,,
+PIL/__pycache__/IptcImagePlugin.cpython-312.pyc,,
+PIL/__pycache__/Jpeg2KImagePlugin.cpython-312.pyc,,
+PIL/__pycache__/JpegImagePlugin.cpython-312.pyc,,
+PIL/__pycache__/JpegPresets.cpython-312.pyc,,
+PIL/__pycache__/McIdasImagePlugin.cpython-312.pyc,,
+PIL/__pycache__/MicImagePlugin.cpython-312.pyc,,
+PIL/__pycache__/MpegImagePlugin.cpython-312.pyc,,
+PIL/__pycache__/MpoImagePlugin.cpython-312.pyc,,
+PIL/__pycache__/MspImagePlugin.cpython-312.pyc,,
+PIL/__pycache__/PSDraw.cpython-312.pyc,,
+PIL/__pycache__/PaletteFile.cpython-312.pyc,,
+PIL/__pycache__/PalmImagePlugin.cpython-312.pyc,,
+PIL/__pycache__/PcdImagePlugin.cpython-312.pyc,,
+PIL/__pycache__/PcfFontFile.cpython-312.pyc,,
+PIL/__pycache__/PcxImagePlugin.cpython-312.pyc,,
+PIL/__pycache__/PdfImagePlugin.cpython-312.pyc,,
+PIL/__pycache__/PdfParser.cpython-312.pyc,,
+PIL/__pycache__/PixarImagePlugin.cpython-312.pyc,,
+PIL/__pycache__/PngImagePlugin.cpython-312.pyc,,
+PIL/__pycache__/PpmImagePlugin.cpython-312.pyc,,
+PIL/__pycache__/PsdImagePlugin.cpython-312.pyc,,
+PIL/__pycache__/PyAccess.cpython-312.pyc,,
+PIL/__pycache__/QoiImagePlugin.cpython-312.pyc,,
+PIL/__pycache__/SgiImagePlugin.cpython-312.pyc,,
+PIL/__pycache__/SpiderImagePlugin.cpython-312.pyc,,
+PIL/__pycache__/SunImagePlugin.cpython-312.pyc,,
+PIL/__pycache__/TarIO.cpython-312.pyc,,
+PIL/__pycache__/TgaImagePlugin.cpython-312.pyc,,
+PIL/__pycache__/TiffImagePlugin.cpython-312.pyc,,
+PIL/__pycache__/TiffTags.cpython-312.pyc,,
+PIL/__pycache__/WalImageFile.cpython-312.pyc,,
+PIL/__pycache__/WebPImagePlugin.cpython-312.pyc,,
+PIL/__pycache__/WmfImagePlugin.cpython-312.pyc,,
+PIL/__pycache__/XVThumbImagePlugin.cpython-312.pyc,,
+PIL/__pycache__/XbmImagePlugin.cpython-312.pyc,,
+PIL/__pycache__/XpmImagePlugin.cpython-312.pyc,,
+PIL/__pycache__/__init__.cpython-312.pyc,,
+PIL/__pycache__/__main__.cpython-312.pyc,,
+PIL/__pycache__/_binary.cpython-312.pyc,,
+PIL/__pycache__/_deprecate.cpython-312.pyc,,
+PIL/__pycache__/_tkinter_finder.cpython-312.pyc,,
+PIL/__pycache__/_util.cpython-312.pyc,,
+PIL/__pycache__/_version.cpython-312.pyc,,
+PIL/__pycache__/features.cpython-312.pyc,,
+PIL/_binary.py,sha256=E5qhxNJ7hhbEoqu0mODOXHT8z-FDRShXG3jTJhsDdas,2043
+PIL/_deprecate.py,sha256=iFhNhOQ_OEFvD3x4NE4_MEsnzO3Wdl-fzV6AOe4s_3I,1936
+PIL/_imaging.cpython-312-x86_64-linux-gnu.so,sha256=ALZMrG0s7MC-beDOPTnxWfUQJ4zI4DK0bzk6zs5p31g,719113
+PIL/_imagingcms.cpython-312-x86_64-linux-gnu.so,sha256=mWGPxKwbcAGwJ511L2ixspvZ-AX-lcuPBLDS24M0NDU,47121
+PIL/_imagingft.cpython-312-x86_64-linux-gnu.so,sha256=mmk2_xG2QQUxPUb87eYmYMTz29ZXyyJi0F_4EOQOEdk,77065
+PIL/_imagingmath.cpython-312-x86_64-linux-gnu.so,sha256=q_-qacUT40EH-UMeQhwgUlXzhX2rdbzUKmCn4RJ6Ej8,31344
+PIL/_imagingmorph.cpython-312-x86_64-linux-gnu.so,sha256=fpSFTDwmBrksAvJPQ2aGFfqJIKNuDIT1l9bbFicP96c,14992
+PIL/_imagingtk.cpython-312-x86_64-linux-gnu.so,sha256=XkQnDsI-BMeLKV8dQeN43PpzOzZJMJEdeIW3Wf8vMxM,14992
+PIL/_tkinter_finder.py,sha256=PApqlh4yEhsM99fojTtsqNmgL0v_9qRFEqqRJYlY74c,503
+PIL/_util.py,sha256=7897Hlb76Da6zwBXnh4ASp-DOw_1dgc2HoZZ-9FTWaQ,369
+PIL/_version.py,sha256=gZKWGpyRARzPJfviSNVZQJSPbGwba4M022HDGVi8oJ0,51
+PIL/_webp.cpython-312-x86_64-linux-gnu.so,sha256=b7BwF1xSWrtlshE75Pghdmgvh_tdCpIZVsxYC0tAd-o,39417
+PIL/features.py,sha256=57SM06GH_FCbRtlIlz8yN8LImIpo9O2opicY1Kdj2zI,9618
+Pillow-10.1.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+Pillow-10.1.0.dist-info/LICENSE,sha256=OtPr9YP7Omve8YxTc488sNX88vfjc5xa5JjfKEpy0Bc,31122
+Pillow-10.1.0.dist-info/METADATA,sha256=81yUebiFN0hRP6hy1hc66PPuhPIipFWy_cvv-I11iks,9459
+Pillow-10.1.0.dist-info/RECORD,,
+Pillow-10.1.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+Pillow-10.1.0.dist-info/WHEEL,sha256=YY2fgn5urED3CfSOKUYcG2CAv8f1XOQiJaIq5htD2fA,114
+Pillow-10.1.0.dist-info/top_level.txt,sha256=riZqrk-hyZqh5f1Z0Zwii3dKfxEsByhu9cU9IODF-NY,4
+Pillow-10.1.0.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
+Pillow.libs/libXau-154567c4.so.6.0.0,sha256=BUhNJL94y47QMWnxywZyBNgpy3ryHeiCBADSnRFeQyA,22081
+Pillow.libs/libbrotlicommon-3ecfe81c.so.1,sha256=AkvHYFUCz_1Fs_fD83_gQ2lMw7S0y385rzuacnk-TC4,144425
+Pillow.libs/libbrotlidec-922c819b.so.1,sha256=CGnqJ6LQOPrJ57Pf2TUjXYf7UdIu2DbE5s4ZY-CXXWQ,58225
+Pillow.libs/libfreetype-82733d78.so.6.20.1,sha256=EusEyDP-gAG4vbd0MpBbfANJD_ho8wTCTD2-JAAOrbM,1422625
+Pillow.libs/libharfbuzz-e3b74c67.so.0.60821.0,sha256=nbmbJ6Mkk4mdBX_y0TJD3GShLRcNtoB2KK76uTpGDp4,3356665
+Pillow.libs/libjpeg-32b76cef.so.62.4.0,sha256=gAGI0CF-Wnp3HQ1ZXE_vInYruweYAYVMr8j5J5LOz3w,955073
+Pillow.libs/liblcms2-0821774a.so.2.0.15,sha256=8XrlZSyPgEtSJgYjfqdSxArEEK7hIx_9-PIJV0FuCh8,502529
+Pillow.libs/liblzma-1e44b93d.so.5.4.4,sha256=TDMO2qMcFvpe9gzTG-BPnquQRYC8CkiSEdKB7B5hKWw,270265
+Pillow.libs/libopenjp2-20e347f0.so.2.5.0,sha256=36Q8K57KjogaIVDJWcU7Bl9uHn4XuLl6EkhquAF1TlQ,578001
+Pillow.libs/libpng16-78d422d5.so.16.40.0,sha256=IGRppOJyIQZmAKNUperC1Ww2v0kJvnbb-S4cNI6C_aI,281937
+Pillow.libs/libsharpyuv-20f78091.so.0.0.1,sha256=jCg1XQ4-9EpGfwdfBBAutIfMfAHhNcLi-cS20-2_-Go,37713
+Pillow.libs/libtiff-91af027d.so.6.0.2,sha256=loMMcUWpvEbmVIb1nPHnjL0uyQciApxJfv0hppKRti4,725697
+Pillow.libs/libwebp-850e2bec.so.7.1.8,sha256=XLTrXx2r_1a9OdXijckePIPs5DiJJWrnP1QygvsEjLM,755753
+Pillow.libs/libwebpdemux-df9b36c7.so.2.0.14,sha256=-WWag67Dv66vNcjqXHc6L3tbI2SsyTkDnM_xSNCn12E,26121
+Pillow.libs/libwebpmux-9fe05867.so.3.0.13,sha256=7y_xyZyocyswZKKfgTGYZBOWefWKyWnDrDuiWmscRjo,54441
+Pillow.libs/libxcb-f0538cc0.so.1.1.0,sha256=qzk7IU7aiMrG3wJgfqeOpg1vM-xqaKn5X-dLBqlcsws,251425
diff --git a/Backend/venv/lib/python3.12/site-packages/Pillow-10.1.0.dist-info/REQUESTED b/Backend/venv/lib/python3.12/site-packages/Pillow-10.1.0.dist-info/REQUESTED
new file mode 100644
index 00000000..e69de29b
diff --git a/Backend/venv/lib/python3.12/site-packages/Pillow-10.1.0.dist-info/WHEEL b/Backend/venv/lib/python3.12/site-packages/Pillow-10.1.0.dist-info/WHEEL
new file mode 100644
index 00000000..819b0054
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/Pillow-10.1.0.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.41.2)
+Root-Is-Purelib: false
+Tag: cp312-cp312-manylinux_2_28_x86_64
+
diff --git a/Backend/venv/lib/python3.12/site-packages/Pillow-10.1.0.dist-info/top_level.txt b/Backend/venv/lib/python3.12/site-packages/Pillow-10.1.0.dist-info/top_level.txt
new file mode 100644
index 00000000..b338169c
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/Pillow-10.1.0.dist-info/top_level.txt
@@ -0,0 +1 @@
+PIL
diff --git a/Backend/venv/lib/python3.12/site-packages/Pillow-10.1.0.dist-info/zip-safe b/Backend/venv/lib/python3.12/site-packages/Pillow-10.1.0.dist-info/zip-safe
new file mode 100644
index 00000000..8b137891
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/Pillow-10.1.0.dist-info/zip-safe
@@ -0,0 +1 @@
+
diff --git a/Backend/venv/lib/python3.12/site-packages/Pillow.libs/libXau-154567c4.so.6.0.0 b/Backend/venv/lib/python3.12/site-packages/Pillow.libs/libXau-154567c4.so.6.0.0
new file mode 100755
index 00000000..ff06a58b
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/Pillow.libs/libXau-154567c4.so.6.0.0 differ
diff --git a/Backend/venv/lib/python3.12/site-packages/Pillow.libs/libbrotlicommon-3ecfe81c.so.1 b/Backend/venv/lib/python3.12/site-packages/Pillow.libs/libbrotlicommon-3ecfe81c.so.1
new file mode 100755
index 00000000..aa7032c8
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/Pillow.libs/libbrotlicommon-3ecfe81c.so.1 differ
diff --git a/Backend/venv/lib/python3.12/site-packages/Pillow.libs/libbrotlidec-922c819b.so.1 b/Backend/venv/lib/python3.12/site-packages/Pillow.libs/libbrotlidec-922c819b.so.1
new file mode 100755
index 00000000..15a2ea06
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/Pillow.libs/libbrotlidec-922c819b.so.1 differ
diff --git a/Backend/venv/lib/python3.12/site-packages/Pillow.libs/libfreetype-82733d78.so.6.20.1 b/Backend/venv/lib/python3.12/site-packages/Pillow.libs/libfreetype-82733d78.so.6.20.1
new file mode 100755
index 00000000..23ba6a42
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/Pillow.libs/libfreetype-82733d78.so.6.20.1 differ
diff --git a/Backend/venv/lib/python3.12/site-packages/Pillow.libs/libharfbuzz-e3b74c67.so.0.60821.0 b/Backend/venv/lib/python3.12/site-packages/Pillow.libs/libharfbuzz-e3b74c67.so.0.60821.0
new file mode 100755
index 00000000..b8f7d4a1
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/Pillow.libs/libharfbuzz-e3b74c67.so.0.60821.0 differ
diff --git a/Backend/venv/lib/python3.12/site-packages/Pillow.libs/libjpeg-32b76cef.so.62.4.0 b/Backend/venv/lib/python3.12/site-packages/Pillow.libs/libjpeg-32b76cef.so.62.4.0
new file mode 100755
index 00000000..236df6f0
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/Pillow.libs/libjpeg-32b76cef.so.62.4.0 differ
diff --git a/Backend/venv/lib/python3.12/site-packages/Pillow.libs/liblcms2-0821774a.so.2.0.15 b/Backend/venv/lib/python3.12/site-packages/Pillow.libs/liblcms2-0821774a.so.2.0.15
new file mode 100755
index 00000000..0f8b60ba
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/Pillow.libs/liblcms2-0821774a.so.2.0.15 differ
diff --git a/Backend/venv/lib/python3.12/site-packages/Pillow.libs/liblzma-1e44b93d.so.5.4.4 b/Backend/venv/lib/python3.12/site-packages/Pillow.libs/liblzma-1e44b93d.so.5.4.4
new file mode 100755
index 00000000..2902f3ac
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/Pillow.libs/liblzma-1e44b93d.so.5.4.4 differ
diff --git a/Backend/venv/lib/python3.12/site-packages/Pillow.libs/libopenjp2-20e347f0.so.2.5.0 b/Backend/venv/lib/python3.12/site-packages/Pillow.libs/libopenjp2-20e347f0.so.2.5.0
new file mode 100755
index 00000000..57b163ae
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/Pillow.libs/libopenjp2-20e347f0.so.2.5.0 differ
diff --git a/Backend/venv/lib/python3.12/site-packages/Pillow.libs/libpng16-78d422d5.so.16.40.0 b/Backend/venv/lib/python3.12/site-packages/Pillow.libs/libpng16-78d422d5.so.16.40.0
new file mode 100755
index 00000000..3c550ee2
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/Pillow.libs/libpng16-78d422d5.so.16.40.0 differ
diff --git a/Backend/venv/lib/python3.12/site-packages/Pillow.libs/libsharpyuv-20f78091.so.0.0.1 b/Backend/venv/lib/python3.12/site-packages/Pillow.libs/libsharpyuv-20f78091.so.0.0.1
new file mode 100755
index 00000000..fde6a091
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/Pillow.libs/libsharpyuv-20f78091.so.0.0.1 differ
diff --git a/Backend/venv/lib/python3.12/site-packages/Pillow.libs/libtiff-91af027d.so.6.0.2 b/Backend/venv/lib/python3.12/site-packages/Pillow.libs/libtiff-91af027d.so.6.0.2
new file mode 100755
index 00000000..ed7dd9a4
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/Pillow.libs/libtiff-91af027d.so.6.0.2 differ
diff --git a/Backend/venv/lib/python3.12/site-packages/Pillow.libs/libwebp-850e2bec.so.7.1.8 b/Backend/venv/lib/python3.12/site-packages/Pillow.libs/libwebp-850e2bec.so.7.1.8
new file mode 100755
index 00000000..cb65cccc
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/Pillow.libs/libwebp-850e2bec.so.7.1.8 differ
diff --git a/Backend/venv/lib/python3.12/site-packages/Pillow.libs/libwebpdemux-df9b36c7.so.2.0.14 b/Backend/venv/lib/python3.12/site-packages/Pillow.libs/libwebpdemux-df9b36c7.so.2.0.14
new file mode 100755
index 00000000..02950840
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/Pillow.libs/libwebpdemux-df9b36c7.so.2.0.14 differ
diff --git a/Backend/venv/lib/python3.12/site-packages/Pillow.libs/libwebpmux-9fe05867.so.3.0.13 b/Backend/venv/lib/python3.12/site-packages/Pillow.libs/libwebpmux-9fe05867.so.3.0.13
new file mode 100755
index 00000000..24f10a58
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/Pillow.libs/libwebpmux-9fe05867.so.3.0.13 differ
diff --git a/Backend/venv/lib/python3.12/site-packages/Pillow.libs/libxcb-f0538cc0.so.1.1.0 b/Backend/venv/lib/python3.12/site-packages/Pillow.libs/libxcb-f0538cc0.so.1.1.0
new file mode 100755
index 00000000..3f64bbce
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/Pillow.libs/libxcb-f0538cc0.so.1.1.0 differ
diff --git a/Backend/venv/lib/python3.12/site-packages/PyMySQL-1.1.0.dist-info/INSTALLER b/Backend/venv/lib/python3.12/site-packages/PyMySQL-1.1.0.dist-info/INSTALLER
new file mode 100644
index 00000000..a1b589e3
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/PyMySQL-1.1.0.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/Backend/venv/lib/python3.12/site-packages/PyMySQL-1.1.0.dist-info/LICENSE b/Backend/venv/lib/python3.12/site-packages/PyMySQL-1.1.0.dist-info/LICENSE
new file mode 100644
index 00000000..86b18e10
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/PyMySQL-1.1.0.dist-info/LICENSE
@@ -0,0 +1,19 @@
+Copyright (c) 2010, 2013 PyMySQL contributors
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/Backend/venv/lib/python3.12/site-packages/PyMySQL-1.1.0.dist-info/METADATA b/Backend/venv/lib/python3.12/site-packages/PyMySQL-1.1.0.dist-info/METADATA
new file mode 100644
index 00000000..5477d25f
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/PyMySQL-1.1.0.dist-info/METADATA
@@ -0,0 +1,134 @@
+Metadata-Version: 2.1
+Name: PyMySQL
+Version: 1.1.0
+Summary: Pure Python MySQL Driver
+Author-email: Inada Naoki, Yutaka Matsubara
+License: MIT License
+Project-URL: Project, https://github.com/PyMySQL/PyMySQL
+Project-URL: Documentation, https://pymysql.readthedocs.io/
+Keywords: MySQL
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Topic :: Database
+Requires-Python: >=3.7
+Description-Content-Type: text/markdown
+License-File: LICENSE
+Provides-Extra: ed25519
+Requires-Dist: PyNaCl (>=1.4.0) ; extra == 'ed25519'
+Provides-Extra: rsa
+Requires-Dist: cryptography ; extra == 'rsa'
+
+[Documentation](https://pymysql.readthedocs.io/)
+[codecov](https://codecov.io/gh/PyMySQL/PyMySQL)
+
+# PyMySQL
+
+This package contains a pure-Python MySQL client library, based on [PEP
+249](https://www.python.org/dev/peps/pep-0249/).
+
+## Requirements
+
+- Python -- one of the following:
+ - [CPython](https://www.python.org/) : 3.7 and newer
+ - [PyPy](https://pypy.org/) : Latest 3.x version
+- MySQL Server -- one of the following:
+ - [MySQL](https://www.mysql.com/) \>= 5.7
+ - [MariaDB](https://mariadb.org/) \>= 10.4
+
+## Installation
+
+The package is uploaded on [PyPI](https://pypi.org/project/PyMySQL).
+
+You can install it with pip:
+
+ $ python3 -m pip install PyMySQL
+
+To use "sha256_password" or "caching_sha2_password" for authenticate,
+you need to install additional dependency:
+
+ $ python3 -m pip install PyMySQL[rsa]
+
+To use MariaDB's "ed25519" authentication method, you need to install
+an additional dependency:
+
+ $ python3 -m pip install PyMySQL[ed25519]
+
+## Documentation
+
+Documentation is available online: https://pymysql.readthedocs.io/
+
+For support, please refer to the
+[StackOverflow](https://stackoverflow.com/questions/tagged/pymysql).
+
+## Example
+
+The following examples make use of a simple table
+
+``` sql
+CREATE TABLE `users` (
+ `id` int(11) NOT NULL AUTO_INCREMENT,
+ `email` varchar(255) COLLATE utf8_bin NOT NULL,
+ `password` varchar(255) COLLATE utf8_bin NOT NULL,
+ PRIMARY KEY (`id`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin
+AUTO_INCREMENT=1 ;
+```
+
+``` python
+import pymysql.cursors
+
+# Connect to the database
+connection = pymysql.connect(host='localhost',
+ user='user',
+ password='passwd',
+ database='db',
+ cursorclass=pymysql.cursors.DictCursor)
+
+with connection:
+ with connection.cursor() as cursor:
+ # Create a new record
+ sql = "INSERT INTO `users` (`email`, `password`) VALUES (%s, %s)"
+ cursor.execute(sql, ('webmaster@python.org', 'very-secret'))
+
+ # connection is not autocommit by default. So you must commit to save
+ # your changes.
+ connection.commit()
+
+ with connection.cursor() as cursor:
+ # Read a single record
+ sql = "SELECT `id`, `password` FROM `users` WHERE `email`=%s"
+ cursor.execute(sql, ('webmaster@python.org',))
+ result = cursor.fetchone()
+ print(result)
+```
+
+This example will print:
+
+``` python
+{'password': 'very-secret', 'id': 1}
+```
+
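+A further minimal sketch (not part of the upstream README; it assumes the
+same `users` table and connection settings as above) showing how to read
+several rows at once with `fetchall`:
+
+``` python
+import pymysql.cursors
+
+connection = pymysql.connect(host='localhost',
+                             user='user',
+                             password='passwd',
+                             database='db',
+                             cursorclass=pymysql.cursors.DictCursor)
+
+with connection:
+    with connection.cursor() as cursor:
+        # %s placeholders are escaped by the driver; never build SQL by hand
+        cursor.execute("SELECT `id`, `email` FROM `users` WHERE `id` >= %s", (1,))
+        for row in cursor.fetchall():
+            # each row is a dict because the connection uses DictCursor
+            print(row['id'], row['email'])
+```
+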
+## Resources
+
+- DB-API 2.0: https://www.python.org/dev/peps/pep-0249/
+- MySQL Reference Manuals: https://dev.mysql.com/doc/
+- MySQL client/server protocol: https://dev.mysql.com/doc/internals/en/client-server-protocol.html
+
+- "Connector" channel in MySQL Community Slack:
+
+- PyMySQL mailing list:
+
+
+## License
+
+PyMySQL is released under the MIT License. See LICENSE for more
+information.
diff --git a/Backend/venv/lib/python3.12/site-packages/PyMySQL-1.1.0.dist-info/RECORD b/Backend/venv/lib/python3.12/site-packages/PyMySQL-1.1.0.dist-info/RECORD
new file mode 100644
index 00000000..257364a0
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/PyMySQL-1.1.0.dist-info/RECORD
@@ -0,0 +1,43 @@
+PyMySQL-1.1.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+PyMySQL-1.1.0.dist-info/LICENSE,sha256=MUEg3GXwgA9ziksxQAx27hTezR--d86cNUCkIbhup7Y,1070
+PyMySQL-1.1.0.dist-info/METADATA,sha256=FIAoGrL3L7e8pvWz1KxL5Wx7CtXH_QOwtTHq_hjBjYQ,4355
+PyMySQL-1.1.0.dist-info/RECORD,,
+PyMySQL-1.1.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+PyMySQL-1.1.0.dist-info/WHEEL,sha256=pkctZYzUS4AYVn6dJ-7367OJZivF2e8RA9b_ZBjif18,92
+PyMySQL-1.1.0.dist-info/top_level.txt,sha256=IKlV-f4o90sOdnMd6HBvo0l2nqfJOGUzkwZeaEEGuRg,8
+pymysql/__init__.py,sha256=j699mDBexrjMZyGsM6LTZeww5aLtJfcAEpXJyJc6zac,4264
+pymysql/__pycache__/__init__.cpython-312.pyc,,
+pymysql/__pycache__/_auth.cpython-312.pyc,,
+pymysql/__pycache__/charset.cpython-312.pyc,,
+pymysql/__pycache__/connections.cpython-312.pyc,,
+pymysql/__pycache__/converters.cpython-312.pyc,,
+pymysql/__pycache__/cursors.cpython-312.pyc,,
+pymysql/__pycache__/err.cpython-312.pyc,,
+pymysql/__pycache__/optionfile.cpython-312.pyc,,
+pymysql/__pycache__/protocol.cpython-312.pyc,,
+pymysql/__pycache__/times.cpython-312.pyc,,
+pymysql/_auth.py,sha256=vDQm9OjORdkofdXiQMQ49RLWypMxa5zKLoS_GnvIcyQ,7416
+pymysql/charset.py,sha256=_f1uIga7AaWoeKLXzA-9Xra9jYPqqgDiT78ikqtn5yE,10238
+pymysql/connections.py,sha256=nktipI748AaKRu6q6hv0CsZ3KG6K9tWAkAWKsbwgSEg,53589
+pymysql/constants/CLIENT.py,sha256=SSvMFPZCTVMU1UWa4zOrfhYMDdR2wG2mS0E5GzJhDsg,878
+pymysql/constants/COMMAND.py,sha256=TGITAUcNWlq2Gwg2wv5UK2ykdTd4LYTk_EcJJOCpGIc,679
+pymysql/constants/CR.py,sha256=Qk35FWRMxRHd6Sa9CCIATMh7jegR3xnLdrdaBCT0dTQ,2320
+pymysql/constants/ER.py,sha256=nwqX_r0o4mmN4Cxm7NVRyJOTVov_5Gbl5peGe6oz5fk,12357
+pymysql/constants/FIELD_TYPE.py,sha256=ytFzgAnGmb9hvdsBlnK68qdZv_a6jYFIXT6VSAb60z8,370
+pymysql/constants/FLAG.py,sha256=Fy-PrCLnUI7fx_o5WypYnUAzWAM0E9d5yL8fFRVKffY,214
+pymysql/constants/SERVER_STATUS.py,sha256=m28Iq5JGCFCWLhafE73-iOvw_9gDGqnytW3NkHpbugA,333
+pymysql/constants/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+pymysql/constants/__pycache__/CLIENT.cpython-312.pyc,,
+pymysql/constants/__pycache__/COMMAND.cpython-312.pyc,,
+pymysql/constants/__pycache__/CR.cpython-312.pyc,,
+pymysql/constants/__pycache__/ER.cpython-312.pyc,,
+pymysql/constants/__pycache__/FIELD_TYPE.cpython-312.pyc,,
+pymysql/constants/__pycache__/FLAG.cpython-312.pyc,,
+pymysql/constants/__pycache__/SERVER_STATUS.cpython-312.pyc,,
+pymysql/constants/__pycache__/__init__.cpython-312.pyc,,
+pymysql/converters.py,sha256=wxPYTl9matiMD-KYKtjB5ujHWllj1jc-kwWM6-L0oms,9591
+pymysql/cursors.py,sha256=a4-JHYP148kx-9qVNRz8vTtlilGlKDbk_QtFlWph5L4,16535
+pymysql/err.py,sha256=bpxayM4IUnFQAd8bUZ3PFsFomi9QSfBk-0TJXyKU2FI,3773
+pymysql/optionfile.py,sha256=eQoz6c43yvmHtp5MI9TB2GPRdoggOLemcUWABksfutk,651
+pymysql/protocol.py,sha256=zcYHCurGOymDgNo1DcCKThi_8zUnQOgaiu3M2VpqzfM,11863
+pymysql/times.py,sha256=_qXgDaYwsHntvpIKSKXp1rrYIgtq6Z9pLyLnO2XNoL0,360
diff --git a/Backend/venv/lib/python3.12/site-packages/PyMySQL-1.1.0.dist-info/REQUESTED b/Backend/venv/lib/python3.12/site-packages/PyMySQL-1.1.0.dist-info/REQUESTED
new file mode 100644
index 00000000..e69de29b
diff --git a/Backend/venv/lib/python3.12/site-packages/PyMySQL-1.1.0.dist-info/WHEEL b/Backend/venv/lib/python3.12/site-packages/PyMySQL-1.1.0.dist-info/WHEEL
new file mode 100644
index 00000000..1f37c02f
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/PyMySQL-1.1.0.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.40.0)
+Root-Is-Purelib: true
+Tag: py3-none-any
+
diff --git a/Backend/venv/lib/python3.12/site-packages/PyMySQL-1.1.0.dist-info/top_level.txt b/Backend/venv/lib/python3.12/site-packages/PyMySQL-1.1.0.dist-info/top_level.txt
new file mode 100644
index 00000000..d4a7eda5
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/PyMySQL-1.1.0.dist-info/top_level.txt
@@ -0,0 +1 @@
+pymysql
diff --git a/Backend/venv/lib/python3.12/site-packages/SQLAlchemy-2.0.23.dist-info/INSTALLER b/Backend/venv/lib/python3.12/site-packages/SQLAlchemy-2.0.23.dist-info/INSTALLER
new file mode 100644
index 00000000..a1b589e3
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/SQLAlchemy-2.0.23.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/Backend/venv/lib/python3.12/site-packages/SQLAlchemy-2.0.23.dist-info/LICENSE b/Backend/venv/lib/python3.12/site-packages/SQLAlchemy-2.0.23.dist-info/LICENSE
new file mode 100644
index 00000000..7bf9bbe9
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/SQLAlchemy-2.0.23.dist-info/LICENSE
@@ -0,0 +1,19 @@
+Copyright 2005-2023 SQLAlchemy authors and contributors <see AUTHORS file>.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+of the Software, and to permit persons to whom the Software is furnished to do
+so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/Backend/venv/lib/python3.12/site-packages/SQLAlchemy-2.0.23.dist-info/METADATA b/Backend/venv/lib/python3.12/site-packages/SQLAlchemy-2.0.23.dist-info/METADATA
new file mode 100644
index 00000000..9f2808f0
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/SQLAlchemy-2.0.23.dist-info/METADATA
@@ -0,0 +1,241 @@
+Metadata-Version: 2.1
+Name: SQLAlchemy
+Version: 2.0.23
+Summary: Database Abstraction Library
+Home-page: https://www.sqlalchemy.org
+Author: Mike Bayer
+Author-email: mike_mp@zzzcomputing.com
+License: MIT
+Project-URL: Documentation, https://docs.sqlalchemy.org
+Project-URL: Issue Tracker, https://github.com/sqlalchemy/sqlalchemy/
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Topic :: Database :: Front-Ends
+Requires-Python: >=3.7
+Description-Content-Type: text/x-rst
+License-File: LICENSE
+Requires-Dist: typing-extensions >=4.2.0
+Requires-Dist: greenlet !=0.4.17 ; platform_machine == "aarch64" or (platform_machine == "ppc64le" or (platform_machine == "x86_64" or (platform_machine == "amd64" or (platform_machine == "AMD64" or (platform_machine == "win32" or platform_machine == "WIN32")))))
+Requires-Dist: importlib-metadata ; python_version < "3.8"
+Provides-Extra: aiomysql
+Requires-Dist: greenlet !=0.4.17 ; extra == 'aiomysql'
+Requires-Dist: aiomysql >=0.2.0 ; extra == 'aiomysql'
+Provides-Extra: aioodbc
+Requires-Dist: greenlet !=0.4.17 ; extra == 'aioodbc'
+Requires-Dist: aioodbc ; extra == 'aioodbc'
+Provides-Extra: aiosqlite
+Requires-Dist: greenlet !=0.4.17 ; extra == 'aiosqlite'
+Requires-Dist: aiosqlite ; extra == 'aiosqlite'
+Requires-Dist: typing-extensions !=3.10.0.1 ; extra == 'aiosqlite'
+Provides-Extra: asyncio
+Requires-Dist: greenlet !=0.4.17 ; extra == 'asyncio'
+Provides-Extra: asyncmy
+Requires-Dist: greenlet !=0.4.17 ; extra == 'asyncmy'
+Requires-Dist: asyncmy !=0.2.4,!=0.2.6,>=0.2.3 ; extra == 'asyncmy'
+Provides-Extra: mariadb_connector
+Requires-Dist: mariadb !=1.1.2,!=1.1.5,>=1.0.1 ; extra == 'mariadb_connector'
+Provides-Extra: mssql
+Requires-Dist: pyodbc ; extra == 'mssql'
+Provides-Extra: mssql_pymssql
+Requires-Dist: pymssql ; extra == 'mssql_pymssql'
+Provides-Extra: mssql_pyodbc
+Requires-Dist: pyodbc ; extra == 'mssql_pyodbc'
+Provides-Extra: mypy
+Requires-Dist: mypy >=0.910 ; extra == 'mypy'
+Provides-Extra: mysql
+Requires-Dist: mysqlclient >=1.4.0 ; extra == 'mysql'
+Provides-Extra: mysql_connector
+Requires-Dist: mysql-connector-python ; extra == 'mysql_connector'
+Provides-Extra: oracle
+Requires-Dist: cx-oracle >=8 ; extra == 'oracle'
+Provides-Extra: oracle_oracledb
+Requires-Dist: oracledb >=1.0.1 ; extra == 'oracle_oracledb'
+Provides-Extra: postgresql
+Requires-Dist: psycopg2 >=2.7 ; extra == 'postgresql'
+Provides-Extra: postgresql_asyncpg
+Requires-Dist: greenlet !=0.4.17 ; extra == 'postgresql_asyncpg'
+Requires-Dist: asyncpg ; extra == 'postgresql_asyncpg'
+Provides-Extra: postgresql_pg8000
+Requires-Dist: pg8000 >=1.29.1 ; extra == 'postgresql_pg8000'
+Provides-Extra: postgresql_psycopg
+Requires-Dist: psycopg >=3.0.7 ; extra == 'postgresql_psycopg'
+Provides-Extra: postgresql_psycopg2binary
+Requires-Dist: psycopg2-binary ; extra == 'postgresql_psycopg2binary'
+Provides-Extra: postgresql_psycopg2cffi
+Requires-Dist: psycopg2cffi ; extra == 'postgresql_psycopg2cffi'
+Provides-Extra: postgresql_psycopgbinary
+Requires-Dist: psycopg[binary] >=3.0.7 ; extra == 'postgresql_psycopgbinary'
+Provides-Extra: pymysql
+Requires-Dist: pymysql ; extra == 'pymysql'
+Provides-Extra: sqlcipher
+Requires-Dist: sqlcipher3-binary ; extra == 'sqlcipher'
+
+SQLAlchemy
+==========
+
+|PyPI| |Python| |Downloads|
+
+.. |PyPI| image:: https://img.shields.io/pypi/v/sqlalchemy
+ :target: https://pypi.org/project/sqlalchemy
+ :alt: PyPI
+
+.. |Python| image:: https://img.shields.io/pypi/pyversions/sqlalchemy
+ :target: https://pypi.org/project/sqlalchemy
+ :alt: PyPI - Python Version
+
+.. |Downloads| image:: https://static.pepy.tech/badge/sqlalchemy/month
+ :target: https://pepy.tech/project/sqlalchemy
+ :alt: PyPI - Downloads
+
+
+The Python SQL Toolkit and Object Relational Mapper
+
+Introduction
+-------------
+
+SQLAlchemy is the Python SQL toolkit and Object Relational Mapper
+that gives application developers the full power and
+flexibility of SQL. SQLAlchemy provides a full suite
+of well known enterprise-level persistence patterns,
+designed for efficient and high-performing database
+access, adapted into a simple and Pythonic domain
+language.
+
+Major SQLAlchemy features include (a short Core sketch follows the list):
+
+* An industrial strength ORM, built
+ from the core on the identity map, unit of work,
+ and data mapper patterns. These patterns
+ allow transparent persistence of objects
+ using a declarative configuration system.
+ Domain models
+ can be constructed and manipulated naturally,
+ and changes are synchronized with the
+ current transaction automatically.
+* A relationally-oriented query system, exposing
+ the full range of SQL's capabilities
+ explicitly, including joins, subqueries,
+ correlation, and most everything else,
+ in terms of the object model.
+ Writing queries with the ORM uses the same
+ techniques of relational composition you use
+ when writing SQL. While you can drop into
+ literal SQL at any time, it's virtually never
+ needed.
+* A comprehensive and flexible system
+ of eager loading for related collections and objects.
+ Collections are cached within a session,
+ and can be loaded on individual access, all
+ at once using joins, or by query per collection
+ across the full result set.
+* A Core SQL construction system and DBAPI
+ interaction layer. The SQLAlchemy Core is
+ separate from the ORM and is a full database
+ abstraction layer in its own right, and includes
+ an extensible Python-based SQL expression
+ language, schema metadata, connection pooling,
+ type coercion, and custom types.
+* All primary and foreign key constraints are
+ assumed to be composite and natural. Surrogate
+ integer primary keys are of course still the
+ norm, but SQLAlchemy never assumes or hardcodes
+ to this model.
+* Database introspection and generation. Database
+ schemas can be "reflected" in one step into
+ Python structures representing database metadata;
+ those same structures can then generate
+ CREATE statements right back out - all within
+ the Core, independent of the ORM.
+
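+As a brief, hedged sketch of the Core expression language described above
+(the table, columns, and in-memory SQLite URL are invented for the example,
+not taken from this distribution)::
+
+    from sqlalchemy import (Column, Integer, MetaData, String, Table,
+                            create_engine, select)
+
+    engine = create_engine("sqlite://")   # throwaway in-memory database
+    metadata = MetaData()
+    users = Table(
+        "users", metadata,
+        Column("id", Integer, primary_key=True),
+        Column("name", String(50)),
+    )
+    metadata.create_all(engine)           # Core emits the CREATE TABLE
+
+    with engine.begin() as conn:          # transaction; commits on success
+        conn.execute(users.insert(), [{"name": "spongebob"}])
+        for row in conn.execute(select(users).where(users.c.name == "spongebob")):
+            print(row.id, row.name)
+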
+SQLAlchemy's philosophy:
+
+* SQL databases behave less and less like object
+ collections the more size and performance start to
+ matter; object collections behave less and less like
+ tables and rows the more abstraction starts to matter.
+ SQLAlchemy aims to accommodate both of these
+ principles.
+* An ORM doesn't need to hide the "R". A relational
+ database provides rich, set-based functionality
+ that should be fully exposed. SQLAlchemy's
+ ORM provides an open-ended set of patterns
+ that allow a developer to construct a custom
+ mediation layer between a domain model and
+ a relational schema, turning the so-called
+ "object relational impedance" issue into
+ a distant memory.
+* The developer, in all cases, makes all decisions
+ regarding the design, structure, and naming conventions
+ of both the object model as well as the relational
+ schema. SQLAlchemy only provides the means
+ to automate the execution of these decisions.
+* With SQLAlchemy, there's no such thing as
+ "the ORM generated a bad query" - you
+ retain full control over the structure of
+ queries, including how joins are organized,
+  how subqueries and correlation are used, what
+ columns are requested. Everything SQLAlchemy
+ does is ultimately the result of a developer-initiated
+ decision.
+* Don't use an ORM if the problem doesn't need one.
+ SQLAlchemy consists of a Core and separate ORM
+ component. The Core offers a full SQL expression
+ language that allows Pythonic construction
+ of SQL constructs that render directly to SQL
+ strings for a target database, returning
+ result sets that are essentially enhanced DBAPI
+ cursors.
+* Transactions should be the norm. With SQLAlchemy's
+ ORM, nothing goes to permanent storage until
+ commit() is called. SQLAlchemy encourages applications
+ to create a consistent means of delineating
+ the start and end of a series of operations.
+* Never render a literal value in a SQL statement.
+ Bound parameters are used to the greatest degree
+ possible, allowing query optimizers to cache
+ query plans effectively and making SQL injection
+  attacks a non-issue (see the sketch after this list).
+
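+A short sketch of the bound-parameter point above: with ``text()``, the
+value travels to the driver as a parameter and is never rendered into the
+SQL string (the in-memory SQLite URL is invented for the example)::
+
+    from sqlalchemy import create_engine, text
+
+    engine = create_engine("sqlite://")
+    with engine.connect() as conn:
+        # ":name" is a bound parameter; no literal ever appears in the SQL
+        result = conn.execute(text("SELECT :name AS greeted"), {"name": "sandy"})
+        print(result.scalar_one())        # -> sandy
+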
+Documentation
+-------------
+
+Latest documentation is at:
+
+https://www.sqlalchemy.org/docs/
+
+Installation / Requirements
+---------------------------
+
+Full documentation for installation is at
+`Installation <https://docs.sqlalchemy.org/en/latest/intro.html#installation>`_.
+
+Getting Help / Development / Bug reporting
+------------------------------------------
+
+Please refer to the `SQLAlchemy Community Guide <https://www.sqlalchemy.org/develop.html>`_.
+
+Code of Conduct
+---------------
+
+Above all, SQLAlchemy places great emphasis on polite, thoughtful, and
+constructive communication between users and developers.
+Please see our current Code of Conduct at
+`Code of Conduct <https://www.sqlalchemy.org/codeofconduct.html>`_.
+
+License
+-------
+
+SQLAlchemy is distributed under the `MIT license
+<https://www.opensource.org/licenses/mit-license.php>`_.
+
diff --git a/Backend/venv/lib/python3.12/site-packages/SQLAlchemy-2.0.23.dist-info/RECORD b/Backend/venv/lib/python3.12/site-packages/SQLAlchemy-2.0.23.dist-info/RECORD
new file mode 100644
index 00000000..9569195d
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/SQLAlchemy-2.0.23.dist-info/RECORD
@@ -0,0 +1,530 @@
+SQLAlchemy-2.0.23.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+SQLAlchemy-2.0.23.dist-info/LICENSE,sha256=2lSTeluT1aC-5eJXO8vhkzf93qCSeV_mFXLrv3tNdIU,1100
+SQLAlchemy-2.0.23.dist-info/METADATA,sha256=znDChLueFNPCOPuNix-FfY7FG6aQOCM-lQwwN-cPLQs,9551
+SQLAlchemy-2.0.23.dist-info/RECORD,,
+SQLAlchemy-2.0.23.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+SQLAlchemy-2.0.23.dist-info/WHEEL,sha256=JmQLNqDEfvnYMfsIaVeSP3fmUcYDwmF12m3QYW0c7QQ,152
+SQLAlchemy-2.0.23.dist-info/top_level.txt,sha256=rp-ZgB7D8G11ivXON5VGPjupT1voYmWqkciDt5Uaw_Q,11
+sqlalchemy/__init__.py,sha256=DjKCAltzrHGfaVdXVeFJpBmTaX6JmyloHANzewBUWo4,12708
+sqlalchemy/__pycache__/__init__.cpython-312.pyc,,
+sqlalchemy/__pycache__/events.cpython-312.pyc,,
+sqlalchemy/__pycache__/exc.cpython-312.pyc,,
+sqlalchemy/__pycache__/inspection.cpython-312.pyc,,
+sqlalchemy/__pycache__/log.cpython-312.pyc,,
+sqlalchemy/__pycache__/schema.cpython-312.pyc,,
+sqlalchemy/__pycache__/types.cpython-312.pyc,,
+sqlalchemy/connectors/__init__.py,sha256=uKUYWQoXyleIyjWBuh7gzgnazJokx3DaasKJbFOfQGA,476
+sqlalchemy/connectors/__pycache__/__init__.cpython-312.pyc,,
+sqlalchemy/connectors/__pycache__/aioodbc.cpython-312.pyc,,
+sqlalchemy/connectors/__pycache__/asyncio.cpython-312.pyc,,
+sqlalchemy/connectors/__pycache__/pyodbc.cpython-312.pyc,,
+sqlalchemy/connectors/aioodbc.py,sha256=QiafuN9bx_wcIs8tByLftTmGAegXPoFPwUaxCDU_ZQA,5737
+sqlalchemy/connectors/asyncio.py,sha256=ZZmJSFT50u-GEjZzytQOdB_tkBFxi3XPWRrNhs_nASc,6139
+sqlalchemy/connectors/pyodbc.py,sha256=NskMydn26ZkHL8aQ1V3L4WIAWin3zwJ5VEnlHvAD1DE,8453
+sqlalchemy/cyextension/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+sqlalchemy/cyextension/__pycache__/__init__.cpython-312.pyc,,
+sqlalchemy/cyextension/collections.cpython-312-x86_64-linux-gnu.so,sha256=qPSMnyXVSLYHMr_ot_ZK7yEYadhTuT8ryb6eTMFFWrM,1947440
+sqlalchemy/cyextension/collections.pyx,sha256=KDI5QTOyYz9gDl-3d7MbGMA0Kc-wxpJqnLmCaUmQy2U,12323
+sqlalchemy/cyextension/immutabledict.cpython-312-x86_64-linux-gnu.so,sha256=J9m0gK6R8PGR36jxAKx415VxX0-0fqvbQAP9-DDU1qA,811232
+sqlalchemy/cyextension/immutabledict.pxd,sha256=oc8BbnQwDg7pWAdThB-fzu8s9_ViOe1Ds-8T0r0POjI,41
+sqlalchemy/cyextension/immutabledict.pyx,sha256=aQJPZKjcqbO8jHDqpC9F-v-ew2qAjUscc5CntaheZUk,3285
+sqlalchemy/cyextension/processors.cpython-312-x86_64-linux-gnu.so,sha256=WOLcEWRNXn4UtJGhzF5B1h7JpPPfn-ziQMT0lkhobQE,533968
+sqlalchemy/cyextension/processors.pyx,sha256=0swFIBdR19x1kPRe-dijBaLW898AhH6QJizbv4ho9pk,1545
+sqlalchemy/cyextension/resultproxy.cpython-312-x86_64-linux-gnu.so,sha256=bte73oURZXuV7YvkjyGo-OjRCnSgYukqDp5KM9-Z8xY,626112
+sqlalchemy/cyextension/resultproxy.pyx,sha256=cDtMjLTdC47g7cME369NSOCck3JwG2jwZ6j25no3_gw,2477
+sqlalchemy/cyextension/util.cpython-312-x86_64-linux-gnu.so,sha256=8yMbb069NQN1b6yAsCBCMpbX94sH4iLs61vPNxd0bOg,958760
+sqlalchemy/cyextension/util.pyx,sha256=lv03p63oVn23jLhMI4_RYGewUnJfh-4FkrNMEFL7A3Y,2289
+sqlalchemy/dialects/__init__.py,sha256=hLsgIEomunlp4mNLnvjCQTLOnBVva8N7IT2-RYrN2_4,1770
+sqlalchemy/dialects/__pycache__/__init__.cpython-312.pyc,,
+sqlalchemy/dialects/__pycache__/_typing.cpython-312.pyc,,
+sqlalchemy/dialects/_typing.py,sha256=P2ML2o4b_bWAAy3zbdoUjx3vXsMNwpiOblef8ThCxlM,648
+sqlalchemy/dialects/mssql/__init__.py,sha256=CYbbydyMSLjUq8vY1siNStd4lvjVXod8ddeDS6ELHLk,1871
+sqlalchemy/dialects/mssql/__pycache__/__init__.cpython-312.pyc,,
+sqlalchemy/dialects/mssql/__pycache__/aioodbc.cpython-312.pyc,,
+sqlalchemy/dialects/mssql/__pycache__/base.cpython-312.pyc,,
+sqlalchemy/dialects/mssql/__pycache__/information_schema.cpython-312.pyc,,
+sqlalchemy/dialects/mssql/__pycache__/json.cpython-312.pyc,,
+sqlalchemy/dialects/mssql/__pycache__/provision.cpython-312.pyc,,
+sqlalchemy/dialects/mssql/__pycache__/pymssql.cpython-312.pyc,,
+sqlalchemy/dialects/mssql/__pycache__/pyodbc.cpython-312.pyc,,
+sqlalchemy/dialects/mssql/aioodbc.py,sha256=ncj3yyfvW91o3g19GB5s1I0oaZKUO_P-R2nwnLF0t9E,2013
+sqlalchemy/dialects/mssql/base.py,sha256=l9vX6fK6DJEYA00N9uDnvSbqfgvxXfYUn2C4AF5T920,133649
+sqlalchemy/dialects/mssql/information_schema.py,sha256=ll0zAupJ4cPvhi9v5hTi7PQLU1lae4o6eQ5Vg7gykXQ,8074
+sqlalchemy/dialects/mssql/json.py,sha256=B0m6H08CKuk-yomDHcCwfQbVuVN2WLufuVueA_qb1NQ,4573
+sqlalchemy/dialects/mssql/provision.py,sha256=x7XRSQDxz4jz2uIpqwhuIXpL9bic0Vw7Mhy39HOkyqY,5013
+sqlalchemy/dialects/mssql/pymssql.py,sha256=BfJp9t-IQabqWXySJBmP9pwNTWnJqbjA2jJM9M4XeWc,4029
+sqlalchemy/dialects/mssql/pyodbc.py,sha256=qwZ8ByOTZ1WObjxeOravoJBSBX-s4RJ_PZ5VJ_Ch5Ws,27048
+sqlalchemy/dialects/mysql/__init__.py,sha256=btLABiNnmbWt9ziW-XgVWEB1qHWQcSFz7zxZNw4m_LY,2144
+sqlalchemy/dialects/mysql/__pycache__/__init__.cpython-312.pyc,,
+sqlalchemy/dialects/mysql/__pycache__/aiomysql.cpython-312.pyc,,
+sqlalchemy/dialects/mysql/__pycache__/asyncmy.cpython-312.pyc,,
+sqlalchemy/dialects/mysql/__pycache__/base.cpython-312.pyc,,
+sqlalchemy/dialects/mysql/__pycache__/cymysql.cpython-312.pyc,,
+sqlalchemy/dialects/mysql/__pycache__/dml.cpython-312.pyc,,
+sqlalchemy/dialects/mysql/__pycache__/enumerated.cpython-312.pyc,,
+sqlalchemy/dialects/mysql/__pycache__/expression.cpython-312.pyc,,
+sqlalchemy/dialects/mysql/__pycache__/json.cpython-312.pyc,,
+sqlalchemy/dialects/mysql/__pycache__/mariadb.cpython-312.pyc,,
+sqlalchemy/dialects/mysql/__pycache__/mariadbconnector.cpython-312.pyc,,
+sqlalchemy/dialects/mysql/__pycache__/mysqlconnector.cpython-312.pyc,,
+sqlalchemy/dialects/mysql/__pycache__/mysqldb.cpython-312.pyc,,
+sqlalchemy/dialects/mysql/__pycache__/provision.cpython-312.pyc,,
+sqlalchemy/dialects/mysql/__pycache__/pymysql.cpython-312.pyc,,
+sqlalchemy/dialects/mysql/__pycache__/pyodbc.cpython-312.pyc,,
+sqlalchemy/dialects/mysql/__pycache__/reflection.cpython-312.pyc,,
+sqlalchemy/dialects/mysql/__pycache__/reserved_words.cpython-312.pyc,,
+sqlalchemy/dialects/mysql/__pycache__/types.cpython-312.pyc,,
+sqlalchemy/dialects/mysql/aiomysql.py,sha256=Zb-_F9Pzl0t-fT1bZwbNNne6jjCUqBXxeizbhMFPqls,9750
+sqlalchemy/dialects/mysql/asyncmy.py,sha256=zqupDz7AJihjv3E8w_4XAtq95d8stdrETNx60MLNVr0,9819
+sqlalchemy/dialects/mysql/base.py,sha256=q-DzkR_txwDTeWTEByzHAoIArYU3Bb5HT2Bnmuw7WIM,120688
+sqlalchemy/dialects/mysql/cymysql.py,sha256=5CQVJAlqQ3pT4IDGSQJH2hCzj-EWjUitA21MLqJwEEs,2291
+sqlalchemy/dialects/mysql/dml.py,sha256=qw0ZweHbMsbNyVSfC17HqylCnf7XAuIjtgofiWABT8k,7636
+sqlalchemy/dialects/mysql/enumerated.py,sha256=1L2J2wT6nQEmRS4z-jzZpoi44IqIaHgBRZZB9m55czo,8439
+sqlalchemy/dialects/mysql/expression.py,sha256=WW5G2XPwqJfXjuzHBt4BRP0pCLcPJkPD1mvZX1g0JL0,4066
+sqlalchemy/dialects/mysql/json.py,sha256=JlSFBAHhJ9JmV-3azH80xkLgeh7g6A6DVyNVCNZiKPU,2260
+sqlalchemy/dialects/mysql/mariadb.py,sha256=Sugyngvo6j6SfFFuJ23rYeFWEPdZ9Ji9guElsk_1WSQ,844
+sqlalchemy/dialects/mysql/mariadbconnector.py,sha256=F1VPosecC1hDZqjzZI29j4GUduyU4ewPwb-ekBQva5w,8725
+sqlalchemy/dialects/mysql/mysqlconnector.py,sha256=5glmkPhD_KP-Mci8ZXBr4yzqH1MDfzCJ9F_kZNyXcGo,5666
+sqlalchemy/dialects/mysql/mysqldb.py,sha256=R5BDiXiHX5oFuAOzyxZ6TYUTGzly-dulMeQLkeia6kk,9649
+sqlalchemy/dialects/mysql/provision.py,sha256=uPT6-BIoP_12XLmWAza1TDFNhOVVJ3rmQoMH7nvh-Vg,3226
+sqlalchemy/dialects/mysql/pymysql.py,sha256=d2-00IPoyEDkR9REQTE-DGEQrGshUq_0G5liZ5FiSEM,4032
+sqlalchemy/dialects/mysql/pyodbc.py,sha256=mkOvumrxpmAi6noZlkaTVKz2F7G5vLh2vx0cZSn9VTA,4288
+sqlalchemy/dialects/mysql/reflection.py,sha256=ak6E-eCP9346ixnILYNJcrRYblWbIT0sjXf4EqmfBsY,22556
+sqlalchemy/dialects/mysql/reserved_words.py,sha256=DsPHsW3vwOrvU7bv3Nbfact2Z_jyZ9xUTT-mdeQvqxo,9145
+sqlalchemy/dialects/mysql/types.py,sha256=i8DpRkOL1QhPErZ25AmCQOuFLciWhdjNL3I0CeHEhdY,24258
+sqlalchemy/dialects/oracle/__init__.py,sha256=pjk1aWi9XFCAHWNSJzSzmoIcL32-AkU_1J9IV4PtwpA,1318
+sqlalchemy/dialects/oracle/__pycache__/__init__.cpython-312.pyc,,
+sqlalchemy/dialects/oracle/__pycache__/base.cpython-312.pyc,,
+sqlalchemy/dialects/oracle/__pycache__/cx_oracle.cpython-312.pyc,,
+sqlalchemy/dialects/oracle/__pycache__/dictionary.cpython-312.pyc,,
+sqlalchemy/dialects/oracle/__pycache__/oracledb.cpython-312.pyc,,
+sqlalchemy/dialects/oracle/__pycache__/provision.cpython-312.pyc,,
+sqlalchemy/dialects/oracle/__pycache__/types.cpython-312.pyc,,
+sqlalchemy/dialects/oracle/base.py,sha256=u55_R9NrCRijud7ioHMxT-r0MSW0gMFjOwbrDdPgFsc,118036
+sqlalchemy/dialects/oracle/cx_oracle.py,sha256=L0GvcB6xb0-zyv5dx3bpQCeptp0KSqH6g9FUQ4y-d-g,55108
+sqlalchemy/dialects/oracle/dictionary.py,sha256=iUoyFEFM8z0sfVWR2n_nnre14kaQkV_syKO0R5Dos4M,19487
+sqlalchemy/dialects/oracle/oracledb.py,sha256=_-fUQ94xai80B7v9WLVGoGDIv8u54nVspBdyGEyI76g,3457
+sqlalchemy/dialects/oracle/provision.py,sha256=5cvIc3yTWxz4AIRYxcesbRJ1Ft-zT9GauQ911yPnN2o,8055
+sqlalchemy/dialects/oracle/types.py,sha256=TeOhUW5W9qZC8SaJ-9b3u6OvOPOarNq4MmCQ7l3wWX0,8204
+sqlalchemy/dialects/postgresql/__init__.py,sha256=bZEPsLbRtB7s6TMQAHCIzKBgkxUa3eDXvCkeARua37E,3734
+sqlalchemy/dialects/postgresql/__pycache__/__init__.cpython-312.pyc,,
+sqlalchemy/dialects/postgresql/__pycache__/_psycopg_common.cpython-312.pyc,,
+sqlalchemy/dialects/postgresql/__pycache__/array.cpython-312.pyc,,
+sqlalchemy/dialects/postgresql/__pycache__/asyncpg.cpython-312.pyc,,
+sqlalchemy/dialects/postgresql/__pycache__/base.cpython-312.pyc,,
+sqlalchemy/dialects/postgresql/__pycache__/dml.cpython-312.pyc,,
+sqlalchemy/dialects/postgresql/__pycache__/ext.cpython-312.pyc,,
+sqlalchemy/dialects/postgresql/__pycache__/hstore.cpython-312.pyc,,
+sqlalchemy/dialects/postgresql/__pycache__/json.cpython-312.pyc,,
+sqlalchemy/dialects/postgresql/__pycache__/named_types.cpython-312.pyc,,
+sqlalchemy/dialects/postgresql/__pycache__/operators.cpython-312.pyc,,
+sqlalchemy/dialects/postgresql/__pycache__/pg8000.cpython-312.pyc,,
+sqlalchemy/dialects/postgresql/__pycache__/pg_catalog.cpython-312.pyc,,
+sqlalchemy/dialects/postgresql/__pycache__/provision.cpython-312.pyc,,
+sqlalchemy/dialects/postgresql/__pycache__/psycopg.cpython-312.pyc,,
+sqlalchemy/dialects/postgresql/__pycache__/psycopg2.cpython-312.pyc,,
+sqlalchemy/dialects/postgresql/__pycache__/psycopg2cffi.cpython-312.pyc,,
+sqlalchemy/dialects/postgresql/__pycache__/ranges.cpython-312.pyc,,
+sqlalchemy/dialects/postgresql/__pycache__/types.cpython-312.pyc,,
+sqlalchemy/dialects/postgresql/_psycopg_common.py,sha256=U3aWzbKD3VOj6Z6r-4IsIQmtjGGIB4RDZH6NXfd8Xz0,5655
+sqlalchemy/dialects/postgresql/array.py,sha256=tLyU9GDAeIypNhjTuFQUYbaTeijVM1VVJS6UdzzXXn4,13682
+sqlalchemy/dialects/postgresql/asyncpg.py,sha256=XNaoOZ5Da4-jUTaES1zEOTEW3WG8UKyVCoIS3LsFhzE,39967
+sqlalchemy/dialects/postgresql/base.py,sha256=DGhaquFJWDQL7wIvQ2EE57LxD7zGR06BKQxvNZHFLgY,175634
+sqlalchemy/dialects/postgresql/dml.py,sha256=_He69efdpDA5gGmBsE7Lo4ViSi3QnR38BiFmrR1tw6k,11203
+sqlalchemy/dialects/postgresql/ext.py,sha256=oPP22Pq-n2lMmQ8ahifYmsmzRhSiSv1RV-xrTT0gycw,16253
+sqlalchemy/dialects/postgresql/hstore.py,sha256=q5x0npbAMI8cdRFGTMwLoWFj9P1G9DUkw5OEUCfTXpI,11532
+sqlalchemy/dialects/postgresql/json.py,sha256=panGtnEbcirQDy4yR2huWydFqa_Kmv8xhpLyf-SSRWE,11203
+sqlalchemy/dialects/postgresql/named_types.py,sha256=zNoHsP3nVq5xxA7SOQ6LLDwYZEHFciZ-nDjw_I9f_G0,17092
+sqlalchemy/dialects/postgresql/operators.py,sha256=MB40xq1124OnhUzkvtbnTmxEiey0VxMOYyznF96wwhI,2799
+sqlalchemy/dialects/postgresql/pg8000.py,sha256=w6pJ3LaIKWmnwvB0Pr1aTJX5OKNtG5RNClVfkE019vU,18620
+sqlalchemy/dialects/postgresql/pg_catalog.py,sha256=0lLnIgxfCrqkx_LNijMxo0trNLsodcd8KwretZIj4uM,8875
+sqlalchemy/dialects/postgresql/provision.py,sha256=oxyAzs8_PhuK0ChivXC3l2Nldih3_HKffvGsZqD8XWI,5509
+sqlalchemy/dialects/postgresql/psycopg.py,sha256=YMubzQHMYN1By8QJScIPb_PwNiACv6srddQ6nX6WltQ,22238
+sqlalchemy/dialects/postgresql/psycopg2.py,sha256=3Xci4bTA2BvhrZAQa727uFWdaXEZmvfD-Z-upE3NyQE,31592
+sqlalchemy/dialects/postgresql/psycopg2cffi.py,sha256=2EOuDwBetfvelcPoTzSwOHe6X8lTwaYH7znNzXJt9eM,1739
+sqlalchemy/dialects/postgresql/ranges.py,sha256=yHB1BRlUreQPZB3VEn0KMMLf02zjf5jjYdmg4N4S2Sw,30220
+sqlalchemy/dialects/postgresql/types.py,sha256=l24rs8_nK4vqLyQC0aUkf4S7ecw6T_7Pgq50Icc5CBs,7292
+sqlalchemy/dialects/sqlite/__init__.py,sha256=wnZ9vtfm0QXmth1jiGiubFgRiKxIoQoNthb1bp4FhCs,1173
+sqlalchemy/dialects/sqlite/__pycache__/__init__.cpython-312.pyc,,
+sqlalchemy/dialects/sqlite/__pycache__/aiosqlite.cpython-312.pyc,,
+sqlalchemy/dialects/sqlite/__pycache__/base.cpython-312.pyc,,
+sqlalchemy/dialects/sqlite/__pycache__/dml.cpython-312.pyc,,
+sqlalchemy/dialects/sqlite/__pycache__/json.cpython-312.pyc,,
+sqlalchemy/dialects/sqlite/__pycache__/provision.cpython-312.pyc,,
+sqlalchemy/dialects/sqlite/__pycache__/pysqlcipher.cpython-312.pyc,,
+sqlalchemy/dialects/sqlite/__pycache__/pysqlite.cpython-312.pyc,,
+sqlalchemy/dialects/sqlite/aiosqlite.py,sha256=GZJioZLot0D5CQ6ovPQoqv2iV8FAFm3G75lEFCzopoE,12296
+sqlalchemy/dialects/sqlite/base.py,sha256=YYEB5BeuemLC3FAR7EB8vA0zoUOwHTKoF_srvnAStps,96785
+sqlalchemy/dialects/sqlite/dml.py,sha256=PYESBj8Ip7bGs_Fi7QjbWLXLnU9a-SbP96JZiUoZNHg,8434
+sqlalchemy/dialects/sqlite/json.py,sha256=XFPwSdNx0DxDfxDZn7rmGGqsAgL4vpJbjjGaA73WruQ,2533
+sqlalchemy/dialects/sqlite/provision.py,sha256=O4JDoybdb2RBblXErEVPE2P_5xHab927BQItJa203zU,5383
+sqlalchemy/dialects/sqlite/pysqlcipher.py,sha256=_JuOCoic--ehAGkCgnwUUKKTs6xYoBGag4Y_WkQUDwU,5347
+sqlalchemy/dialects/sqlite/pysqlite.py,sha256=xBg6DKqvml5cCGxVSAQxR1dcMvso8q4uyXs2m4WLzz0,27891
+sqlalchemy/dialects/type_migration_guidelines.txt,sha256=-uHNdmYFGB7bzUNT6i8M5nb4j6j9YUKAtW4lcBZqsMg,8239
+sqlalchemy/engine/__init__.py,sha256=fJCAl5P7JH9iwjuWo72_3LOIzWWhTnvXqzpAmm_T0fY,2818
+sqlalchemy/engine/__pycache__/__init__.cpython-312.pyc,,
+sqlalchemy/engine/__pycache__/_py_processors.cpython-312.pyc,,
+sqlalchemy/engine/__pycache__/_py_row.cpython-312.pyc,,
+sqlalchemy/engine/__pycache__/_py_util.cpython-312.pyc,,
+sqlalchemy/engine/__pycache__/base.cpython-312.pyc,,
+sqlalchemy/engine/__pycache__/characteristics.cpython-312.pyc,,
+sqlalchemy/engine/__pycache__/create.cpython-312.pyc,,
+sqlalchemy/engine/__pycache__/cursor.cpython-312.pyc,,
+sqlalchemy/engine/__pycache__/default.cpython-312.pyc,,
+sqlalchemy/engine/__pycache__/events.cpython-312.pyc,,
+sqlalchemy/engine/__pycache__/interfaces.cpython-312.pyc,,
+sqlalchemy/engine/__pycache__/mock.cpython-312.pyc,,
+sqlalchemy/engine/__pycache__/processors.cpython-312.pyc,,
+sqlalchemy/engine/__pycache__/reflection.cpython-312.pyc,,
+sqlalchemy/engine/__pycache__/result.cpython-312.pyc,,
+sqlalchemy/engine/__pycache__/row.cpython-312.pyc,,
+sqlalchemy/engine/__pycache__/strategies.cpython-312.pyc,,
+sqlalchemy/engine/__pycache__/url.cpython-312.pyc,,
+sqlalchemy/engine/__pycache__/util.cpython-312.pyc,,
+sqlalchemy/engine/_py_processors.py,sha256=RSVKm9YppSBDSCEi8xvbZdRCP9EsCYfbyEg9iDCMCiI,3744
+sqlalchemy/engine/_py_row.py,sha256=Zdta0JGa7V2aV04L7nzXUEp-H1gpresKyBlneQu60pk,3549
+sqlalchemy/engine/_py_util.py,sha256=5m3MZbEqnUwP5kK_ghisFpzcXgBwSxTSkBEFB6afiD8,2245
+sqlalchemy/engine/base.py,sha256=RbIfWZ1Otyb4VzMYjDpK5BiDIE8QZwa4vQgRX0yCa28,122246
+sqlalchemy/engine/characteristics.py,sha256=YvMgrUVAt3wsSiQ0K8l44yBjFlMK3MGajxhg50t5yFM,2344
+sqlalchemy/engine/create.py,sha256=8372TLpy4FOAIZ9WmuNkx1v9DPgwpoCAH9P7LNXZCwY,32629
+sqlalchemy/engine/cursor.py,sha256=6e1Tp63r0Kt-P4pEaYR7wUew2aClTdKAEI-FoAAxJxE,74405
+sqlalchemy/engine/default.py,sha256=bi--ytxYJ0EtsCudl38owGtytnwTHX-PjlsYTFe8LpA,84065
+sqlalchemy/engine/events.py,sha256=PQyc_sbmqks6pqyN7xitO658KdKzzJWfW1TKYwEd5vo,37392
+sqlalchemy/engine/interfaces.py,sha256=pAFYR15f1Z_-qdzTYI4mAm8IYbD6maLBKbG3pBaJ8Us,112824
+sqlalchemy/engine/mock.py,sha256=ki4ud7YrUrzP2katdkxlJGFUKB2kS7cZZAHK5xWsNF8,4179
+sqlalchemy/engine/processors.py,sha256=ENN6XwndxJPW-aXPu_3NzAZsy5SvNznHoa1Qn29ERAw,2383
+sqlalchemy/engine/reflection.py,sha256=2aakNheQJNMUXZbhY8s1NtqGoGWTxM2THkJlMMfiX_s,75125
+sqlalchemy/engine/result.py,sha256=shRAsboHPTvKR38ryGgC4KLcUeVTbABSlWzAfOUKVZs,77841
+sqlalchemy/engine/row.py,sha256=doiXKaUI6s6OkfqPIwNyTPLllxJfR8HYgEI8ve9VYe0,11955
+sqlalchemy/engine/strategies.py,sha256=HjCj_FHQOgkkhhtnVmcOEuHI_cftNo3P0hN5zkhZvDc,442
+sqlalchemy/engine/url.py,sha256=_WNE7ia0JIPRc1PLY_jSA3F7bB5kp1gzuzkc5eoKviA,30694
+sqlalchemy/engine/util.py,sha256=3-ENI9S-3KLWr0GW27uWQfsvCJwMBGTKbykkKPUgiAE,5667
+sqlalchemy/event/__init__.py,sha256=CSBMp0yu5joTC6tWvx40B4p87N7oGKxC-ZLx2ULKUnQ,997
+sqlalchemy/event/__pycache__/__init__.cpython-312.pyc,,
+sqlalchemy/event/__pycache__/api.cpython-312.pyc,,
+sqlalchemy/event/__pycache__/attr.cpython-312.pyc,,
+sqlalchemy/event/__pycache__/base.cpython-312.pyc,,
+sqlalchemy/event/__pycache__/legacy.cpython-312.pyc,,
+sqlalchemy/event/__pycache__/registry.cpython-312.pyc,,
+sqlalchemy/event/api.py,sha256=nQAvPK1jrLpmu8aKCUtc-vYWcIuG-1FgAtp3GRkfIiI,8227
+sqlalchemy/event/attr.py,sha256=NMe_sPQTju2PE-f68C8TcKJGW-Gxyi1CLXumAmE368Y,20438
+sqlalchemy/event/base.py,sha256=Cr_PNJlCYJSU3rtT8DkplyjBRb-E2Wa3OAeK9woFJkk,14980
+sqlalchemy/event/legacy.py,sha256=OpPqE64xk1OYjLW1scvc6iijhoa5GZJt5f7-beWhgOc,8211
+sqlalchemy/event/registry.py,sha256=Zig9q2Galo8kO2aqr7a2rNAhmIkdJ-ntHSEcM5MfSgw,10833
+sqlalchemy/events.py,sha256=pRcPKKsPQHGPH_pvTtKRmzuEIy-QHCtkUiZl4MUbxKs,536
+sqlalchemy/exc.py,sha256=4SMKOJtz7_SWt5vskCSeXSi4ZlFyL4jh53Q8sk4-ODQ,24011
+sqlalchemy/ext/__init__.py,sha256=w4h7EpXjKPr0LD4yHa0pDCfrvleU3rrX7mgyb8RuDYQ,322
+sqlalchemy/ext/__pycache__/__init__.cpython-312.pyc,,
+sqlalchemy/ext/__pycache__/associationproxy.cpython-312.pyc,,
+sqlalchemy/ext/__pycache__/automap.cpython-312.pyc,,
+sqlalchemy/ext/__pycache__/baked.cpython-312.pyc,,
+sqlalchemy/ext/__pycache__/compiler.cpython-312.pyc,,
+sqlalchemy/ext/__pycache__/horizontal_shard.cpython-312.pyc,,
+sqlalchemy/ext/__pycache__/hybrid.cpython-312.pyc,,
+sqlalchemy/ext/__pycache__/indexable.cpython-312.pyc,,
+sqlalchemy/ext/__pycache__/instrumentation.cpython-312.pyc,,
+sqlalchemy/ext/__pycache__/mutable.cpython-312.pyc,,
+sqlalchemy/ext/__pycache__/orderinglist.cpython-312.pyc,,
+sqlalchemy/ext/__pycache__/serializer.cpython-312.pyc,,
+sqlalchemy/ext/associationproxy.py,sha256=5voNXWIJYGt6c8mwuSA6alm3SmEHOZ-CVK8ikgfzk8s,65960
+sqlalchemy/ext/asyncio/__init__.py,sha256=iG_0TmBO1pCB316WS-p17AImwqRtUoaKo7UphYZ7bYw,1317
+sqlalchemy/ext/asyncio/__pycache__/__init__.cpython-312.pyc,,
+sqlalchemy/ext/asyncio/__pycache__/base.cpython-312.pyc,,
+sqlalchemy/ext/asyncio/__pycache__/engine.cpython-312.pyc,,
+sqlalchemy/ext/asyncio/__pycache__/exc.cpython-312.pyc,,
+sqlalchemy/ext/asyncio/__pycache__/result.cpython-312.pyc,,
+sqlalchemy/ext/asyncio/__pycache__/scoping.cpython-312.pyc,,
+sqlalchemy/ext/asyncio/__pycache__/session.cpython-312.pyc,,
+sqlalchemy/ext/asyncio/base.py,sha256=PXF4YqfRi2-mADAtaL2_-Uv7CzoBVojPbzyA5phJ9To,8959
+sqlalchemy/ext/asyncio/engine.py,sha256=h4pe3ixuX6YfI97B5QWo2V4_CCCnOvM_EHPZhX19Mgc,47796
+sqlalchemy/ext/asyncio/exc.py,sha256=1hCdOKzvSryc_YE4jgj0l9JASOmZXutdzShEYPiLbGI,639
+sqlalchemy/ext/asyncio/result.py,sha256=zETerVB53gql1DL6tkO_JiqeU-m1OM-8kX0ULxmoL_I,30554
+sqlalchemy/ext/asyncio/scoping.py,sha256=cBNluB7n_lwdAAo6pySbvNRqPN7UBzwQHZ6XhRDyWgA,52685
+sqlalchemy/ext/asyncio/session.py,sha256=yWwhI5i_yVWjykxmxkcP3-xmw3UpoGYNhHZL8sYXQMA,62998
+sqlalchemy/ext/automap.py,sha256=7p13-VpN0MOM525r7pmEnftedya9l5G-Ei_cFXZfpTc,61431
+sqlalchemy/ext/baked.py,sha256=R8ZAxiVN6eH50AJu0O3TtFXNE1tnRkMlSj3AvkcWFhY,17818
+sqlalchemy/ext/compiler.py,sha256=h7eR0NcPJ4F_k8YGRP3R9YX75Y9pgiVxoCjRyvceF7g,20391
+sqlalchemy/ext/declarative/__init__.py,sha256=VJu8S1efxil20W48fJlpDn6gHorOudn5p3-lF72WcJ8,1818
+sqlalchemy/ext/declarative/__pycache__/__init__.cpython-312.pyc,,
+sqlalchemy/ext/declarative/__pycache__/extensions.cpython-312.pyc,,
+sqlalchemy/ext/declarative/extensions.py,sha256=vwZjudPFA_mao1U04-RZCaU_tvPMBgQa5OTmSI7K7SU,19547
+sqlalchemy/ext/horizontal_shard.py,sha256=eh14W8QWHYH22PL1l5qF_ad9Fyh1WAFjKi_vNfsme94,16766
+sqlalchemy/ext/hybrid.py,sha256=98D72WBmlileYBtEKMSNF9l-bwRavThSV8-LyB2gjo0,52499
+sqlalchemy/ext/indexable.py,sha256=RkG9BKwil-TqDjVBM14ML9c-geUrHxtRKpYkSJEwGHA,11028
+sqlalchemy/ext/instrumentation.py,sha256=rjjSbTGilYeGLdyEWV932TfTaGxiVP44_RajinANk54,15723
+sqlalchemy/ext/mutable.py,sha256=d3Pp8PcAVN4pHN9rhc1ReXBWe0Q70Q5S1klFoYGyDPA,37393
+sqlalchemy/ext/mypy/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+sqlalchemy/ext/mypy/__pycache__/__init__.cpython-312.pyc,,
+sqlalchemy/ext/mypy/__pycache__/apply.cpython-312.pyc,,
+sqlalchemy/ext/mypy/__pycache__/decl_class.cpython-312.pyc,,
+sqlalchemy/ext/mypy/__pycache__/infer.cpython-312.pyc,,
+sqlalchemy/ext/mypy/__pycache__/names.cpython-312.pyc,,
+sqlalchemy/ext/mypy/__pycache__/plugin.cpython-312.pyc,,
+sqlalchemy/ext/mypy/__pycache__/util.cpython-312.pyc,,
+sqlalchemy/ext/mypy/apply.py,sha256=uUES4grydYtKykLKlxzJeBXeGe8kfWou9_rzEyEkfp0,10503
+sqlalchemy/ext/mypy/decl_class.py,sha256=Ls2Efh4kEhle6Z4VMz0GRBgGQTYs2fHr5b4DfuDj44c,17377
+sqlalchemy/ext/mypy/infer.py,sha256=si720RW6iGxMRZNP5tcaIxA1_ehFp215TzxVXaLjglU,19364
+sqlalchemy/ext/mypy/names.py,sha256=tch4f5fDmdv4AWWFzXgGZdCpxmae59XRPT02KyMvrEI,10625
+sqlalchemy/ext/mypy/plugin.py,sha256=fLXDukvZqbJ0JJCOoyZAuOniYZ_F1YT-l9gKppu8SEs,9750
+sqlalchemy/ext/mypy/util.py,sha256=TlEQq4bcs8ARLL3PoFS8Qw6oYFeMqcGnWTeJ7NsPPFk,9408
+sqlalchemy/ext/orderinglist.py,sha256=8Vcg7UUkLg-QbYAbLVDSqu-5REkR6L-FLLhCYsHYxCQ,14384
+sqlalchemy/ext/serializer.py,sha256=ox6dbMOBmFR0H2RQFt17mcYBOGKgn1cNVFfqY8-jpgQ,6178
+sqlalchemy/future/__init__.py,sha256=79DZx3v7TQZpkS_qThlmuCOm1a9UK2ObNZhyMmjfNB0,516
+sqlalchemy/future/__pycache__/__init__.cpython-312.pyc,,
+sqlalchemy/future/__pycache__/engine.cpython-312.pyc,,
+sqlalchemy/future/engine.py,sha256=6uOpOedIqiT1-3qJSJIlv9_raMJU8NTkhQwN_Ngg8kI,499
+sqlalchemy/inspection.py,sha256=i3aR-IV101YU8D9TA8Pxb2wi08QZuJ34sMy6L5M__rY,5145
+sqlalchemy/log.py,sha256=aSlZ8DFHkOuI-AMmaOUUYtS9zGPadi_7tAo98QpUOiY,8634
+sqlalchemy/orm/__init__.py,sha256=cBn0aPWyDFY4ya-cHRshQBcuThk1smTUCTrlp6LHdlE,8463
+sqlalchemy/orm/__pycache__/__init__.cpython-312.pyc,,
+sqlalchemy/orm/__pycache__/_orm_constructors.cpython-312.pyc,,
+sqlalchemy/orm/__pycache__/_typing.cpython-312.pyc,,
+sqlalchemy/orm/__pycache__/attributes.cpython-312.pyc,,
+sqlalchemy/orm/__pycache__/base.cpython-312.pyc,,
+sqlalchemy/orm/__pycache__/bulk_persistence.cpython-312.pyc,,
+sqlalchemy/orm/__pycache__/clsregistry.cpython-312.pyc,,
+sqlalchemy/orm/__pycache__/collections.cpython-312.pyc,,
+sqlalchemy/orm/__pycache__/context.cpython-312.pyc,,
+sqlalchemy/orm/__pycache__/decl_api.cpython-312.pyc,,
+sqlalchemy/orm/__pycache__/decl_base.cpython-312.pyc,,
+sqlalchemy/orm/__pycache__/dependency.cpython-312.pyc,,
+sqlalchemy/orm/__pycache__/descriptor_props.cpython-312.pyc,,
+sqlalchemy/orm/__pycache__/dynamic.cpython-312.pyc,,
+sqlalchemy/orm/__pycache__/evaluator.cpython-312.pyc,,
+sqlalchemy/orm/__pycache__/events.cpython-312.pyc,,
+sqlalchemy/orm/__pycache__/exc.cpython-312.pyc,,
+sqlalchemy/orm/__pycache__/identity.cpython-312.pyc,,
+sqlalchemy/orm/__pycache__/instrumentation.cpython-312.pyc,,
+sqlalchemy/orm/__pycache__/interfaces.cpython-312.pyc,,
+sqlalchemy/orm/__pycache__/loading.cpython-312.pyc,,
+sqlalchemy/orm/__pycache__/mapped_collection.cpython-312.pyc,,
+sqlalchemy/orm/__pycache__/mapper.cpython-312.pyc,,
+sqlalchemy/orm/__pycache__/path_registry.cpython-312.pyc,,
+sqlalchemy/orm/__pycache__/persistence.cpython-312.pyc,,
+sqlalchemy/orm/__pycache__/properties.cpython-312.pyc,,
+sqlalchemy/orm/__pycache__/query.cpython-312.pyc,,
+sqlalchemy/orm/__pycache__/relationships.cpython-312.pyc,,
+sqlalchemy/orm/__pycache__/scoping.cpython-312.pyc,,
+sqlalchemy/orm/__pycache__/session.cpython-312.pyc,,
+sqlalchemy/orm/__pycache__/state.cpython-312.pyc,,
+sqlalchemy/orm/__pycache__/state_changes.cpython-312.pyc,,
+sqlalchemy/orm/__pycache__/strategies.cpython-312.pyc,,
+sqlalchemy/orm/__pycache__/strategy_options.cpython-312.pyc,,
+sqlalchemy/orm/__pycache__/sync.cpython-312.pyc,,
+sqlalchemy/orm/__pycache__/unitofwork.cpython-312.pyc,,
+sqlalchemy/orm/__pycache__/util.cpython-312.pyc,,
+sqlalchemy/orm/__pycache__/writeonly.cpython-312.pyc,,
+sqlalchemy/orm/_orm_constructors.py,sha256=_7_GY6qw2sA-GG_WXLz1GOO-0qC-SCBeA43GhVuS2Qw,99803
+sqlalchemy/orm/_typing.py,sha256=oRUJVAGpU3_DhSkIb1anXgneweVIARjB51HlPhMNfcM,5015
+sqlalchemy/orm/attributes.py,sha256=NFhYheqqu2VcXmKTdcvQKiRR_6qo0rHLK7nda7rpviA,92578
+sqlalchemy/orm/base.py,sha256=iZXsygk4fn8wd7wx1iXn_PfnGDY7d41YRfS0mC_q5vE,27700
+sqlalchemy/orm/bulk_persistence.py,sha256=S9VK5a6GSqnw3z7O5UG5OOnc9WxzmS_ooDkA5JmCIsY,69878
+sqlalchemy/orm/clsregistry.py,sha256=4J-kKshmLOEyx3VBqREm2k_XY0cer4zwUoHJT3n5Xmw,17949
+sqlalchemy/orm/collections.py,sha256=0AZFr9us9MiHo_Xcyi7DUsN02jSBERUOd-jIK8qQ1DA,52159
+sqlalchemy/orm/context.py,sha256=VyJl1ZJ5OnJUACKlM-bPLyyoqu4tyaKKdxeC-QF4EuU,111698
+sqlalchemy/orm/decl_api.py,sha256=a2Cyvjh6j5BlXJQ2i0jpQx7xkeI_6xo5MMxr0d2ndQY,63589
+sqlalchemy/orm/decl_base.py,sha256=g9xW9G-n9iStMI0i3i-9Rt4LDRW8--3iCCRPlWF6Cko,81660
+sqlalchemy/orm/dependency.py,sha256=g3R_1H_OGzagXFeen3Irm3c1lO3yeXGdGa0muUZgZAk,47583
+sqlalchemy/orm/descriptor_props.py,sha256=SdrfVu05zhWLGe_DnBlgbU6e5sWkkfBTirH9Nrr1MLk,37176
+sqlalchemy/orm/dynamic.py,sha256=pYlMIrpp80Ex4KByqdyhx0x0kIrl_cIADwkeVxvYu4s,9798
+sqlalchemy/orm/evaluator.py,sha256=jPjVrP7XbVOG6aXTCBREq0rF3oNHLqB4XAT-gt_cpaA,11925
+sqlalchemy/orm/events.py,sha256=fGnUHwDTV9FTiifB2mmIJispwPbIT4mZongRJD7uiw4,127258
+sqlalchemy/orm/exc.py,sha256=A3wvZVs5sC5XCef4LoTUBG-UfhmliFpU9rYMdS2t_To,7356
+sqlalchemy/orm/identity.py,sha256=gRiuQSrurHGEAJXH9QGYioXL49Im5EGcYQ-IKUEpHmQ,9249
+sqlalchemy/orm/instrumentation.py,sha256=o1mTv5gCgl9d-SRvEXXjl8rzl8uBasRL3bpDgWg9P58,24337
+sqlalchemy/orm/interfaces.py,sha256=RW7bBXGWtZHY2wXFOSqtvYm6UDl7yHZUyRX_6Yd3GfQ,48395
+sqlalchemy/orm/loading.py,sha256=F1ZEHTPBglmznST2nGj_0ARccoFgTyaOOwjcqpYeuvM,57366
+sqlalchemy/orm/mapped_collection.py,sha256=ZgYHaF37yo6-gZ7Da1Gg25rMgG2GynAy-RJoDhljV5g,19698
+sqlalchemy/orm/mapper.py,sha256=kyq4pBkTvvEqlW4H4XK_ktP1sOiALNAycgvF5f-xtqw,170969
+sqlalchemy/orm/path_registry.py,sha256=olyutgn0uNB7Wi32YNQx9ZHV6sUgV3TbyGplfSxfZ6g,25938
+sqlalchemy/orm/persistence.py,sha256=qr1jUgo-NZ0tLa5eIis2271QDt4KNJwYlYU_9CaKNhQ,60545
+sqlalchemy/orm/properties.py,sha256=dt1Gy06pbRY6zgm4QGR9nU6z2WCyoTZWBJYKpUhLq_c,29095
+sqlalchemy/orm/query.py,sha256=VBSD0k15xU_XykggvLGAwGdwNglBAoBKbOk8qAoMKdI,117714
+sqlalchemy/orm/relationships.py,sha256=wrHyICb8A5qPoyxf-nITQVJ13kCNr2MedDqEY8QMSt8,127816
+sqlalchemy/orm/scoping.py,sha256=75iPEWDFhPcIXgl8EUd_sPTCL6punfegEaTRE5mP3e8,78835
+sqlalchemy/orm/session.py,sha256=TeBcZNdY4HWQFdXNCIqbsQTtkvfJkBweMzvA9p3BiPA,193279
+sqlalchemy/orm/state.py,sha256=EaWkVNWHaDeJ_FZGXHakSamUk51BXmtMWLGdFhlJmh8,37536
+sqlalchemy/orm/state_changes.py,sha256=pqkjSDOR6H5BufMKdzFUIatDp3DY90SovOJiJ1k6Ayw,6815
+sqlalchemy/orm/strategies.py,sha256=V0o-1kB1IVTxhOGqGtRyjddZqAbPdsl_h-k0N3MKCGo,114052
+sqlalchemy/orm/strategy_options.py,sha256=EmgH28uMQhwwBCDVcXmywLk_Q8AbpnK02seMsMV4nmc,84102
+sqlalchemy/orm/sync.py,sha256=5Nt_OqP4IfhAtHwFRar4dw-YjLENRLvp4d3jDC4wpnw,5749
+sqlalchemy/orm/unitofwork.py,sha256=Wk5YZocBbxe4m1wU2aFQ7gY1Cp5CROi13kDEM1iOSz4,27033
+sqlalchemy/orm/util.py,sha256=7hCRYbQjqhWJTkrPf_NXY9zF_18VWTpyguu-nfYfc6c,80340
+sqlalchemy/orm/writeonly.py,sha256=WCPXCAwHqVCfhVWXQEFCP3OocIiHgqNJ5KnuJwSgGq4,22329
+sqlalchemy/pool/__init__.py,sha256=CIv4b6ctueY7w3sML_LxyLKAdl59esYOhz3O7W5w7WE,1815
+sqlalchemy/pool/__pycache__/__init__.cpython-312.pyc,,
+sqlalchemy/pool/__pycache__/base.cpython-312.pyc,,
+sqlalchemy/pool/__pycache__/events.cpython-312.pyc,,
+sqlalchemy/pool/__pycache__/impl.cpython-312.pyc,,
+sqlalchemy/pool/base.py,sha256=wuwKIak5d_4-TqKI2RFN8OYMEyOvV0djnoSVR8gbxAQ,52249
+sqlalchemy/pool/events.py,sha256=IcWfORKbHM69Z9FdPJlXI7-NIhQrR9O_lg59tiUdTRU,13148
+sqlalchemy/pool/impl.py,sha256=vU0n82a7uxdE34p3hU7cvUDA5QDy9MkIv1COT4kYFP8,17724
+sqlalchemy/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+sqlalchemy/schema.py,sha256=mt74CGCBtfv_qI1_6zzNFMexYGyWDj2Jkh-XdH4kEWI,3194
+sqlalchemy/sql/__init__.py,sha256=jAQx9rwhyPhoSjntM1BZSElJiMRmLowGThJVDGvExSU,5820
+sqlalchemy/sql/__pycache__/__init__.cpython-312.pyc,,
+sqlalchemy/sql/__pycache__/_dml_constructors.cpython-312.pyc,,
+sqlalchemy/sql/__pycache__/_elements_constructors.cpython-312.pyc,,
+sqlalchemy/sql/__pycache__/_orm_types.cpython-312.pyc,,
+sqlalchemy/sql/__pycache__/_py_util.cpython-312.pyc,,
+sqlalchemy/sql/__pycache__/_selectable_constructors.cpython-312.pyc,,
+sqlalchemy/sql/__pycache__/_typing.cpython-312.pyc,,
+sqlalchemy/sql/__pycache__/annotation.cpython-312.pyc,,
+sqlalchemy/sql/__pycache__/base.cpython-312.pyc,,
+sqlalchemy/sql/__pycache__/cache_key.cpython-312.pyc,,
+sqlalchemy/sql/__pycache__/coercions.cpython-312.pyc,,
+sqlalchemy/sql/__pycache__/compiler.cpython-312.pyc,,
+sqlalchemy/sql/__pycache__/crud.cpython-312.pyc,,
+sqlalchemy/sql/__pycache__/ddl.cpython-312.pyc,,
+sqlalchemy/sql/__pycache__/default_comparator.cpython-312.pyc,,
+sqlalchemy/sql/__pycache__/dml.cpython-312.pyc,,
+sqlalchemy/sql/__pycache__/elements.cpython-312.pyc,,
+sqlalchemy/sql/__pycache__/events.cpython-312.pyc,,
+sqlalchemy/sql/__pycache__/expression.cpython-312.pyc,,
+sqlalchemy/sql/__pycache__/functions.cpython-312.pyc,,
+sqlalchemy/sql/__pycache__/lambdas.cpython-312.pyc,,
+sqlalchemy/sql/__pycache__/naming.cpython-312.pyc,,
+sqlalchemy/sql/__pycache__/operators.cpython-312.pyc,,
+sqlalchemy/sql/__pycache__/roles.cpython-312.pyc,,
+sqlalchemy/sql/__pycache__/schema.cpython-312.pyc,,
+sqlalchemy/sql/__pycache__/selectable.cpython-312.pyc,,
+sqlalchemy/sql/__pycache__/sqltypes.cpython-312.pyc,,
+sqlalchemy/sql/__pycache__/traversals.cpython-312.pyc,,
+sqlalchemy/sql/__pycache__/type_api.cpython-312.pyc,,
+sqlalchemy/sql/__pycache__/util.cpython-312.pyc,,
+sqlalchemy/sql/__pycache__/visitors.cpython-312.pyc,,
+sqlalchemy/sql/_dml_constructors.py,sha256=hoNyINY3FNi1ZQajR6lbcRN7oYsNghM1wuzzVWxIv3c,3867
+sqlalchemy/sql/_elements_constructors.py,sha256=-qksx59Gqhmzxo1xByPtZZboNvL8uYcCN14pjHYHxL8,62914
+sqlalchemy/sql/_orm_types.py,sha256=_vR3_HQYgZR_of6_ZpTQByie2gaVScxQjVAVWAP3Ztg,620
+sqlalchemy/sql/_py_util.py,sha256=iiwgX3dQhOjdB5-10jtgHPIdibUqGk49bC1qdZMBpYI,2173
+sqlalchemy/sql/_selectable_constructors.py,sha256=RDqgejqiUuU12Be1jBpMIx_YdJho8fhKfnMoJLPFTFE,18812
+sqlalchemy/sql/_typing.py,sha256=C8kNZQ3TIpM-Q12Of3tTaESB1UxIfRME_lXouqgwMT8,12252
+sqlalchemy/sql/annotation.py,sha256=pTNidcQatCar6H1I9YAoPP1e6sOewaJ15B7_-7ykZOE,18271
+sqlalchemy/sql/base.py,sha256=dVvZoPoa3pb6iuwTU4QoCvVWQPyHZthaekl5J2zV_SU,73928
+sqlalchemy/sql/cache_key.py,sha256=Dl163qHjTkMCa5LTipZud8X3w0d8DvdIvGvv4AqriHE,32823
+sqlalchemy/sql/coercions.py,sha256=ju8xEi7b9G_GzxaQ6Nwu0cFIWFZ--ottIVfdiuhHY7Y,40553
+sqlalchemy/sql/compiler.py,sha256=9Wx423H72Yq7NHR8cmMAH6GpMCJmghs1L85YJqs_Lng,268763
+sqlalchemy/sql/crud.py,sha256=nyAPlmvuyWxMqSBdWPffC5P3CGXTQKK0bJoDbNgB3iQ,56457
+sqlalchemy/sql/ddl.py,sha256=XuUhulJLvvPjU4nYD6N42QLg8rEgquD6Jwn_yIHZejk,45542
+sqlalchemy/sql/default_comparator.py,sha256=SE0OaK1BlY0RinQ21ZXJOUGkO00oGv6GMMmAH-4iNTQ,16663
+sqlalchemy/sql/dml.py,sha256=eftbzdFJgMk7NV0BHKfK4dQ2R7XsyyJn6fCgYFJ0KNQ,65728
+sqlalchemy/sql/elements.py,sha256=dsNa2K57RygsGoaWuTMPp2QQ6SU3uZXSMW6CLGBbcIY,171208
+sqlalchemy/sql/events.py,sha256=xe3vJ6pQJau3dJWBAY0zU7Lz52UKuMrpLycriLm3AWA,18301
+sqlalchemy/sql/expression.py,sha256=baMnCH04jeE8E3tA2TovXlsREocA2j3fdHKnzOB8H4U,7586
+sqlalchemy/sql/functions.py,sha256=AcI_KstJxeLw6rEXx6QnIgR2rq4Ru6RXMbq4EIIUURA,55319
+sqlalchemy/sql/lambdas.py,sha256=EfDdUBi5cSmkjz8pQCSRo858UWQCFNZxXkM-1qS0CgU,49281
+sqlalchemy/sql/naming.py,sha256=l8udFP2wvXLgehIB0uF2KXwpkXSVSREDk6fLCH9F-XY,6865
+sqlalchemy/sql/operators.py,sha256=BYATjkBQLJAmwHAlGUSV-dv9RLtGw_ziAvFbKDrN4YU,76107
+sqlalchemy/sql/roles.py,sha256=71zm_xpRkUdnu-WzG6lxQVnFHwvUjf6X6e3kRIkbzAs,7686
+sqlalchemy/sql/schema.py,sha256=TOBTbcRY6ehosJEcpYn2NX0_UGZP9lfFs-o8lJVc5tI,228104
+sqlalchemy/sql/selectable.py,sha256=9dO2yhN83zjna7nPjOE1hcvGyJGjc_lj5SAz7SP5CBQ,233041
+sqlalchemy/sql/sqltypes.py,sha256=_0FpFLH0AFueb3TIB5Vcx9nXWDNj31XFQTP0u8OXnSo,126540
+sqlalchemy/sql/traversals.py,sha256=7b98JSeLxqecmGHhhLXT_2M4QMke6W-xCci5RXndhxI,33521
+sqlalchemy/sql/type_api.py,sha256=D9Kq-ppwZvlNmxaHqvVmM8IVg4n6_erzJpVioye9WKE,83823
+sqlalchemy/sql/util.py,sha256=lBEAf_-eRepTErOBCp1PbEMZDYdJqAiK1GemQtgojYo,48175
+sqlalchemy/sql/visitors.py,sha256=KD1qOYm6RdftCufVGB8q6jFTIZIQKS3zPCg78cVV0mQ,36427
+sqlalchemy/testing/__init__.py,sha256=9M2SMxBBLJ8xLUWXNCWDzkcvOqFznWcJzrSd712vATU,3126
+sqlalchemy/testing/__pycache__/__init__.cpython-312.pyc,,
+sqlalchemy/testing/__pycache__/assertions.cpython-312.pyc,,
+sqlalchemy/testing/__pycache__/assertsql.cpython-312.pyc,,
+sqlalchemy/testing/__pycache__/asyncio.cpython-312.pyc,,
+sqlalchemy/testing/__pycache__/config.cpython-312.pyc,,
+sqlalchemy/testing/__pycache__/engines.cpython-312.pyc,,
+sqlalchemy/testing/__pycache__/entities.cpython-312.pyc,,
+sqlalchemy/testing/__pycache__/exclusions.cpython-312.pyc,,
+sqlalchemy/testing/__pycache__/pickleable.cpython-312.pyc,,
+sqlalchemy/testing/__pycache__/profiling.cpython-312.pyc,,
+sqlalchemy/testing/__pycache__/provision.cpython-312.pyc,,
+sqlalchemy/testing/__pycache__/requirements.cpython-312.pyc,,
+sqlalchemy/testing/__pycache__/schema.cpython-312.pyc,,
+sqlalchemy/testing/__pycache__/util.cpython-312.pyc,,
+sqlalchemy/testing/__pycache__/warnings.cpython-312.pyc,,
+sqlalchemy/testing/assertions.py,sha256=lNNZ-gfF4TDRXmB7hZDdch7JYZRb_qWGeqWDFKtopx0,31439
+sqlalchemy/testing/assertsql.py,sha256=EIVk3i5qjiSI63c1ikTPoGhulZl88SSeOS2VNo1LJvM,16817
+sqlalchemy/testing/asyncio.py,sha256=cAw68tzu3h5wjdIKfOqhFATcbMb38XeK0ThjIalUHuQ,3728
+sqlalchemy/testing/config.py,sha256=MZOWz7wqzc1pbwHWSAR0RJkt2C-SD6ox-nYY7VHdi_U,12030
+sqlalchemy/testing/engines.py,sha256=w5-0FbanItRsOt6x4n7wM_OnToCzJnrvZZ2hk5Yzng8,13355
+sqlalchemy/testing/entities.py,sha256=rysywsnjXHlIIC-uv0L7-fLmTAuNpHJvcSd1HeAdY5M,3354
+sqlalchemy/testing/exclusions.py,sha256=uoYLEwyNOK1eR8rpfOZ2Q3dxgY0akM-RtsIFML-FPrY,12444
+sqlalchemy/testing/fixtures/__init__.py,sha256=9snVns5A7g28LqC6gqQuO4xRBoJzdnf068GQ6Cae75I,1198
+sqlalchemy/testing/fixtures/__pycache__/__init__.cpython-312.pyc,,
+sqlalchemy/testing/fixtures/__pycache__/base.cpython-312.pyc,,
+sqlalchemy/testing/fixtures/__pycache__/mypy.cpython-312.pyc,,
+sqlalchemy/testing/fixtures/__pycache__/orm.cpython-312.pyc,,
+sqlalchemy/testing/fixtures/__pycache__/sql.cpython-312.pyc,,
+sqlalchemy/testing/fixtures/base.py,sha256=OayRr25soCqj1_yc665D5XbWWzFCm7Xl9Txtps953p4,12256
+sqlalchemy/testing/fixtures/mypy.py,sha256=7fWVZzYzNjqmLIoFa-MmXSGDPS3eZYFXlH-WxaxBDDY,11845
+sqlalchemy/testing/fixtures/orm.py,sha256=x27qjpK54JETATcYuiphtW-HXRy8ej8h3aCDkeQXPfY,6095
+sqlalchemy/testing/fixtures/sql.py,sha256=Q7Qq0n4qTT681nWt5DqjThopgjv5BB2KmSmrmAxUqHM,15704
+sqlalchemy/testing/pickleable.py,sha256=B9dXGF7E2PywB67SngHPjSMIBDTFhyAV4rkDUcyMulk,2833
+sqlalchemy/testing/plugin/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+sqlalchemy/testing/plugin/__pycache__/__init__.cpython-312.pyc,,
+sqlalchemy/testing/plugin/__pycache__/bootstrap.cpython-312.pyc,,
+sqlalchemy/testing/plugin/__pycache__/plugin_base.cpython-312.pyc,,
+sqlalchemy/testing/plugin/__pycache__/pytestplugin.cpython-312.pyc,,
+sqlalchemy/testing/plugin/bootstrap.py,sha256=GrBB27KbswjE3Tt-zJlj6uSqGh9N-_CXkonnJSSBz84,1437
+sqlalchemy/testing/plugin/plugin_base.py,sha256=4SizjghFdDddt5o5gQ16Nw0bJHrtuBa4smxJcea-ti8,21573
+sqlalchemy/testing/plugin/pytestplugin.py,sha256=yh4PP406O0TwPMDzpJHpcNdU2WHXCLYI10F3oOLePjE,27295
+sqlalchemy/testing/profiling.py,sha256=HPjYvRLT1nD90FCZ7AA8j9ygkMtf1SGA47Xze2QPueo,10148
+sqlalchemy/testing/provision.py,sha256=w4F_ceGHPpWHUeh6cVcE5ktCC-ISrGc2yOSnXauOd5U,14200
+sqlalchemy/testing/requirements.py,sha256=gkviA8f5p4qdoDwAK791I4oGvnEqlm0ZZwJZpJzobFY,51393
+sqlalchemy/testing/schema.py,sha256=OSfMoIJ7ORbevGkeJdrKcTrQ0s7wXebuCU08mC1Y9jA,6513
+sqlalchemy/testing/suite/__init__.py,sha256=_firVc2uS3TMZ3vH2baQzNb17ubM78RHtb9kniSybmk,476
+sqlalchemy/testing/suite/__pycache__/__init__.cpython-312.pyc,,
+sqlalchemy/testing/suite/__pycache__/test_cte.cpython-312.pyc,,
+sqlalchemy/testing/suite/__pycache__/test_ddl.cpython-312.pyc,,
+sqlalchemy/testing/suite/__pycache__/test_deprecations.cpython-312.pyc,,
+sqlalchemy/testing/suite/__pycache__/test_dialect.cpython-312.pyc,,
+sqlalchemy/testing/suite/__pycache__/test_insert.cpython-312.pyc,,
+sqlalchemy/testing/suite/__pycache__/test_reflection.cpython-312.pyc,,
+sqlalchemy/testing/suite/__pycache__/test_results.cpython-312.pyc,,
+sqlalchemy/testing/suite/__pycache__/test_rowcount.cpython-312.pyc,,
+sqlalchemy/testing/suite/__pycache__/test_select.cpython-312.pyc,,
+sqlalchemy/testing/suite/__pycache__/test_sequence.cpython-312.pyc,,
+sqlalchemy/testing/suite/__pycache__/test_types.cpython-312.pyc,,
+sqlalchemy/testing/suite/__pycache__/test_unicode_ddl.cpython-312.pyc,,
+sqlalchemy/testing/suite/__pycache__/test_update_delete.cpython-312.pyc,,
+sqlalchemy/testing/suite/test_cte.py,sha256=O5idVeBnHm9zdiG3tuCBUn4hYU_TA63-6LNnRygr8g0,6205
+sqlalchemy/testing/suite/test_ddl.py,sha256=xWimTjggpTe3S1Xfmt_IPofTXkUUcKuVSVCIfIyGMbA,11785
+sqlalchemy/testing/suite/test_deprecations.py,sha256=XI8ZU1NxC-6uvPDImaaq9O7Ov6MF5gmy-yk3TfesLAo,5082
+sqlalchemy/testing/suite/test_dialect.py,sha256=HUpHZb7pnHbsoRpDLONpsCO_oWhBgjglU9pBO-EOUw4,22673
+sqlalchemy/testing/suite/test_insert.py,sha256=Wm_pW0qqUNV1Fs7mXoxtmaTHMQGmaVDgDsYgZs1jlxM,18308
+sqlalchemy/testing/suite/test_reflection.py,sha256=Nd4Ao_J3Sr-VeAeWbUe3gs6STPvik9DC37WkyJc-PVg,106205
+sqlalchemy/testing/suite/test_results.py,sha256=Hd6R4jhBNNQSp0xGa8wwTgpw-XUrCEZ3dWXpoZ4_DKs,15687
+sqlalchemy/testing/suite/test_rowcount.py,sha256=zhKVv0ibFSQmnE5luLwgHAn840zOJ6HxtkR3oL995cs,7652
+sqlalchemy/testing/suite/test_select.py,sha256=QHsBX16EZpxlEZZLM0pMNcwayPU0dig39McKwiiith0,58325
+sqlalchemy/testing/suite/test_sequence.py,sha256=c80CBWrU930GPnPfr9TCRbTTuITR7BpIactncLIj2XU,9672
+sqlalchemy/testing/suite/test_types.py,sha256=QjV48MqR7dB8UVzt56UL2z7Nt28-IhywX3DKuQeLYsY,65429
+sqlalchemy/testing/suite/test_unicode_ddl.py,sha256=7obItCpFt4qlWaDqe25HWgQT6FoUhgz1W7_Xycfz9Xk,5887
+sqlalchemy/testing/suite/test_update_delete.py,sha256=1hT0BTxB4SNipd6hnVlMnq25dLtQQoXov7z7UR0Sgi8,3658
+sqlalchemy/testing/util.py,sha256=Wsu4GZgCW6wX9mmxfiffhDz1cZm3778OB3LtiWNgb3Y,14080
+sqlalchemy/testing/warnings.py,sha256=pmfT33PF1q1PI7DdHOsup3LxHq1AC4-aYl1oL8HmrYo,1546
+sqlalchemy/types.py,sha256=DgBpPaT-vtsn6_glx5wocrIhR2A1vy56SQNRY3NiPUw,3168
+sqlalchemy/util/__init__.py,sha256=Bh0SkfkeCsz6-rbDmC41lAWOuCvKCiXVZthN2cWJEXk,8245
+sqlalchemy/util/__pycache__/__init__.cpython-312.pyc,,
+sqlalchemy/util/__pycache__/_collections.cpython-312.pyc,,
+sqlalchemy/util/__pycache__/_concurrency_py3k.cpython-312.pyc,,
+sqlalchemy/util/__pycache__/_has_cy.cpython-312.pyc,,
+sqlalchemy/util/__pycache__/_py_collections.cpython-312.pyc,,
+sqlalchemy/util/__pycache__/compat.cpython-312.pyc,,
+sqlalchemy/util/__pycache__/concurrency.cpython-312.pyc,,
+sqlalchemy/util/__pycache__/deprecations.cpython-312.pyc,,
+sqlalchemy/util/__pycache__/langhelpers.cpython-312.pyc,,
+sqlalchemy/util/__pycache__/preloaded.cpython-312.pyc,,
+sqlalchemy/util/__pycache__/queue.cpython-312.pyc,,
+sqlalchemy/util/__pycache__/tool_support.cpython-312.pyc,,
+sqlalchemy/util/__pycache__/topological.cpython-312.pyc,,
+sqlalchemy/util/__pycache__/typing.cpython-312.pyc,,
+sqlalchemy/util/_collections.py,sha256=FYqVQg3CaqiEd21OFN1pNCfFbQ8gvlchW_TMtihSFNE,20169
+sqlalchemy/util/_concurrency_py3k.py,sha256=31vs1oXaLzeTRgmOXRrWToRQskWmJk-CBs3-JxSTcck,8223
+sqlalchemy/util/_has_cy.py,sha256=XMkeqCDGmhkd0uuzpCdyELz7gOjHxyFQ1AIlc5NneoY,1229
+sqlalchemy/util/_py_collections.py,sha256=cYjsYLCLBy5jdGBJATLJCmtfzr_AaJ-HKTUN8OdAzxY,16630
+sqlalchemy/util/compat.py,sha256=FkeHnW9asJYJvNmxVltee8jQNwQSdVRdKJlVRRInJI4,9388
+sqlalchemy/util/concurrency.py,sha256=ZxcQYOKy-GBsQkPmCrBO5MzMpqW3JZme2Hiyqpbt9uc,2284
+sqlalchemy/util/deprecations.py,sha256=pr9DSAf1ECqDk7X7F6TNc1jrhOeFihL33uEb5Wt2_T0,11971
+sqlalchemy/util/langhelpers.py,sha256=CQQP2Q9c68nL5mcWL-Q38-INrtoDHDnBmq7QhnWyEDM,64980
+sqlalchemy/util/preloaded.py,sha256=KKNLJEqChDW1TNUsM_TzKu7JYEA3kkuh2N-quM_2_Y4,5905
+sqlalchemy/util/queue.py,sha256=ITejs6KS4Hz_ojrss2oFeUO9MoIeR3qWmZQ8J7yyrNU,10205
+sqlalchemy/util/tool_support.py,sha256=epm8MzDZpVmhE6LIjrjJrP8BUf12Wab2m28A9lGq95s,5969
+sqlalchemy/util/topological.py,sha256=hjJWL3C_B7Rpv9s7jj7wcTckcZUSkxc6xRDhiN1xyec,3458
+sqlalchemy/util/typing.py,sha256=ESYm4oQtt-SarN04YTXCgovXT8tFupMiPmuGCDCMEIc,15831
diff --git a/Backend/venv/lib/python3.12/site-packages/SQLAlchemy-2.0.23.dist-info/REQUESTED b/Backend/venv/lib/python3.12/site-packages/SQLAlchemy-2.0.23.dist-info/REQUESTED
new file mode 100644
index 00000000..e69de29b
diff --git a/Backend/venv/lib/python3.12/site-packages/SQLAlchemy-2.0.23.dist-info/WHEEL b/Backend/venv/lib/python3.12/site-packages/SQLAlchemy-2.0.23.dist-info/WHEEL
new file mode 100644
index 00000000..c5825c52
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/SQLAlchemy-2.0.23.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.41.3)
+Root-Is-Purelib: false
+Tag: cp312-cp312-manylinux_2_17_x86_64
+Tag: cp312-cp312-manylinux2014_x86_64
+
diff --git a/Backend/venv/lib/python3.12/site-packages/SQLAlchemy-2.0.23.dist-info/top_level.txt b/Backend/venv/lib/python3.12/site-packages/SQLAlchemy-2.0.23.dist-info/top_level.txt
new file mode 100644
index 00000000..39fb2bef
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/SQLAlchemy-2.0.23.dist-info/top_level.txt
@@ -0,0 +1 @@
+sqlalchemy
diff --git a/Backend/venv/lib/python3.12/site-packages/__pycache__/six.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/__pycache__/six.cpython-312.pyc
new file mode 100644
index 00000000..ad6169f3
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/__pycache__/six.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/__pycache__/typing_extensions.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/__pycache__/typing_extensions.cpython-312.pyc
new file mode 100644
index 00000000..44aa9280
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/__pycache__/typing_extensions.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/_cffi_backend.cpython-312-x86_64-linux-gnu.so b/Backend/venv/lib/python3.12/site-packages/_cffi_backend.cpython-312-x86_64-linux-gnu.so
new file mode 100755
index 00000000..156ee431
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/_cffi_backend.cpython-312-x86_64-linux-gnu.so differ
diff --git a/Backend/venv/lib/python3.12/site-packages/_yaml/__init__.py b/Backend/venv/lib/python3.12/site-packages/_yaml/__init__.py
new file mode 100644
index 00000000..7baa8c4b
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/_yaml/__init__.py
@@ -0,0 +1,33 @@
+# This is a stub package designed to roughly emulate the _yaml
+# extension module, which previously existed as a standalone module
+# and has been moved into the `yaml` package namespace.
+# It does not perfectly mimic its old counterpart, but should get
+# close enough for anyone who's relying on it even when they shouldn't.
+import yaml
+
+# in some circumstances, the yaml module we imported may be from a different version, so we need
+# to tread carefully when poking at it here (it may not have the attributes we expect)
+if not getattr(yaml, '__with_libyaml__', False):
+ from sys import version_info
+
+ exc = ModuleNotFoundError if version_info >= (3, 6) else ImportError
+ raise exc("No module named '_yaml'")
+else:
+ from yaml._yaml import *
+ import warnings
+ warnings.warn(
+ 'The _yaml extension module is now located at yaml._yaml'
+ ' and its location is subject to change. To use the'
+ ' LibYAML-based parser and emitter, import from `yaml`:'
+ ' `from yaml import CLoader as Loader, CDumper as Dumper`.',
+ DeprecationWarning
+ )
+ del warnings
+ # Don't `del yaml` here because yaml is actually an existing
+ # namespace member of _yaml.
+
+__name__ = '_yaml'
+# If the module is top-level (i.e. not a part of any specific package)
+# then the attribute should be set to ''.
+# https://docs.python.org/3.8/library/types.html
+__package__ = ''
diff --git a/Backend/venv/lib/python3.12/site-packages/_yaml/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/_yaml/__pycache__/__init__.cpython-312.pyc
new file mode 100644
index 00000000..96bab9b8
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/_yaml/__pycache__/__init__.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/aiofiles-23.2.1.dist-info/INSTALLER b/Backend/venv/lib/python3.12/site-packages/aiofiles-23.2.1.dist-info/INSTALLER
new file mode 100644
index 00000000..a1b589e3
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/aiofiles-23.2.1.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/Backend/venv/lib/python3.12/site-packages/aiofiles-23.2.1.dist-info/METADATA b/Backend/venv/lib/python3.12/site-packages/aiofiles-23.2.1.dist-info/METADATA
new file mode 100644
index 00000000..61b6e3ee
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/aiofiles-23.2.1.dist-info/METADATA
@@ -0,0 +1,291 @@
+Metadata-Version: 2.1
+Name: aiofiles
+Version: 23.2.1
+Summary: File support for asyncio.
+Project-URL: Changelog, https://github.com/Tinche/aiofiles#history
+Project-URL: Bug Tracker, https://github.com/Tinche/aiofiles/issues
+Project-URL: repository, https://github.com/Tinche/aiofiles
+Author-email: Tin Tvrtkovic
+License: Apache-2.0
+License-File: LICENSE
+License-File: NOTICE
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Framework :: AsyncIO
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Requires-Python: >=3.7
+Description-Content-Type: text/markdown
+
+# aiofiles: file support for asyncio
+
+[PyPI](https://pypi.python.org/pypi/aiofiles)
+[GitHub Actions](https://github.com/Tinche/aiofiles/actions)
+[CI workflow](https://github.com/Tinche/aiofiles/actions/workflows/main.yml)
+[Repository](https://github.com/Tinche/aiofiles)
+[Code style: black](https://github.com/psf/black)
+
+**aiofiles** is an Apache2 licensed library, written in Python, for handling local
+disk files in asyncio applications.
+
+Ordinary local file IO is blocking, and cannot easily and portably be made
+asynchronous. This means doing file IO may interfere with asyncio applications,
+which shouldn't block the executing thread. aiofiles helps with this by
+introducing asynchronous versions of files that support delegating operations to
+a separate thread pool.
+
+```python
+async with aiofiles.open('filename', mode='r') as f:
+ contents = await f.read()
+print(contents)
+'My file contents'
+```
+
+Asynchronous iteration is also supported.
+
+```python
+async with aiofiles.open('filename') as f:
+ async for line in f:
+ ...
+```
+
+An asynchronous interface to the `tempfile` module is also provided.
+
+```python
+async with aiofiles.tempfile.TemporaryFile('wb') as f:
+ await f.write(b'Hello, World!')
+```
+
+## Features
+
+- a file API very similar to Python's standard, blocking API
+- support for buffered and unbuffered binary files, and buffered text files
+- support for `async`/`await` ([PEP 492](https://peps.python.org/pep-0492/)) constructs
+- async interface to tempfile module
+
+## Installation
+
+To install aiofiles, simply:
+
+```bash
+$ pip install aiofiles
+```
+
+## Usage
+
+Files are opened using the `aiofiles.open()` coroutine, which, in addition to
+mirroring the builtin `open`, accepts optional `loop` and `executor`
+arguments. If `loop` is absent, the default loop is used, as determined by the
+current asyncio policy. If `executor` is not specified, the default event loop
+executor is used.
+
+In case of success, an asynchronous file object is returned with an
+API identical to an ordinary file, except the following methods are coroutines
+and delegate to an executor:
+
+- `close`
+- `flush`
+- `isatty`
+- `read`
+- `readall`
+- `read1`
+- `readinto`
+- `readline`
+- `readlines`
+- `seek`
+- `seekable`
+- `tell`
+- `truncate`
+- `writable`
+- `write`
+- `writelines`
+
+In case of failure, one of the usual exceptions will be raised.
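+
+As a minimal, illustrative sketch of the `executor` argument described above
+(the file name and pool size are made up), both the success and failure paths
+look like this:
+
+```python
+import asyncio
+from concurrent.futures import ThreadPoolExecutor
+
+import aiofiles
+
+async def main():
+    # A dedicated pool keeps file IO from competing with other executor jobs.
+    pool = ThreadPoolExecutor(max_workers=4)
+    try:
+        async with aiofiles.open("filename", mode="r", executor=pool) as f:
+            contents = await f.read()  # runs in the pool, off the event loop
+        print(contents)
+    except OSError as exc:
+        # The usual exceptions (e.g. FileNotFoundError) propagate unchanged.
+        print(f"could not read file: {exc}")
+
+asyncio.run(main())
+```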
+
+`aiofiles.stdin`, `aiofiles.stdout`, `aiofiles.stderr`,
+`aiofiles.stdin_bytes`, `aiofiles.stdout_bytes`, and
+`aiofiles.stderr_bytes` provide async access to `sys.stdin`,
+`sys.stdout`, `sys.stderr`, and their corresponding `.buffer` properties.
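+
+For example, a tiny sketch of writing to standard output asynchronously:
+
+```python
+import asyncio
+
+import aiofiles
+
+async def main():
+    # The write is delegated to the executor, so the event loop is not blocked.
+    await aiofiles.stdout.write("hello from aiofiles\n")
+
+asyncio.run(main())
+```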
+
+The `aiofiles.os` module contains executor-enabled coroutine versions of
+several useful `os` functions that deal with files (a short usage sketch
+follows the list):
+
+- `stat`
+- `statvfs`
+- `sendfile`
+- `rename`
+- `renames`
+- `replace`
+- `remove`
+- `unlink`
+- `mkdir`
+- `makedirs`
+- `rmdir`
+- `removedirs`
+- `link`
+- `symlink`
+- `readlink`
+- `listdir`
+- `scandir`
+- `access`
+- `path.exists`
+- `path.isfile`
+- `path.isdir`
+- `path.islink`
+- `path.ismount`
+- `path.getsize`
+- `path.getatime`
+- `path.getctime`
+- `path.samefile`
+- `path.sameopenfile`
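+
+A short sketch of the above (the path is illustrative); each call mirrors its
+`os` counterpart but is awaited:
+
+```python
+import asyncio
+
+import aiofiles.os
+
+async def main():
+    path = "example.txt"  # illustrative path
+    if await aiofiles.os.path.exists(path):
+        size = await aiofiles.os.path.getsize(path)
+        print(f"{path} is {size} bytes")
+        await aiofiles.os.remove(path)  # runs in the default executor
+
+asyncio.run(main())
+```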
+
+### Tempfile
+
+**aiofiles.tempfile** implements the following interfaces:
+
+- TemporaryFile
+- NamedTemporaryFile
+- SpooledTemporaryFile
+- TemporaryDirectory
+
+Results are returned wrapped in a context manager, allowing use with
+`async with` and `async for`.
+
+```python
+async with aiofiles.tempfile.NamedTemporaryFile('wb+') as f:
+ await f.write(b'Line1\n Line2')
+ await f.seek(0)
+ async for line in f:
+ print(line)
+
+async with aiofiles.tempfile.TemporaryDirectory() as d:
+ filename = os.path.join(d, "file.ext")
+```
+
+### Writing tests for aiofiles
+
+Real file IO can be mocked by patching `aiofiles.threadpool.sync_open`
+as desired. The return type also needs to be registered with the
+`aiofiles.threadpool.wrap` dispatcher:
+
+```python
+aiofiles.threadpool.wrap.register(mock.MagicMock)(
+ lambda *args, **kwargs: threadpool.AsyncBufferedIOBase(*args, **kwargs))
+
+async def test_stuff():
+ data = 'data'
+ mock_file = mock.MagicMock()
+
+ with mock.patch('aiofiles.threadpool.sync_open', return_value=mock_file) as mock_open:
+ async with aiofiles.open('filename', 'w') as f:
+ await f.write(data)
+
+ mock_file.write.assert_called_once_with(data)
+```
+
+### History
+
+#### 23.2.1 (2023-08-09)
+
+- Import `os.statvfs` conditionally to fix importing on non-UNIX systems.
+ [#171](https://github.com/Tinche/aiofiles/issues/171) [#172](https://github.com/Tinche/aiofiles/pull/172)
+
+#### 23.2.0 (2023-08-09)
+
+- aiofiles is now tested on Python 3.12 too.
+ [#166](https://github.com/Tinche/aiofiles/issues/166) [#168](https://github.com/Tinche/aiofiles/pull/168)
+- On Python 3.12, `aiofiles.tempfile.NamedTemporaryFile` now accepts a `delete_on_close` argument, just like the stdlib version.
+- On Python 3.12, `aiofiles.tempfile.NamedTemporaryFile` no longer exposes a `delete` attribute, just like the stdlib version.
+- Added `aiofiles.os.statvfs` and `aiofiles.os.path.ismount`.
+ [#162](https://github.com/Tinche/aiofiles/pull/162)
+- Use [PDM](https://pdm.fming.dev/latest/) instead of Poetry.
+ [#169](https://github.com/Tinche/aiofiles/pull/169)
+
+#### 23.1.0 (2023-02-09)
+
+- Added `aiofiles.os.access`.
+ [#146](https://github.com/Tinche/aiofiles/pull/146)
+- Removed `aiofiles.tempfile.temptypes.AsyncSpooledTemporaryFile.softspace`.
+ [#151](https://github.com/Tinche/aiofiles/pull/151)
+- Added `aiofiles.stdin`, `aiofiles.stdin_bytes`, and other stdio streams.
+ [#154](https://github.com/Tinche/aiofiles/pull/154)
+- Transition to `asyncio.get_running_loop` (vs `asyncio.get_event_loop`) internally.
+
+#### 22.1.0 (2022-09-04)
+
+- Added `aiofiles.os.path.islink`.
+ [#126](https://github.com/Tinche/aiofiles/pull/126)
+- Added `aiofiles.os.readlink`.
+ [#125](https://github.com/Tinche/aiofiles/pull/125)
+- Added `aiofiles.os.symlink`.
+ [#124](https://github.com/Tinche/aiofiles/pull/124)
+- Added `aiofiles.os.unlink`.
+ [#123](https://github.com/Tinche/aiofiles/pull/123)
+- Added `aiofiles.os.link`.
+ [#121](https://github.com/Tinche/aiofiles/pull/121)
+- Added `aiofiles.os.renames`.
+ [#120](https://github.com/Tinche/aiofiles/pull/120)
+- Added `aiofiles.os.{listdir, scandir}`.
+ [#143](https://github.com/Tinche/aiofiles/pull/143)
+- Switched to CalVer.
+- Dropped Python 3.6 support. If you require it, use version 0.8.0.
+- aiofiles is now tested on Python 3.11.
+
+#### 0.8.0 (2021-11-27)
+
+- aiofiles is now tested on Python 3.10.
+- Added `aiofiles.os.replace`.
+ [#107](https://github.com/Tinche/aiofiles/pull/107)
+- Added `aiofiles.os.{makedirs, removedirs}`.
+- Added `aiofiles.os.path.{exists, isfile, isdir, getsize, getatime, getctime, samefile, sameopenfile}`.
+ [#63](https://github.com/Tinche/aiofiles/pull/63)
+- Added `suffix`, `prefix`, `dir` args to `aiofiles.tempfile.TemporaryDirectory`.
+ [#116](https://github.com/Tinche/aiofiles/pull/116)
+
+#### 0.7.0 (2021-05-17)
+
+- Added the `aiofiles.tempfile` module for async temporary files.
+ [#56](https://github.com/Tinche/aiofiles/pull/56)
+- Switched to Poetry and GitHub actions.
+- Dropped 3.5 support.
+
+#### 0.6.0 (2020-10-27)
+
+- `aiofiles` is now tested on ppc64le.
+- Added `name` and `mode` properties to async file objects.
+ [#82](https://github.com/Tinche/aiofiles/pull/82)
+- Fixed a DeprecationWarning internally.
+ [#75](https://github.com/Tinche/aiofiles/pull/75)
+- Python 3.9 support and tests.
+
+#### 0.5.0 (2020-04-12)
+
+- Python 3.8 support. Code base modernization (using `async/await` instead of `asyncio.coroutine`/`yield from`).
+- Added `aiofiles.os.remove`, `aiofiles.os.rename`, `aiofiles.os.mkdir`, `aiofiles.os.rmdir`.
+ [#62](https://github.com/Tinche/aiofiles/pull/62)
+
+#### 0.4.0 (2018-08-11)
+
+- Python 3.7 support.
+- Removed Python 3.3/3.4 support. If you use these versions, stick to aiofiles 0.3.x.
+
+#### 0.3.2 (2017-09-23)
+
+- The LICENSE is now included in the sdist.
+ [#31](https://github.com/Tinche/aiofiles/pull/31)
+
+#### 0.3.1 (2017-03-10)
+
+- Introduced a changelog.
+- `aiofiles.os.sendfile` will now work if the standard `os` module contains a `sendfile` function.
+
+### Contributing
+
+Contributions are very welcome. Tests can be run with `tox`; please ensure
+the coverage at least stays the same before you submit a pull request.
diff --git a/Backend/venv/lib/python3.12/site-packages/aiofiles-23.2.1.dist-info/RECORD b/Backend/venv/lib/python3.12/site-packages/aiofiles-23.2.1.dist-info/RECORD
new file mode 100644
index 00000000..16f81996
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/aiofiles-23.2.1.dist-info/RECORD
@@ -0,0 +1,27 @@
+aiofiles-23.2.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+aiofiles-23.2.1.dist-info/METADATA,sha256=cot28p_PNjdl_MK--l9Qu2e6QOv9OxdHrKbjLmYf9Uw,9673
+aiofiles-23.2.1.dist-info/RECORD,,
+aiofiles-23.2.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+aiofiles-23.2.1.dist-info/WHEEL,sha256=KGYbc1zXlYddvwxnNty23BeaKzh7YuoSIvIMO4jEhvw,87
+aiofiles-23.2.1.dist-info/licenses/LICENSE,sha256=y16Ofl9KOYjhBjwULGDcLfdWBfTEZRXnduOspt-XbhQ,11325
+aiofiles-23.2.1.dist-info/licenses/NOTICE,sha256=EExY0dRQvWR0wJ2LZLwBgnM6YKw9jCU-M0zegpRSD_E,55
+aiofiles/__init__.py,sha256=1iAMJQyJtX3LGIS0AoFTJeO1aJ_RK2jpBSBhg0VoIrE,344
+aiofiles/__pycache__/__init__.cpython-312.pyc,,
+aiofiles/__pycache__/base.cpython-312.pyc,,
+aiofiles/__pycache__/os.cpython-312.pyc,,
+aiofiles/__pycache__/ospath.cpython-312.pyc,,
+aiofiles/base.py,sha256=rZwA151Ji8XlBkzvDmcF1CgDTY2iKNuJMfvNlM0s0E0,2684
+aiofiles/os.py,sha256=zuFGaIyGCGUuFb7trFFEm6SLdCRqTFsSV0mY6SO8z3M,970
+aiofiles/ospath.py,sha256=zqG2VFzRb6yYiIOWipqsdgvZmoMTFvZmBdkxkAl1FT4,764
+aiofiles/tempfile/__init__.py,sha256=hFSNTOjOUv371Ozdfy6FIxeln46Nm3xOVh4ZR3Q94V0,10244
+aiofiles/tempfile/__pycache__/__init__.cpython-312.pyc,,
+aiofiles/tempfile/__pycache__/temptypes.cpython-312.pyc,,
+aiofiles/tempfile/temptypes.py,sha256=ddEvNjMLVlr7WUILCe6ypTqw77yREeIonTk16Uw_NVs,2093
+aiofiles/threadpool/__init__.py,sha256=c_aexl1t193iKdPZaolPEEbHDrQ0RrsH_HTAToMPQBo,3171
+aiofiles/threadpool/__pycache__/__init__.cpython-312.pyc,,
+aiofiles/threadpool/__pycache__/binary.cpython-312.pyc,,
+aiofiles/threadpool/__pycache__/text.cpython-312.pyc,,
+aiofiles/threadpool/__pycache__/utils.cpython-312.pyc,,
+aiofiles/threadpool/binary.py,sha256=hp-km9VCRu0MLz_wAEUfbCz7OL7xtn9iGAawabpnp5U,2315
+aiofiles/threadpool/text.py,sha256=fNmpw2PEkj0BZSldipJXAgZqVGLxALcfOMiuDQ54Eas,1223
+aiofiles/threadpool/utils.py,sha256=B59dSZwO_WZs2dFFycKeA91iD2Xq2nNw1EFF8YMBI5k,1868
diff --git a/Backend/venv/lib/python3.12/site-packages/aiofiles-23.2.1.dist-info/REQUESTED b/Backend/venv/lib/python3.12/site-packages/aiofiles-23.2.1.dist-info/REQUESTED
new file mode 100644
index 00000000..e69de29b
diff --git a/Backend/venv/lib/python3.12/site-packages/aiofiles-23.2.1.dist-info/WHEEL b/Backend/venv/lib/python3.12/site-packages/aiofiles-23.2.1.dist-info/WHEEL
new file mode 100644
index 00000000..9a7c9d3a
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/aiofiles-23.2.1.dist-info/WHEEL
@@ -0,0 +1,4 @@
+Wheel-Version: 1.0
+Generator: hatchling 1.17.1
+Root-Is-Purelib: true
+Tag: py3-none-any
diff --git a/Backend/venv/lib/python3.12/site-packages/aiofiles-23.2.1.dist-info/licenses/LICENSE b/Backend/venv/lib/python3.12/site-packages/aiofiles-23.2.1.dist-info/licenses/LICENSE
new file mode 100644
index 00000000..e06d2081
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/aiofiles-23.2.1.dist-info/licenses/LICENSE
@@ -0,0 +1,202 @@
+Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "{}"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright {yyyy} {name of copyright owner}
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
diff --git a/Backend/venv/lib/python3.12/site-packages/aiofiles-23.2.1.dist-info/licenses/NOTICE b/Backend/venv/lib/python3.12/site-packages/aiofiles-23.2.1.dist-info/licenses/NOTICE
new file mode 100644
index 00000000..d134f281
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/aiofiles-23.2.1.dist-info/licenses/NOTICE
@@ -0,0 +1,2 @@
+Asyncio support for files
+Copyright 2016 Tin Tvrtkovic
diff --git a/Backend/venv/lib/python3.12/site-packages/aiofiles/__init__.py b/Backend/venv/lib/python3.12/site-packages/aiofiles/__init__.py
new file mode 100644
index 00000000..9e751114
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/aiofiles/__init__.py
@@ -0,0 +1,22 @@
+"""Utilities for asyncio-friendly file handling."""
+from .threadpool import (
+ open,
+ stdin,
+ stdout,
+ stderr,
+ stdin_bytes,
+ stdout_bytes,
+ stderr_bytes,
+)
+from . import tempfile
+
+__all__ = [
+ "open",
+ "tempfile",
+ "stdin",
+ "stdout",
+ "stderr",
+ "stdin_bytes",
+ "stdout_bytes",
+ "stderr_bytes",
+]
diff --git a/Backend/venv/lib/python3.12/site-packages/aiofiles/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/aiofiles/__pycache__/__init__.cpython-312.pyc
new file mode 100644
index 00000000..cebf125b
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/aiofiles/__pycache__/__init__.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/aiofiles/__pycache__/base.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/aiofiles/__pycache__/base.cpython-312.pyc
new file mode 100644
index 00000000..1c535fda
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/aiofiles/__pycache__/base.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/aiofiles/__pycache__/os.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/aiofiles/__pycache__/os.cpython-312.pyc
new file mode 100644
index 00000000..58ba5969
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/aiofiles/__pycache__/os.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/aiofiles/__pycache__/ospath.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/aiofiles/__pycache__/ospath.cpython-312.pyc
new file mode 100644
index 00000000..55a887aa
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/aiofiles/__pycache__/ospath.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/aiofiles/base.py b/Backend/venv/lib/python3.12/site-packages/aiofiles/base.py
new file mode 100644
index 00000000..07f2c2e5
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/aiofiles/base.py
@@ -0,0 +1,113 @@
+"""Various base classes."""
+from types import coroutine
+from collections.abc import Coroutine
+from asyncio import get_running_loop
+
+
+class AsyncBase:
+ def __init__(self, file, loop, executor):
+ self._file = file
+ self._executor = executor
+ self._ref_loop = loop
+
+ @property
+ def _loop(self):
+ return self._ref_loop or get_running_loop()
+
+ def __aiter__(self):
+ """We are our own iterator."""
+ return self
+
+ def __repr__(self):
+ return super().__repr__() + " wrapping " + repr(self._file)
+
+ async def __anext__(self):
+ """Simulate normal file iteration."""
+ line = await self.readline()
+ if line:
+ return line
+ else:
+ raise StopAsyncIteration
+
+
+class AsyncIndirectBase(AsyncBase):
+ def __init__(self, name, loop, executor, indirect):
+ self._indirect = indirect
+ self._name = name
+ super().__init__(None, loop, executor)
+
+ @property
+ def _file(self):
+ return self._indirect()
+
+ @_file.setter
+ def _file(self, v):
+ pass # discard writes
+
+
+class _ContextManager(Coroutine):
+ __slots__ = ("_coro", "_obj")
+
+ def __init__(self, coro):
+ self._coro = coro
+ self._obj = None
+
+ def send(self, value):
+ return self._coro.send(value)
+
+ def throw(self, typ, val=None, tb=None):
+ if val is None:
+ return self._coro.throw(typ)
+ elif tb is None:
+ return self._coro.throw(typ, val)
+ else:
+ return self._coro.throw(typ, val, tb)
+
+ def close(self):
+ return self._coro.close()
+
+ @property
+ def gi_frame(self):
+ return self._coro.gi_frame
+
+ @property
+ def gi_running(self):
+ return self._coro.gi_running
+
+ @property
+ def gi_code(self):
+ return self._coro.gi_code
+
+ def __next__(self):
+ return self.send(None)
+
+ @coroutine
+ def __iter__(self):
+ resp = yield from self._coro
+ return resp
+
+ def __await__(self):
+ resp = yield from self._coro
+ return resp
+
+ async def __anext__(self):
+ resp = await self._coro
+ return resp
+
+ async def __aenter__(self):
+ self._obj = await self._coro
+ return self._obj
+
+ async def __aexit__(self, exc_type, exc, tb):
+ self._obj.close()
+ self._obj = None
+
+
+class AiofilesContextManager(_ContextManager):
+ """An adjusted async context manager for aiofiles."""
+
+ async def __aexit__(self, exc_type, exc_val, exc_tb):
+ await get_running_loop().run_in_executor(
+ None, self._obj._file.__exit__, exc_type, exc_val, exc_tb
+ )
+ self._obj = None
diff --git a/Backend/venv/lib/python3.12/site-packages/aiofiles/os.py b/Backend/venv/lib/python3.12/site-packages/aiofiles/os.py
new file mode 100644
index 00000000..29bc748f
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/aiofiles/os.py
@@ -0,0 +1,51 @@
+"""Async executor versions of file functions from the os module."""
+import os
+
+from . import ospath as path
+from .ospath import wrap
+
+__all__ = [
+ "path",
+ "stat",
+ "statvfs",
+ "rename",
+ "renames",
+ "replace",
+ "remove",
+ "unlink",
+ "mkdir",
+ "makedirs",
+ "rmdir",
+ "removedirs",
+ "link",
+ "symlink",
+ "readlink",
+ "listdir",
+ "scandir",
+ "access",
+ "sendfile",
+ "wrap",
+]
+
+
+stat = wrap(os.stat)
+rename = wrap(os.rename)
+renames = wrap(os.renames)
+replace = wrap(os.replace)
+remove = wrap(os.remove)
+unlink = wrap(os.unlink)
+mkdir = wrap(os.mkdir)
+makedirs = wrap(os.makedirs)
+rmdir = wrap(os.rmdir)
+removedirs = wrap(os.removedirs)
+link = wrap(os.link)
+symlink = wrap(os.symlink)
+readlink = wrap(os.readlink)
+listdir = wrap(os.listdir)
+scandir = wrap(os.scandir)
+access = wrap(os.access)
+
+if hasattr(os, "sendfile"):
+ sendfile = wrap(os.sendfile)
+if hasattr(os, "statvfs"):
+ statvfs = wrap(os.statvfs)
diff --git a/Backend/venv/lib/python3.12/site-packages/aiofiles/ospath.py b/Backend/venv/lib/python3.12/site-packages/aiofiles/ospath.py
new file mode 100644
index 00000000..5f32a43d
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/aiofiles/ospath.py
@@ -0,0 +1,28 @@
+"""Async executor versions of file functions from the os.path module."""
+import asyncio
+from functools import partial, wraps
+from os import path
+
+
+def wrap(func):
+ @wraps(func)
+ async def run(*args, loop=None, executor=None, **kwargs):
+ if loop is None:
+ loop = asyncio.get_running_loop()
+ pfunc = partial(func, *args, **kwargs)
+ return await loop.run_in_executor(executor, pfunc)
+
+ return run
+
+
+exists = wrap(path.exists)
+isfile = wrap(path.isfile)
+isdir = wrap(path.isdir)
+islink = wrap(path.islink)
+ismount = wrap(path.ismount)
+getsize = wrap(path.getsize)
+getmtime = wrap(path.getmtime)
+getatime = wrap(path.getatime)
+getctime = wrap(path.getctime)
+samefile = wrap(path.samefile)
+sameopenfile = wrap(path.sameopenfile)
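+
+
+# A minimal usage sketch (illustrative comment, not upstream code): ``wrap``
+# turns a blocking function into a coroutine, and every wrapped name accepts
+# optional ``loop``/``executor`` keyword arguments:
+#
+#     import asyncio
+#     from concurrent.futures import ThreadPoolExecutor
+#     from aiofiles import ospath
+#
+#     async def demo():
+#         print(await ospath.exists("/etc/hosts"))
+#         with ThreadPoolExecutor(max_workers=1) as pool:
+#             print(await ospath.getsize("/etc/hosts", executor=pool))
+#
+#     asyncio.run(demo())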
diff --git a/Backend/venv/lib/python3.12/site-packages/aiofiles/tempfile/__init__.py b/Backend/venv/lib/python3.12/site-packages/aiofiles/tempfile/__init__.py
new file mode 100644
index 00000000..ac3f8bd8
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/aiofiles/tempfile/__init__.py
@@ -0,0 +1,357 @@
+import asyncio
+from functools import partial, singledispatch
+from io import BufferedRandom, BufferedReader, BufferedWriter, FileIO, TextIOBase
+from tempfile import NamedTemporaryFile as syncNamedTemporaryFile
+from tempfile import SpooledTemporaryFile as syncSpooledTemporaryFile
+from tempfile import TemporaryDirectory as syncTemporaryDirectory
+from tempfile import TemporaryFile as syncTemporaryFile
+from tempfile import _TemporaryFileWrapper as syncTemporaryFileWrapper
+
+from ..base import AiofilesContextManager
+from ..threadpool.binary import AsyncBufferedIOBase, AsyncBufferedReader, AsyncFileIO
+from ..threadpool.text import AsyncTextIOWrapper
+from .temptypes import AsyncSpooledTemporaryFile, AsyncTemporaryDirectory
+import sys
+
+__all__ = [
+ "NamedTemporaryFile",
+ "TemporaryFile",
+ "SpooledTemporaryFile",
+ "TemporaryDirectory",
+]
+
+
+# ================================================================
+# Public methods for async open and return of temp file/directory
+# objects with async interface
+# ================================================================
+if sys.version_info >= (3, 12):
+
+ def NamedTemporaryFile(
+ mode="w+b",
+ buffering=-1,
+ encoding=None,
+ newline=None,
+ suffix=None,
+ prefix=None,
+ dir=None,
+ delete=True,
+ delete_on_close=True,
+ loop=None,
+ executor=None,
+ ):
+ """Async open a named temporary file"""
+ return AiofilesContextManager(
+ _temporary_file(
+ named=True,
+ mode=mode,
+ buffering=buffering,
+ encoding=encoding,
+ newline=newline,
+ suffix=suffix,
+ prefix=prefix,
+ dir=dir,
+ delete=delete,
+ delete_on_close=delete_on_close,
+ loop=loop,
+ executor=executor,
+ )
+ )
+
+else:
+
+ def NamedTemporaryFile(
+ mode="w+b",
+ buffering=-1,
+ encoding=None,
+ newline=None,
+ suffix=None,
+ prefix=None,
+ dir=None,
+ delete=True,
+ loop=None,
+ executor=None,
+ ):
+ """Async open a named temporary file"""
+ return AiofilesContextManager(
+ _temporary_file(
+ named=True,
+ mode=mode,
+ buffering=buffering,
+ encoding=encoding,
+ newline=newline,
+ suffix=suffix,
+ prefix=prefix,
+ dir=dir,
+ delete=delete,
+ loop=loop,
+ executor=executor,
+ )
+ )
+
+
+def TemporaryFile(
+ mode="w+b",
+ buffering=-1,
+ encoding=None,
+ newline=None,
+ suffix=None,
+ prefix=None,
+ dir=None,
+ loop=None,
+ executor=None,
+):
+ """Async open an unnamed temporary file"""
+ return AiofilesContextManager(
+ _temporary_file(
+ named=False,
+ mode=mode,
+ buffering=buffering,
+ encoding=encoding,
+ newline=newline,
+ suffix=suffix,
+ prefix=prefix,
+ dir=dir,
+ loop=loop,
+ executor=executor,
+ )
+ )
+
+
+def SpooledTemporaryFile(
+ max_size=0,
+ mode="w+b",
+ buffering=-1,
+ encoding=None,
+ newline=None,
+ suffix=None,
+ prefix=None,
+ dir=None,
+ loop=None,
+ executor=None,
+):
+ """Async open a spooled temporary file"""
+ return AiofilesContextManager(
+ _spooled_temporary_file(
+ max_size=max_size,
+ mode=mode,
+ buffering=buffering,
+ encoding=encoding,
+ newline=newline,
+ suffix=suffix,
+ prefix=prefix,
+ dir=dir,
+ loop=loop,
+ executor=executor,
+ )
+ )
+
+
+def TemporaryDirectory(suffix=None, prefix=None, dir=None, loop=None, executor=None):
+ """Async open a temporary directory"""
+ return AiofilesContextManagerTempDir(
+ _temporary_directory(
+ suffix=suffix, prefix=prefix, dir=dir, loop=loop, executor=executor
+ )
+ )
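+
+
+# A minimal usage sketch (illustrative comment, not upstream code) for the
+# public factories above; each returns an async context manager:
+#
+#     import aiofiles.tempfile
+#
+#     async def demo():
+#         async with aiofiles.tempfile.NamedTemporaryFile("w+") as f:
+#             await f.write("scratch data")
+#             await f.seek(0)
+#             print(await f.read())
+#         async with aiofiles.tempfile.TemporaryDirectory() as d:
+#             print("created", d)  # d is the directory path, not an object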
+
+
+# =========================================================
+# Internal coroutines to open new temp files/directories
+# =========================================================
+if sys.version_info >= (3, 12):
+
+ async def _temporary_file(
+ named=True,
+ mode="w+b",
+ buffering=-1,
+ encoding=None,
+ newline=None,
+ suffix=None,
+ prefix=None,
+ dir=None,
+ delete=True,
+ delete_on_close=True,
+ loop=None,
+ executor=None,
+ max_size=0,
+ ):
+ """Async method to open a temporary file with async interface"""
+ if loop is None:
+ loop = asyncio.get_running_loop()
+
+ if named:
+ cb = partial(
+ syncNamedTemporaryFile,
+ mode=mode,
+ buffering=buffering,
+ encoding=encoding,
+ newline=newline,
+ suffix=suffix,
+ prefix=prefix,
+ dir=dir,
+ delete=delete,
+ delete_on_close=delete_on_close,
+ )
+ else:
+ cb = partial(
+ syncTemporaryFile,
+ mode=mode,
+ buffering=buffering,
+ encoding=encoding,
+ newline=newline,
+ suffix=suffix,
+ prefix=prefix,
+ dir=dir,
+ )
+
+ f = await loop.run_in_executor(executor, cb)
+
+ # Wrap based on type of underlying IO object
+ if type(f) is syncTemporaryFileWrapper:
+ # _TemporaryFileWrapper was used (named files)
+ result = wrap(f.file, f, loop=loop, executor=executor)
+ result._closer = f._closer
+ return result
+ else:
+ # IO object was returned directly without wrapper
+ return wrap(f, f, loop=loop, executor=executor)
+
+else:
+
+ async def _temporary_file(
+ named=True,
+ mode="w+b",
+ buffering=-1,
+ encoding=None,
+ newline=None,
+ suffix=None,
+ prefix=None,
+ dir=None,
+ delete=True,
+ loop=None,
+ executor=None,
+ max_size=0,
+ ):
+ """Async method to open a temporary file with async interface"""
+ if loop is None:
+ loop = asyncio.get_running_loop()
+
+ if named:
+ cb = partial(
+ syncNamedTemporaryFile,
+ mode=mode,
+ buffering=buffering,
+ encoding=encoding,
+ newline=newline,
+ suffix=suffix,
+ prefix=prefix,
+ dir=dir,
+ delete=delete,
+ )
+ else:
+ cb = partial(
+ syncTemporaryFile,
+ mode=mode,
+ buffering=buffering,
+ encoding=encoding,
+ newline=newline,
+ suffix=suffix,
+ prefix=prefix,
+ dir=dir,
+ )
+
+ f = await loop.run_in_executor(executor, cb)
+
+ # Wrap based on type of underlying IO object
+ if type(f) is syncTemporaryFileWrapper:
+ # _TemporaryFileWrapper was used (named files)
+ result = wrap(f.file, f, loop=loop, executor=executor)
+ # add delete property
+ result.delete = f.delete
+ return result
+ else:
+ # IO object was returned directly without wrapper
+ return wrap(f, f, loop=loop, executor=executor)
+
+
+async def _spooled_temporary_file(
+ max_size=0,
+ mode="w+b",
+ buffering=-1,
+ encoding=None,
+ newline=None,
+ suffix=None,
+ prefix=None,
+ dir=None,
+ loop=None,
+ executor=None,
+):
+ """Open a spooled temporary file with async interface"""
+ if loop is None:
+ loop = asyncio.get_running_loop()
+
+ cb = partial(
+ syncSpooledTemporaryFile,
+ max_size=max_size,
+ mode=mode,
+ buffering=buffering,
+ encoding=encoding,
+ newline=newline,
+ suffix=suffix,
+ prefix=prefix,
+ dir=dir,
+ )
+
+ f = await loop.run_in_executor(executor, cb)
+
+ # Single interface provided by SpooledTemporaryFile for all modes
+ return AsyncSpooledTemporaryFile(f, loop=loop, executor=executor)
+
+
+async def _temporary_directory(
+ suffix=None, prefix=None, dir=None, loop=None, executor=None
+):
+ """Async method to open a temporary directory with async interface"""
+ if loop is None:
+ loop = asyncio.get_running_loop()
+
+ cb = partial(syncTemporaryDirectory, suffix, prefix, dir)
+ f = await loop.run_in_executor(executor, cb)
+
+ return AsyncTemporaryDirectory(f, loop=loop, executor=executor)
+
+
+class AiofilesContextManagerTempDir(AiofilesContextManager):
+ """With returns the directory location, not the object (matching sync lib)"""
+
+ async def __aenter__(self):
+ self._obj = await self._coro
+ return self._obj.name
+
+
+@singledispatch
+def wrap(base_io_obj, file, *, loop=None, executor=None):
+ """Wrap the object with interface based on type of underlying IO"""
+ raise TypeError("Unsupported IO type: {}".format(base_io_obj))
+
+
+@wrap.register(TextIOBase)
+def _(base_io_obj, file, *, loop=None, executor=None):
+ return AsyncTextIOWrapper(file, loop=loop, executor=executor)
+
+
+@wrap.register(BufferedWriter)
+def _(base_io_obj, file, *, loop=None, executor=None):
+ return AsyncBufferedIOBase(file, loop=loop, executor=executor)
+
+
+@wrap.register(BufferedReader)
+@wrap.register(BufferedRandom)
+def _(base_io_obj, file, *, loop=None, executor=None):
+ return AsyncBufferedReader(file, loop=loop, executor=executor)
+
+
+@wrap.register(FileIO)
+def _(base_io_obj, file, *, loop=None, executor=None):
+ return AsyncFileIO(file, loop=loop, executor=executor)
diff --git a/Backend/venv/lib/python3.12/site-packages/aiofiles/tempfile/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/aiofiles/tempfile/__pycache__/__init__.cpython-312.pyc
new file mode 100644
index 00000000..ee31f0a6
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/aiofiles/tempfile/__pycache__/__init__.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/aiofiles/tempfile/__pycache__/temptypes.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/aiofiles/tempfile/__pycache__/temptypes.cpython-312.pyc
new file mode 100644
index 00000000..ecd3c274
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/aiofiles/tempfile/__pycache__/temptypes.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/aiofiles/tempfile/temptypes.py b/Backend/venv/lib/python3.12/site-packages/aiofiles/tempfile/temptypes.py
new file mode 100644
index 00000000..1a1b1a88
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/aiofiles/tempfile/temptypes.py
@@ -0,0 +1,69 @@
+"""Async wrappers for spooled temp files and temp directory objects"""
+from functools import partial
+
+from ..base import AsyncBase
+from ..threadpool.utils import (
+ cond_delegate_to_executor,
+ delegate_to_executor,
+ proxy_property_directly,
+)
+
+
+@delegate_to_executor("fileno", "rollover")
+@cond_delegate_to_executor(
+ "close",
+ "flush",
+ "isatty",
+ "read",
+ "readline",
+ "readlines",
+ "seek",
+ "tell",
+ "truncate",
+)
+@proxy_property_directly("closed", "encoding", "mode", "name", "newlines")
+class AsyncSpooledTemporaryFile(AsyncBase):
+ """Async wrapper for SpooledTemporaryFile class"""
+
+ async def _check(self):
+ if self._file._rolled:
+ return
+ max_size = self._file._max_size
+ if max_size and self._file.tell() > max_size:
+ await self.rollover()
+
+ async def write(self, s):
+ """Implementation to anticipate rollover"""
+ if self._file._rolled:
+ cb = partial(self._file.write, s)
+ return await self._loop.run_in_executor(self._executor, cb)
+ else:
+ file = self._file._file # reference underlying base IO object
+ rv = file.write(s)
+ await self._check()
+ return rv
+
+ async def writelines(self, iterable):
+ """Implementation to anticipate rollover"""
+ if self._file._rolled:
+ cb = partial(self._file.writelines, iterable)
+ return await self._loop.run_in_executor(self._executor, cb)
+ else:
+ file = self._file._file # reference underlying base IO object
+ rv = file.writelines(iterable)
+ await self._check()
+ return rv
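+
+    # Rollover sketch (illustrative comment, not upstream code): with
+    # max_size=10 the write below exceeds the threshold, so _check()
+    # triggers rollover() and subsequent I/O delegates to the executor:
+    #
+    #     async with aiofiles.tempfile.SpooledTemporaryFile(max_size=10) as f:
+    #         await f.write(b"0123456789abcdef")  # rolls over to disk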
+
+
+@delegate_to_executor("cleanup")
+@proxy_property_directly("name")
+class AsyncTemporaryDirectory:
+ """Async wrapper for TemporaryDirectory class"""
+
+ def __init__(self, file, loop, executor):
+ self._file = file
+ self._loop = loop
+ self._executor = executor
+
+ async def close(self):
+ await self.cleanup()
diff --git a/Backend/venv/lib/python3.12/site-packages/aiofiles/threadpool/__init__.py b/Backend/venv/lib/python3.12/site-packages/aiofiles/threadpool/__init__.py
new file mode 100644
index 00000000..a1cc673d
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/aiofiles/threadpool/__init__.py
@@ -0,0 +1,141 @@
+"""Handle files using a thread pool executor."""
+import asyncio
+import sys
+from functools import partial, singledispatch
+from io import (
+ BufferedIOBase,
+ BufferedRandom,
+ BufferedReader,
+ BufferedWriter,
+ FileIO,
+ TextIOBase,
+)
+from types import coroutine
+
+from ..base import AiofilesContextManager
+from .binary import (
+ AsyncBufferedIOBase,
+ AsyncBufferedReader,
+ AsyncFileIO,
+ AsyncIndirectBufferedIOBase,
+)
+from .text import AsyncTextIndirectIOWrapper, AsyncTextIOWrapper
+
+sync_open = open
+
+__all__ = (
+ "open",
+ "stdin",
+ "stdout",
+ "stderr",
+ "stdin_bytes",
+ "stdout_bytes",
+ "stderr_bytes",
+)
+
+
+def open(
+ file,
+ mode="r",
+ buffering=-1,
+ encoding=None,
+ errors=None,
+ newline=None,
+ closefd=True,
+ opener=None,
+ *,
+ loop=None,
+ executor=None,
+):
+ return AiofilesContextManager(
+ _open(
+ file,
+ mode=mode,
+ buffering=buffering,
+ encoding=encoding,
+ errors=errors,
+ newline=newline,
+ closefd=closefd,
+ opener=opener,
+ loop=loop,
+ executor=executor,
+ )
+ )
+
+
+@coroutine
+def _open(
+ file,
+ mode="r",
+ buffering=-1,
+ encoding=None,
+ errors=None,
+ newline=None,
+ closefd=True,
+ opener=None,
+ *,
+ loop=None,
+ executor=None,
+):
+ """Open an asyncio file."""
+ if loop is None:
+ loop = asyncio.get_running_loop()
+ cb = partial(
+ sync_open,
+ file,
+ mode=mode,
+ buffering=buffering,
+ encoding=encoding,
+ errors=errors,
+ newline=newline,
+ closefd=closefd,
+ opener=opener,
+ )
+ f = yield from loop.run_in_executor(executor, cb)
+
+ return wrap(f, loop=loop, executor=executor)
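+
+
+# A minimal usage sketch (illustrative comment, not upstream code): ``open``
+# returns immediately with a context manager; the blocking ``sync_open`` only
+# runs in the executor once the result is awaited or entered:
+#
+#     import asyncio
+#     import aiofiles
+#
+#     async def demo():
+#         async with aiofiles.open("/etc/hostname", mode="r") as f:
+#             async for line in f:
+#                 print(line, end="")
+#
+#     asyncio.run(demo())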
+
+
+@singledispatch
+def wrap(file, *, loop=None, executor=None):
+ raise TypeError("Unsupported io type: {}.".format(file))
+
+
+@wrap.register(TextIOBase)
+def _(file, *, loop=None, executor=None):
+ return AsyncTextIOWrapper(file, loop=loop, executor=executor)
+
+
+@wrap.register(BufferedWriter)
+@wrap.register(BufferedIOBase)
+def _(file, *, loop=None, executor=None):
+ return AsyncBufferedIOBase(file, loop=loop, executor=executor)
+
+
+@wrap.register(BufferedReader)
+@wrap.register(BufferedRandom)
+def _(file, *, loop=None, executor=None):
+ return AsyncBufferedReader(file, loop=loop, executor=executor)
+
+
+@wrap.register(FileIO)
+def _(file, *, loop=None, executor=None):
+ return AsyncFileIO(file, loop=loop, executor=executor)
+
+
+stdin = AsyncTextIndirectIOWrapper("sys.stdin", None, None, indirect=lambda: sys.stdin)
+stdout = AsyncTextIndirectIOWrapper(
+ "sys.stdout", None, None, indirect=lambda: sys.stdout
+)
+stderr = AsyncTextIndirectIOWrapper(
+ "sys.stderr", None, None, indirect=lambda: sys.stderr
+)
+stdin_bytes = AsyncIndirectBufferedIOBase(
+ "sys.stdin.buffer", None, None, indirect=lambda: sys.stdin.buffer
+)
+stdout_bytes = AsyncIndirectBufferedIOBase(
+ "sys.stdout.buffer", None, None, indirect=lambda: sys.stdout.buffer
+)
+stderr_bytes = AsyncIndirectBufferedIOBase(
+ "sys.stderr.buffer", None, None, indirect=lambda: sys.stderr.buffer
+)
diff --git a/Backend/venv/lib/python3.12/site-packages/aiofiles/threadpool/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/aiofiles/threadpool/__pycache__/__init__.cpython-312.pyc
new file mode 100644
index 00000000..d488e49e
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/aiofiles/threadpool/__pycache__/__init__.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/aiofiles/threadpool/__pycache__/binary.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/aiofiles/threadpool/__pycache__/binary.cpython-312.pyc
new file mode 100644
index 00000000..b73f5558
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/aiofiles/threadpool/__pycache__/binary.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/aiofiles/threadpool/__pycache__/text.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/aiofiles/threadpool/__pycache__/text.cpython-312.pyc
new file mode 100644
index 00000000..559fdf52
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/aiofiles/threadpool/__pycache__/text.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/aiofiles/threadpool/__pycache__/utils.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/aiofiles/threadpool/__pycache__/utils.cpython-312.pyc
new file mode 100644
index 00000000..9817d99c
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/aiofiles/threadpool/__pycache__/utils.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/aiofiles/threadpool/binary.py b/Backend/venv/lib/python3.12/site-packages/aiofiles/threadpool/binary.py
new file mode 100644
index 00000000..63fcaff2
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/aiofiles/threadpool/binary.py
@@ -0,0 +1,104 @@
+from ..base import AsyncBase, AsyncIndirectBase
+from .utils import delegate_to_executor, proxy_method_directly, proxy_property_directly
+
+
+@delegate_to_executor(
+ "close",
+ "flush",
+ "isatty",
+ "read",
+ "read1",
+ "readinto",
+ "readline",
+ "readlines",
+ "seek",
+ "seekable",
+ "tell",
+ "truncate",
+ "writable",
+ "write",
+ "writelines",
+)
+@proxy_method_directly("detach", "fileno", "readable")
+@proxy_property_directly("closed", "raw", "name", "mode")
+class AsyncBufferedIOBase(AsyncBase):
+ """The asyncio executor version of io.BufferedWriter and BufferedIOBase."""
+
+
+@delegate_to_executor("peek")
+class AsyncBufferedReader(AsyncBufferedIOBase):
+ """The asyncio executor version of io.BufferedReader and Random."""
+
+
+@delegate_to_executor(
+ "close",
+ "flush",
+ "isatty",
+ "read",
+ "readall",
+ "readinto",
+ "readline",
+ "readlines",
+ "seek",
+ "seekable",
+ "tell",
+ "truncate",
+ "writable",
+ "write",
+ "writelines",
+)
+@proxy_method_directly("fileno", "readable")
+@proxy_property_directly("closed", "name", "mode")
+class AsyncFileIO(AsyncBase):
+ """The asyncio executor version of io.FileIO."""
+
+
+@delegate_to_executor(
+ "close",
+ "flush",
+ "isatty",
+ "read",
+ "read1",
+ "readinto",
+ "readline",
+ "readlines",
+ "seek",
+ "seekable",
+ "tell",
+ "truncate",
+ "writable",
+ "write",
+ "writelines",
+)
+@proxy_method_directly("detach", "fileno", "readable")
+@proxy_property_directly("closed", "raw", "name", "mode")
+class AsyncIndirectBufferedIOBase(AsyncIndirectBase):
+ """The indirect asyncio executor version of io.BufferedWriter and BufferedIOBase."""
+
+
+@delegate_to_executor("peek")
+class AsyncIndirectBufferedReader(AsyncIndirectBufferedIOBase):
+ """The indirect asyncio executor version of io.BufferedReader and Random."""
+
+
+@delegate_to_executor(
+ "close",
+ "flush",
+ "isatty",
+ "read",
+ "readall",
+ "readinto",
+ "readline",
+ "readlines",
+ "seek",
+ "seekable",
+ "tell",
+ "truncate",
+ "writable",
+ "write",
+ "writelines",
+)
+@proxy_method_directly("fileno", "readable")
+@proxy_property_directly("closed", "name", "mode")
+class AsyncIndirectFileIO(AsyncIndirectBase):
+ """The indirect asyncio executor version of io.FileIO."""
diff --git a/Backend/venv/lib/python3.12/site-packages/aiofiles/threadpool/text.py b/Backend/venv/lib/python3.12/site-packages/aiofiles/threadpool/text.py
new file mode 100644
index 00000000..0e625909
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/aiofiles/threadpool/text.py
@@ -0,0 +1,64 @@
+from ..base import AsyncBase, AsyncIndirectBase
+from .utils import delegate_to_executor, proxy_method_directly, proxy_property_directly
+
+
+@delegate_to_executor(
+ "close",
+ "flush",
+ "isatty",
+ "read",
+ "readable",
+ "readline",
+ "readlines",
+ "seek",
+ "seekable",
+ "tell",
+ "truncate",
+ "write",
+ "writable",
+ "writelines",
+)
+@proxy_method_directly("detach", "fileno", "readable")
+@proxy_property_directly(
+ "buffer",
+ "closed",
+ "encoding",
+ "errors",
+ "line_buffering",
+ "newlines",
+ "name",
+ "mode",
+)
+class AsyncTextIOWrapper(AsyncBase):
+ """The asyncio executor version of io.TextIOWrapper."""
+
+
+@delegate_to_executor(
+ "close",
+ "flush",
+ "isatty",
+ "read",
+ "readable",
+ "readline",
+ "readlines",
+ "seek",
+ "seekable",
+ "tell",
+ "truncate",
+ "write",
+ "writable",
+ "writelines",
+)
+@proxy_method_directly("detach", "fileno", "readable")
+@proxy_property_directly(
+ "buffer",
+ "closed",
+ "encoding",
+ "errors",
+ "line_buffering",
+ "newlines",
+ "name",
+ "mode",
+)
+class AsyncTextIndirectIOWrapper(AsyncIndirectBase):
+ """The indirect asyncio executor version of io.TextIOWrapper."""
diff --git a/Backend/venv/lib/python3.12/site-packages/aiofiles/threadpool/utils.py b/Backend/venv/lib/python3.12/site-packages/aiofiles/threadpool/utils.py
new file mode 100644
index 00000000..5fd3bb99
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/aiofiles/threadpool/utils.py
@@ -0,0 +1,72 @@
+import functools
+
+
+def delegate_to_executor(*attrs):
+ def cls_builder(cls):
+ for attr_name in attrs:
+ setattr(cls, attr_name, _make_delegate_method(attr_name))
+ return cls
+
+ return cls_builder
+
+
+def proxy_method_directly(*attrs):
+ def cls_builder(cls):
+ for attr_name in attrs:
+ setattr(cls, attr_name, _make_proxy_method(attr_name))
+ return cls
+
+ return cls_builder
+
+
+def proxy_property_directly(*attrs):
+ def cls_builder(cls):
+ for attr_name in attrs:
+ setattr(cls, attr_name, _make_proxy_property(attr_name))
+ return cls
+
+ return cls_builder
+
+
+def cond_delegate_to_executor(*attrs):
+ def cls_builder(cls):
+ for attr_name in attrs:
+ setattr(cls, attr_name, _make_cond_delegate_method(attr_name))
+ return cls
+
+ return cls_builder
+
+
+def _make_delegate_method(attr_name):
+ async def method(self, *args, **kwargs):
+ cb = functools.partial(getattr(self._file, attr_name), *args, **kwargs)
+ return await self._loop.run_in_executor(self._executor, cb)
+
+ return method
+
+
+def _make_proxy_method(attr_name):
+ def method(self, *args, **kwargs):
+ return getattr(self._file, attr_name)(*args, **kwargs)
+
+ return method
+
+
+def _make_proxy_property(attr_name):
+ def proxy_property(self):
+ return getattr(self._file, attr_name)
+
+ return property(proxy_property)
+
+
+def _make_cond_delegate_method(attr_name):
+ """For spooled temp files, delegate only if rolled to file object"""
+
+ async def method(self, *args, **kwargs):
+ if self._file._rolled:
+ cb = functools.partial(getattr(self._file, attr_name), *args, **kwargs)
+ return await self._loop.run_in_executor(self._executor, cb)
+ else:
+ return getattr(self._file, attr_name)(*args, **kwargs)
+
+ return method
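+
+
+# A toy sketch (illustrative comment, not upstream code) of what these
+# factories produce: ``delegate_to_executor("read")`` attaches an async
+# ``read`` that calls ``self._file.read`` via ``loop.run_in_executor``,
+# roughly equivalent to:
+#
+#     class Wrapper:
+#         def __init__(self, file, loop, executor):
+#             self._file, self._loop, self._executor = file, loop, executor
+#
+#         async def read(self, *args, **kwargs):
+#             cb = functools.partial(self._file.read, *args, **kwargs)
+#             return await self._loop.run_in_executor(self._executor, cb)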
diff --git a/Backend/venv/lib/python3.12/site-packages/aiosmtplib-3.0.1.dist-info/INSTALLER b/Backend/venv/lib/python3.12/site-packages/aiosmtplib-3.0.1.dist-info/INSTALLER
new file mode 100644
index 00000000..a1b589e3
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/aiosmtplib-3.0.1.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/Backend/venv/lib/python3.12/site-packages/aiosmtplib-3.0.1.dist-info/LICENSE.txt b/Backend/venv/lib/python3.12/site-packages/aiosmtplib-3.0.1.dist-info/LICENSE.txt
new file mode 100644
index 00000000..9ff8f22f
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/aiosmtplib-3.0.1.dist-info/LICENSE.txt
@@ -0,0 +1,20 @@
+The MIT License (MIT)
+
+Copyright (c) 2022 Cole Maclean
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software is furnished to do so,
+subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
+FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
+COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
+IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/Backend/venv/lib/python3.12/site-packages/aiosmtplib-3.0.1.dist-info/METADATA b/Backend/venv/lib/python3.12/site-packages/aiosmtplib-3.0.1.dist-info/METADATA
new file mode 100644
index 00000000..28230ffd
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/aiosmtplib-3.0.1.dist-info/METADATA
@@ -0,0 +1,120 @@
+Metadata-Version: 2.1
+Name: aiosmtplib
+Version: 3.0.1
+Summary: asyncio SMTP client
+Home-page: https://github.com/cole/aiosmtplib
+License: MIT
+Keywords: smtp,email,asyncio
+Author: Cole Maclean
+Author-email: hi@colemaclean.dev
+Requires-Python: >=3.8,<4.0
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Environment :: No Input/Output (Daemon)
+Classifier: Framework :: AsyncIO
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Natural Language :: English
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Topic :: Communications
+Classifier: Topic :: Communications :: Email
+Classifier: Topic :: Software Development :: Libraries
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Classifier: Typing :: Typed
+Provides-Extra: docs
+Provides-Extra: uvloop
+Requires-Dist: furo (>=2023.9.10,<2024.0.0) ; extra == "docs"
+Requires-Dist: sphinx (>=7.0.0,<8.0.0) ; extra == "docs"
+Requires-Dist: sphinx-copybutton (>=0.5.0,<0.6.0) ; extra == "docs"
+Requires-Dist: sphinx_autodoc_typehints (>=1.24.0,<2.0.0) ; extra == "docs"
+Requires-Dist: uvloop (>=0.18,<0.19) ; extra == "uvloop"
+Project-URL: Documentation, https://aiosmtplib.readthedocs.io/en/stable/
+Project-URL: Repository, https://github.com/cole/aiosmtplib
+Description-Content-Type: text/x-rst
+
+aiosmtplib
+==========
+
+|circleci| |precommit.ci| |codecov| |pypi-version| |pypi-status| |downloads| |pypi-python-versions|
+|pypi-license|
+
+------------
+
+aiosmtplib is an asynchronous SMTP client for use with asyncio.
+
+For documentation, see `Read The Docs`_.
+
+Quickstart
+----------
+
+
+..
+ start quickstart
+
+.. code-block:: python
+
+ import asyncio
+ from email.message import EmailMessage
+
+ import aiosmtplib
+
+ message = EmailMessage()
+ message["From"] = "root@localhost"
+ message["To"] = "somebody@example.com"
+ message["Subject"] = "Hello World!"
+ message.set_content("Sent via aiosmtplib")
+
+ asyncio.run(aiosmtplib.send(message, hostname="127.0.0.1", port=25))
+
+..
+ end quickstart
+
+Requirements
+------------
+
+..
+ start requirements
+
+Python 3.8+ is required.
+
+..
+ end requirements
+
+
+Bug Reporting
+-------------
+
+..
+ start bug-reporting
+
+Bug reports (and feature requests) are welcome via `Github issues`_.
+
+.. _Github issues: https://github.com/cole/aiosmtplib/issues
+
+..
+ end bug-reporting
+
+
+.. |circleci| image:: https://circleci.com/gh/cole/aiosmtplib/tree/main.svg?style=shield
+ :target: https://circleci.com/gh/cole/aiosmtplib/tree/main
+ :alt: "aiosmtplib CircleCI build status"
+.. |pypi-version| image:: https://img.shields.io/pypi/v/aiosmtplib.svg
+ :target: https://pypi.python.org/pypi/aiosmtplib
+ :alt: "aiosmtplib on the Python Package Index"
+.. |pypi-python-versions| image:: https://img.shields.io/pypi/pyversions/aiosmtplib.svg
+.. |pypi-status| image:: https://img.shields.io/pypi/status/aiosmtplib.svg
+.. |pypi-license| image:: https://img.shields.io/pypi/l/aiosmtplib.svg
+.. |codecov| image:: https://codecov.io/gh/cole/aiosmtplib/branch/main/graph/badge.svg
+ :target: https://codecov.io/gh/cole/aiosmtplib
+.. |downloads| image:: https://pepy.tech/badge/aiosmtplib
+ :target: https://pepy.tech/project/aiosmtplib
+ :alt: "aiosmtplib on pypy.tech"
+.. |precommit.ci| image:: https://results.pre-commit.ci/badge/github/cole/aiosmtplib/main.svg
+ :target: https://results.pre-commit.ci/latest/github/cole/aiosmtplib/main
+ :alt: "pre-commit.ci status"
+.. _Read The Docs: https://aiosmtplib.readthedocs.io/en/stable/
+
diff --git a/Backend/venv/lib/python3.12/site-packages/aiosmtplib-3.0.1.dist-info/RECORD b/Backend/venv/lib/python3.12/site-packages/aiosmtplib-3.0.1.dist-info/RECORD
new file mode 100644
index 00000000..b9c775f2
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/aiosmtplib-3.0.1.dist-info/RECORD
@@ -0,0 +1,31 @@
+aiosmtplib-3.0.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+aiosmtplib-3.0.1.dist-info/LICENSE.txt,sha256=QJZYXPA0OwcT6OfW33PpC7l3l0KLFg8wXkwyb5PJuAI,1079
+aiosmtplib-3.0.1.dist-info/METADATA,sha256=w-T6mOwck4IcjTOJEzuEJV1jUrfGKERgvdTm-ys72pY,3880
+aiosmtplib-3.0.1.dist-info/RECORD,,
+aiosmtplib-3.0.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+aiosmtplib-3.0.1.dist-info/WHEEL,sha256=vVCvjcmxuUltf8cYhJ0sJMRDLr1XsPuxEId8YDzbyCY,88
+aiosmtplib/__init__.py,sha256=d5ywMqd09vy95sgVzOUnkpt4aTXByKJjxQW4M38VUgc,1332
+aiosmtplib/__main__.py,sha256=QVJLjeLErSggfsPgQH18Y27d7qav6-BoOinNKf7ArL0,877
+aiosmtplib/__pycache__/__init__.cpython-312.pyc,,
+aiosmtplib/__pycache__/__main__.cpython-312.pyc,,
+aiosmtplib/__pycache__/api.cpython-312.pyc,,
+aiosmtplib/__pycache__/auth.cpython-312.pyc,,
+aiosmtplib/__pycache__/email.cpython-312.pyc,,
+aiosmtplib/__pycache__/errors.cpython-312.pyc,,
+aiosmtplib/__pycache__/esmtp.cpython-312.pyc,,
+aiosmtplib/__pycache__/protocol.cpython-312.pyc,,
+aiosmtplib/__pycache__/response.cpython-312.pyc,,
+aiosmtplib/__pycache__/smtp.cpython-312.pyc,,
+aiosmtplib/__pycache__/status.cpython-312.pyc,,
+aiosmtplib/__pycache__/typing.cpython-312.pyc,,
+aiosmtplib/api.py,sha256=BSjcAqm0Oa8TG34QC2D4Qs7-UwKlEyyGQjG3l6xbeCM,5870
+aiosmtplib/auth.py,sha256=iinJ7cooLIU1x4O3fQmaHX6CfYtqKnbLnS46avQX98w,1940
+aiosmtplib/email.py,sha256=DqLoXV62UPqzJO6wXrqvuHPpAxAhIdVO6G2GBefd3m0,5454
+aiosmtplib/errors.py,sha256=HYDl1NoMfu1dRyPTj3Y8F8l56g0SlMa83uuawRrEvME,3190
+aiosmtplib/esmtp.py,sha256=CS8HQFJpbokW2GK_IeadQXfGVOBwZNW17MvvrUf9pvs,2369
+aiosmtplib/protocol.py,sha256=3ItuH2DuTJ5LZazfyxd5M0TOgl_ZVKZYdJ78rAT9IE4,12956
+aiosmtplib/py.typed,sha256=gZqFSJxZQK4TFIqj7Em-0qpGgVkr1EZ9QT7O0zz59S4,97
+aiosmtplib/response.py,sha256=TWfuuam1AQLzbkn9Kz8INz4lKEXCIXpjkRpkPUH0shw,672
+aiosmtplib/smtp.py,sha256=bnycLbGtwxK33OS1loLwIJ-p0dIqx1UjfDcNjuaT3C0,54079
+aiosmtplib/status.py,sha256=DeUp1Ea7ZTt8feo2ekdj4ft-q3x6SjECvrNTSphAZG0,106
+aiosmtplib/typing.py,sha256=cxwpfQ8_D4LNmk4mpxOAE8b9dRwUuWPpD0APN3iPuv4,1304
diff --git a/Backend/venv/lib/python3.12/site-packages/aiosmtplib-3.0.1.dist-info/REQUESTED b/Backend/venv/lib/python3.12/site-packages/aiosmtplib-3.0.1.dist-info/REQUESTED
new file mode 100644
index 00000000..e69de29b
diff --git a/Backend/venv/lib/python3.12/site-packages/aiosmtplib-3.0.1.dist-info/WHEEL b/Backend/venv/lib/python3.12/site-packages/aiosmtplib-3.0.1.dist-info/WHEEL
new file mode 100644
index 00000000..4ba76714
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/aiosmtplib-3.0.1.dist-info/WHEEL
@@ -0,0 +1,4 @@
+Wheel-Version: 1.0
+Generator: poetry-core 1.4.0
+Root-Is-Purelib: true
+Tag: py3-none-any
diff --git a/Backend/venv/lib/python3.12/site-packages/aiosmtplib/__init__.py b/Backend/venv/lib/python3.12/site-packages/aiosmtplib/__init__.py
new file mode 100644
index 00000000..4680933c
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/aiosmtplib/__init__.py
@@ -0,0 +1,60 @@
+"""
+aiosmtplib
+==========
+
+An asyncio SMTP client.
+
+Originally based on smtplib from the Python 3 standard library by:
+The Dragon De Monsyne
+
+Author: Cole Maclean
+"""
+from .api import send
+from .errors import (
+ SMTPAuthenticationError,
+ SMTPConnectError,
+ SMTPConnectTimeoutError,
+ SMTPDataError,
+ SMTPException,
+ SMTPHeloError,
+ SMTPNotSupported,
+ SMTPReadTimeoutError,
+ SMTPRecipientRefused,
+ SMTPRecipientsRefused,
+ SMTPResponseException,
+ SMTPSenderRefused,
+ SMTPServerDisconnected,
+ SMTPTimeoutError,
+ SMTPConnectResponseError,
+)
+from .response import SMTPResponse
+from .smtp import SMTP
+from .typing import SMTPStatus
+
+
+__title__ = "aiosmtplib"
+__version__ = "3.0.1"
+__author__ = "Cole Maclean"
+__license__ = "MIT"
+__copyright__ = "Copyright 2022 Cole Maclean"
+__all__ = (
+ "send",
+ "SMTP",
+ "SMTPResponse",
+ "SMTPStatus",
+ "SMTPAuthenticationError",
+ "SMTPConnectError",
+ "SMTPDataError",
+ "SMTPException",
+ "SMTPHeloError",
+ "SMTPNotSupported",
+ "SMTPRecipientRefused",
+ "SMTPRecipientsRefused",
+ "SMTPResponseException",
+ "SMTPSenderRefused",
+ "SMTPServerDisconnected",
+ "SMTPTimeoutError",
+ "SMTPConnectTimeoutError",
+ "SMTPReadTimeoutError",
+ "SMTPConnectResponseError",
+)
diff --git a/Backend/venv/lib/python3.12/site-packages/aiosmtplib/__main__.py b/Backend/venv/lib/python3.12/site-packages/aiosmtplib/__main__.py
new file mode 100644
index 00000000..ee9247be
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/aiosmtplib/__main__.py
@@ -0,0 +1,30 @@
+from aiosmtplib.smtp import SMTP, SMTP_PORT
+
+
+raw_hostname = input("SMTP server hostname [localhost]: ") # nosec
+raw_port = input(f"SMTP server port [{SMTP_PORT}]: ") # nosec
+raw_sender = input("From: ") # nosec
+raw_recipients = input("To: ") # nosec
+
+hostname = raw_hostname or "localhost"
+port = int(raw_port) if raw_port else SMTP_PORT
+recipients = raw_recipients.split(",")
+lines = []
+
+print("Enter message, end with ^D:")
+while True:
+ try:
+ lines.append(input()) # nosec
+ except EOFError:
+ break
+
+message = "\n".join(lines)
+message_len = len(message.encode("utf-8"))
+print(f"Message length (bytes): {message_len}")
+
+smtp_client = SMTP(hostname=hostname or "localhost", port=port, start_tls=False)
+sendmail_errors, sendmail_response = smtp_client.sendmail_sync(
+ raw_sender, recipients, message
+)
+
+print(f"Server response: {sendmail_response}")
diff --git a/Backend/venv/lib/python3.12/site-packages/aiosmtplib/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/aiosmtplib/__pycache__/__init__.cpython-312.pyc
new file mode 100644
index 00000000..a209007b
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/aiosmtplib/__pycache__/__init__.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/aiosmtplib/__pycache__/__main__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/aiosmtplib/__pycache__/__main__.cpython-312.pyc
new file mode 100644
index 00000000..72fea42e
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/aiosmtplib/__pycache__/__main__.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/aiosmtplib/__pycache__/api.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/aiosmtplib/__pycache__/api.cpython-312.pyc
new file mode 100644
index 00000000..4968bd82
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/aiosmtplib/__pycache__/api.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/aiosmtplib/__pycache__/auth.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/aiosmtplib/__pycache__/auth.cpython-312.pyc
new file mode 100644
index 00000000..083e860a
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/aiosmtplib/__pycache__/auth.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/aiosmtplib/__pycache__/email.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/aiosmtplib/__pycache__/email.cpython-312.pyc
new file mode 100644
index 00000000..f205197d
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/aiosmtplib/__pycache__/email.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/aiosmtplib/__pycache__/errors.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/aiosmtplib/__pycache__/errors.cpython-312.pyc
new file mode 100644
index 00000000..841826f1
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/aiosmtplib/__pycache__/errors.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/aiosmtplib/__pycache__/esmtp.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/aiosmtplib/__pycache__/esmtp.cpython-312.pyc
new file mode 100644
index 00000000..6de05003
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/aiosmtplib/__pycache__/esmtp.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/aiosmtplib/__pycache__/protocol.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/aiosmtplib/__pycache__/protocol.cpython-312.pyc
new file mode 100644
index 00000000..1d40b281
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/aiosmtplib/__pycache__/protocol.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/aiosmtplib/__pycache__/response.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/aiosmtplib/__pycache__/response.cpython-312.pyc
new file mode 100644
index 00000000..c9c795fb
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/aiosmtplib/__pycache__/response.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/aiosmtplib/__pycache__/smtp.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/aiosmtplib/__pycache__/smtp.cpython-312.pyc
new file mode 100644
index 00000000..a053db93
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/aiosmtplib/__pycache__/smtp.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/aiosmtplib/__pycache__/status.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/aiosmtplib/__pycache__/status.cpython-312.pyc
new file mode 100644
index 00000000..abc578c2
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/aiosmtplib/__pycache__/status.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/aiosmtplib/__pycache__/typing.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/aiosmtplib/__pycache__/typing.cpython-312.pyc
new file mode 100644
index 00000000..0f77b054
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/aiosmtplib/__pycache__/typing.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/aiosmtplib/api.py b/Backend/venv/lib/python3.12/site-packages/aiosmtplib/api.py
new file mode 100644
index 00000000..13dd4a8e
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/aiosmtplib/api.py
@@ -0,0 +1,139 @@
+"""
+Main public API.
+"""
+import email.message
+import socket
+import ssl
+from typing import Dict, Optional, Sequence, Tuple, Union, cast
+
+from .response import SMTPResponse
+from .smtp import DEFAULT_TIMEOUT, SMTP
+from .typing import SocketPathType
+
+
+__all__ = ("send",)
+
+
+async def send(
+ message: Union[email.message.EmailMessage, email.message.Message, str, bytes],
+ /,
+ *,
+ sender: Optional[str] = None,
+ recipients: Optional[Union[str, Sequence[str]]] = None,
+ mail_options: Optional[Sequence[str]] = None,
+ rcpt_options: Optional[Sequence[str]] = None,
+ hostname: Optional[str] = "localhost",
+ port: Optional[int] = None,
+ username: Optional[Union[str, bytes]] = None,
+ password: Optional[Union[str, bytes]] = None,
+ local_hostname: Optional[str] = None,
+ source_address: Optional[Tuple[str, int]] = None,
+ timeout: Optional[float] = DEFAULT_TIMEOUT,
+ use_tls: bool = False,
+ start_tls: Optional[bool] = None,
+ validate_certs: bool = True,
+ client_cert: Optional[str] = None,
+ client_key: Optional[str] = None,
+ tls_context: Optional[ssl.SSLContext] = None,
+ cert_bundle: Optional[str] = None,
+ socket_path: Optional[SocketPathType] = None,
+ sock: Optional[socket.socket] = None,
+) -> Tuple[Dict[str, SMTPResponse], str]:
+ """
+ Send an email message. On await, connects to the SMTP server using the details
+ provided, sends the message, then disconnects.
+
+ :param message: Message text. Either an :py:class:`email.message.EmailMessage`
+ object, ``str`` or ``bytes``. If an :py:class:`email.message.EmailMessage`
+ object is provided, sender and recipients set in the message headers will be
+ used, unless overridden by the respective keyword arguments.
+ :keyword sender: From email address. Not required if an
+ :py:class:`email.message.EmailMessage` object is provided for the `message`
+ argument.
+ :keyword recipients: Recipient email addresses. Not required if an
+ :py:class:`email.message.EmailMessage` object is provided for the `message`
+ argument.
+ :keyword hostname: Server name (or IP) to connect to. Defaults to "localhost".
+    :keyword port: Server port. Defaults to ``465`` if ``use_tls`` is ``True``,
+ ``587`` if ``start_tls`` is ``True``, or ``25`` otherwise.
+ :keyword username: Username to login as after connect.
+ :keyword password: Password for login after connect.
+ :keyword local_hostname: The hostname of the client. If specified, used as the
+ FQDN of the local host in the HELO/EHLO command. Otherwise, the result of
+ :func:`socket.getfqdn`. **Note that getfqdn will block the event loop.**
+ :keyword source_address: Takes a 2-tuple (host, port) for the socket to bind to
+ as its source address before connecting. If the host is '' and port is 0,
+ the OS default behavior will be used.
+ :keyword timeout: Default timeout value for the connection, in seconds.
+ Defaults to 60.
+ :keyword use_tls: If True, make the initial connection to the server
+ over TLS/SSL. Mutually exclusive with ``start_tls``; if the server uses
+ STARTTLS, ``use_tls`` should be ``False``.
+ :keyword start_tls: Flag to initiate a STARTTLS upgrade on connect.
+ If ``None`` (the default), upgrade will be initiated if supported by the
+ server.
+        If ``True``, an upgrade will be initiated regardless of server support.
+ If ``False``, no upgrade will occur.
+ Mutually exclusive with ``use_tls``.
+ :keyword validate_certs: Determines if server certificates are
+ validated. Defaults to ``True``.
+ :keyword client_cert: Path to client side certificate, for TLS.
+ :keyword client_key: Path to client side key, for TLS.
+ :keyword tls_context: An existing :py:class:`ssl.SSLContext`, for TLS.
+ Mutually exclusive with ``client_cert``/``client_key``.
+ :keyword cert_bundle: Path to certificate bundle, for TLS verification.
+ :keyword socket_path: Path to a Unix domain socket. Not compatible with
+ hostname or port. Accepts str, bytes, or a pathlike object.
+ :keyword sock: An existing, connected socket object. If given, none of
+ hostname, port, or socket_path should be provided.
+
+ :raises ValueError: required arguments missing or mutually exclusive options
+ provided
+ """
+ if not isinstance(message, (email.message.EmailMessage, email.message.Message)):
+ if not recipients:
+ raise ValueError("Recipients must be provided with raw messages.")
+ if not sender:
+ raise ValueError("Sender must be provided with raw messages.")
+
+ sender = cast(str, sender)
+ recipients = cast(Union[str, Sequence[str]], recipients)
+
+ client = SMTP(
+ hostname=hostname,
+ port=port,
+ local_hostname=local_hostname,
+ source_address=source_address,
+ timeout=timeout,
+ use_tls=use_tls,
+ start_tls=start_tls,
+ validate_certs=validate_certs,
+ client_cert=client_cert,
+ client_key=client_key,
+ tls_context=tls_context,
+ cert_bundle=cert_bundle,
+ socket_path=socket_path,
+ sock=sock,
+ username=username,
+ password=password,
+ )
+
+ async with client:
+ if isinstance(message, (email.message.EmailMessage, email.message.Message)):
+ result = await client.send_message(
+ message,
+ sender=sender,
+ recipients=recipients,
+ mail_options=mail_options,
+ rcpt_options=rcpt_options,
+ )
+ else:
+ result = await client.sendmail(
+ sender,
+ recipients,
+ message,
+ mail_options=mail_options,
+ rcpt_options=rcpt_options,
+ )
+
+ return result
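+
+
+# A minimal usage sketch (illustrative comment, not upstream code): when the
+# message is a plain ``str``/``bytes``, ``sender`` and ``recipients`` are
+# mandatory, otherwise ``send`` raises ValueError before connecting:
+#
+#     import asyncio
+#     import aiosmtplib
+#
+#     asyncio.run(
+#         aiosmtplib.send(
+#             "Subject: ping\r\n\r\nbody text",
+#             sender="root@localhost",
+#             recipients=["somebody@example.com"],
+#             hostname="127.0.0.1",
+#             port=25,
+#         )
+#     )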
diff --git a/Backend/venv/lib/python3.12/site-packages/aiosmtplib/auth.py b/Backend/venv/lib/python3.12/site-packages/aiosmtplib/auth.py
new file mode 100644
index 00000000..3c9d7e03
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/aiosmtplib/auth.py
@@ -0,0 +1,72 @@
+"""
+Authentication related methods.
+"""
+import base64
+import hmac
+from typing import Tuple, Union
+
+
+__all__ = ("auth_crammd5_verify", "auth_plain_encode", "auth_login_encode")
+
+
+def _ensure_bytes(value: Union[str, bytes]) -> bytes:
+ if isinstance(value, bytes):
+ return value
+
+ return value.encode("utf-8")
+
+
+def auth_crammd5_verify(
+ username: Union[str, bytes],
+ password: Union[str, bytes],
+ challenge: Union[str, bytes],
+ /,
+) -> bytes:
+ """
+    CRAM-MD5 auth uses the password as a shared secret to HMAC-MD5 the
+    server's challenge, and sends the username combined with that
+    (base64 encoded).
+ """
+ username_bytes = _ensure_bytes(username)
+ password_bytes = _ensure_bytes(password)
+ decoded_challenge = base64.b64decode(challenge)
+
+ md5_digest = hmac.new(password_bytes, msg=decoded_challenge, digestmod="md5")
+ verification = username_bytes + b" " + md5_digest.hexdigest().encode("ascii")
+ encoded_verification = base64.b64encode(verification)
+
+ return encoded_verification
+
+
+def auth_plain_encode(
+ username: Union[str, bytes],
+ password: Union[str, bytes],
+ /,
+) -> bytes:
+ """
+ PLAIN auth base64 encodes the username and password together.
+ """
+ username_bytes = _ensure_bytes(username)
+ password_bytes = _ensure_bytes(password)
+
+ username_and_password = b"\0" + username_bytes + b"\0" + password_bytes
+ encoded = base64.b64encode(username_and_password)
+
+ return encoded
+
+
+def auth_login_encode(
+ username: Union[str, bytes],
+ password: Union[str, bytes],
+ /,
+) -> Tuple[bytes, bytes]:
+ """
+ LOGIN auth base64 encodes the username and password and sends them
+ in sequence.
+ """
+ username_bytes = _ensure_bytes(username)
+ password_bytes = _ensure_bytes(password)
+
+ encoded_username = base64.b64encode(username_bytes)
+ encoded_password = base64.b64encode(password_bytes)
+
+ return encoded_username, encoded_password
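+
+
+# A minimal usage sketch (illustrative comment, not upstream code) of the
+# three helpers; ``server_challenge_b64`` is a placeholder for the base64
+# payload of the server's 334 reply:
+#
+#     auth_plain_encode("user", "pass")
+#     # -> base64 of b"\0user\0pass"
+#
+#     auth_login_encode("user", "pass")
+#     # -> (base64(b"user"), base64(b"pass")), sent as two successive replies
+#
+#     auth_crammd5_verify("user", "pass", server_challenge_b64)
+#     # -> base64 of b"user " + hex(HMAC-MD5(pass, challenge))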
diff --git a/Backend/venv/lib/python3.12/site-packages/aiosmtplib/email.py b/Backend/venv/lib/python3.12/site-packages/aiosmtplib/email.py
new file mode 100644
index 00000000..f728006e
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/aiosmtplib/email.py
@@ -0,0 +1,186 @@
+"""
+Email message and address formatting/parsing functions.
+"""
+import copy
+import email.charset
+import email.generator
+import email.header
+import email.headerregistry
+import email.message
+import email.policy
+import email.utils
+import io
+import re
+from typing import List, Optional, Tuple, Union
+
+
+__all__ = (
+ "extract_recipients",
+ "extract_sender",
+ "flatten_message",
+ "parse_address",
+ "quote_address",
+)
+
+
+LINE_SEP = "\r\n"
+SPECIALS_REGEX = re.compile(r'[][\\()<>@,:;".]')
+ESCAPES_REGEX = re.compile(r'[\\"]')
+UTF8_CHARSET = email.charset.Charset("utf-8")
+
+
+def parse_address(address: str) -> str:
+ """
+ Parse an email address, falling back to the raw string given.
+ """
+ display_name, parsed_address = email.utils.parseaddr(address)
+
+ return parsed_address or address.strip()
+
+
+def quote_address(address: str) -> str:
+ """
+ Quote a subset of the email addresses defined by RFC 821.
+ """
+ parsed_address = parse_address(address)
+ return f"<{parsed_address}>"
+
+
+def formataddr(pair: Tuple[str, str]) -> str:
+ """
+ Copied from the standard library, and modified to handle international (UTF-8)
+ email addresses.
+
+ The inverse of parseaddr(), this takes a 2-tuple of the form
+ (realname, email_address) and returns the string value suitable
+ for an RFC 2822 From, To or Cc header.
+ If the first element of pair is false, then the second element is
+ returned unmodified.
+ """
+ name, address = pair
+    if name:
+        try:
+            name.encode("ascii")
+        except UnicodeEncodeError:
+            encoded_name = UTF8_CHARSET.header_encode(name)
+            return f"{encoded_name} <{address}>"
+        else:
+            quotes = ""
+            if SPECIALS_REGEX.search(name):
+                quotes = '"'
+            name = ESCAPES_REGEX.sub(r"\\\g<0>", name)
+            return f"{quotes}{name}{quotes} <{address}>"
+
+    return address
+
+
+def flatten_message(
+ message: Union[email.message.EmailMessage, email.message.Message],
+ /,
+ *,
+ utf8: bool = False,
+ cte_type: str = "8bit",
+) -> bytes:
+ # Make a local copy so we can delete the bcc headers.
+ message_copy = copy.copy(message)
+ del message_copy["Bcc"]
+ del message_copy["Resent-Bcc"]
+
+ if isinstance(message, email.message.EmailMessage):
+ # New message class, default policy
+ policy = email.policy.default.clone(
+ linesep=LINE_SEP,
+ utf8=utf8,
+ cte_type=cte_type,
+ )
+ else:
+ # Old message class, Compat32 policy.
+ # Compat32 cannot use UTF8
+ policy = email.policy.compat32.clone(linesep=LINE_SEP, cte_type=cte_type)
+
+ with io.BytesIO() as messageio:
+ generator = email.generator.BytesGenerator(messageio, policy=policy)
+ generator.flatten(message_copy)
+ flat_message = messageio.getvalue()
+
+ return flat_message
+
+
+def extract_addresses(
+ header: Union[str, email.headerregistry.AddressHeader, email.header.Header],
+ /,
+) -> List[str]:
+ """
+ Convert address headers into raw email addresses, suitable for use in
+ low level SMTP commands.
+ """
+ addresses = []
+ if isinstance(header, email.headerregistry.AddressHeader):
+ for address in header.addresses:
+ # If the object has been assigned an iterable, it's possible to get
+ # a string here
+ if isinstance(address, email.headerregistry.Address):
+ addresses.append(address.addr_spec)
+ else:
+ addresses.append(parse_address(address))
+ elif isinstance(header, email.header.Header):
+ for address_bytes, charset in email.header.decode_header(header):
+ if charset is None:
+ charset = "ascii"
+ addresses.append(parse_address(str(address_bytes, encoding=charset)))
+ else:
+ addresses.extend(addr for _, addr in email.utils.getaddresses([header]))
+
+ return addresses
+
+
+def extract_sender(
+ message: Union[email.message.EmailMessage, email.message.Message],
+ /,
+) -> Optional[str]:
+ """
+ Extract the sender from the message object given.
+ """
+ resent_dates = message.get_all("Resent-Date")
+
+ if resent_dates is not None and len(resent_dates) > 1:
+ raise ValueError("Message has more than one 'Resent-' header block")
+ elif resent_dates:
+ sender_header_name = "Resent-Sender"
+ from_header_name = "Resent-From"
+ else:
+ sender_header_name = "Sender"
+ from_header_name = "From"
+
+ # Prefer the sender field per RFC 2822:3.6.2.
+ if sender_header_name in message:
+ sender_header = message[sender_header_name]
+ else:
+ sender_header = message[from_header_name]
+
+ if sender_header is None:
+ return None
+
+ return extract_addresses(sender_header)[0]
+
+
+def extract_recipients(
+ message: Union[email.message.EmailMessage, email.message.Message],
+ /,
+) -> List[str]:
+ """
+ Extract the recipients from the message object given.
+ """
+ recipients: List[str] = []
+
+ resent_dates = message.get_all("Resent-Date")
+
+ if resent_dates is not None and len(resent_dates) > 1:
+ raise ValueError("Message has more than one 'Resent-' header block")
+ elif resent_dates:
+ recipient_headers = ("Resent-To", "Resent-Cc", "Resent-Bcc")
+ else:
+ recipient_headers = ("To", "Cc", "Bcc")
+
+ for header in recipient_headers:
+ for recipient in message.get_all(header, failobj=[]):
+ recipients.extend(extract_addresses(recipient))
+
+ return recipients
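+
+
+# A minimal usage sketch (illustrative comment, not upstream code):
+#
+#     from email.message import EmailMessage
+#
+#     msg = EmailMessage()
+#     msg["From"] = "root@localhost"
+#     msg["To"] = "a@example.com, b@example.com"
+#     msg["Bcc"] = "hidden@example.com"
+#
+#     extract_sender(msg)      # "root@localhost"
+#     extract_recipients(msg)  # both To addresses plus the Bcc
+#     flatten_message(msg)     # bytes with CRLF endings, Bcc header removed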
diff --git a/Backend/venv/lib/python3.12/site-packages/aiosmtplib/errors.py b/Backend/venv/lib/python3.12/site-packages/aiosmtplib/errors.py
new file mode 100644
index 00000000..bd674ca5
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/aiosmtplib/errors.py
@@ -0,0 +1,137 @@
+from asyncio import TimeoutError
+from typing import List
+
+
+__all__ = (
+ "SMTPAuthenticationError",
+ "SMTPConnectError",
+ "SMTPDataError",
+ "SMTPException",
+ "SMTPHeloError",
+ "SMTPNotSupported",
+ "SMTPRecipientRefused",
+ "SMTPRecipientsRefused",
+ "SMTPResponseException",
+ "SMTPSenderRefused",
+ "SMTPServerDisconnected",
+ "SMTPTimeoutError",
+ "SMTPConnectTimeoutError",
+ "SMTPReadTimeoutError",
+ "SMTPConnectResponseError",
+)
+
+
+class SMTPException(Exception):
+ """
+ Base class for all SMTP exceptions.
+ """
+
+ def __init__(self, message: str, /) -> None:
+ self.message = message
+ self.args = (message,)
+
+
+class SMTPServerDisconnected(SMTPException, ConnectionError):
+ """
+ The connection was lost unexpectedly, or a command was run that requires
+ a connection.
+ """
+
+
+class SMTPConnectError(SMTPException, ConnectionError):
+ """
+ An error occurred while connecting to the SMTP server.
+ """
+
+
+class SMTPTimeoutError(SMTPException, TimeoutError):
+ """
+ A timeout occurred while performing a network operation.
+ """
+
+
+class SMTPConnectTimeoutError(SMTPTimeoutError, SMTPConnectError):
+ """
+ A timeout occurred while connecting to the SMTP server.
+ """
+
+
+class SMTPReadTimeoutError(SMTPTimeoutError):
+ """
+ A timeout occurred while waiting for a response from the SMTP server.
+ """
+
+
+class SMTPNotSupported(SMTPException):
+ """
+ A command or argument sent to the SMTP server is not supported.
+ """
+
+
+class SMTPResponseException(SMTPException):
+ """
+ Base class for all server responses with error codes.
+ """
+
+ def __init__(self, code: int, message: str, /) -> None:
+ self.code = code
+ self.message = message
+ self.args = (code, message)
+
+
+class SMTPConnectResponseError(SMTPResponseException, SMTPConnectError):
+ """
+ The SMTP server returned an invalid response code after connecting.
+ """
+
+
+class SMTPHeloError(SMTPResponseException):
+ """
+ Server refused HELO or EHLO.
+ """
+
+
+class SMTPDataError(SMTPResponseException):
+ """
+ Server refused DATA content.
+ """
+
+
+class SMTPAuthenticationError(SMTPResponseException):
+ """
+ Server refused our AUTH request; may be caused by invalid credentials.
+ """
+
+
+class SMTPSenderRefused(SMTPResponseException):
+ """
+ SMTP server refused the message sender.
+ """
+
+ def __init__(self, code: int, message: str, sender: str, /) -> None:
+ self.code = code
+ self.message = message
+ self.sender = sender
+ self.args = (code, message, sender)
+
+
+class SMTPRecipientRefused(SMTPResponseException):
+ """
+ SMTP server refused a message recipient.
+ """
+
+ def __init__(self, code: int, message: str, recipient: str, /) -> None:
+ self.code = code
+ self.message = message
+ self.recipient = recipient
+ self.args = (code, message, recipient)
+
+
+class SMTPRecipientsRefused(SMTPException):
+ """
+ SMTP server refused multiple recipients.
+ """
+
+ def __init__(self, recipients: List[SMTPRecipientRefused], /) -> None:
+ self.recipients = recipients
+ self.args = (recipients,)
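+
+
+# A handling sketch (illustrative comment, not upstream code): given a
+# connected SMTP ``client``, the refusal errors carry structured args that
+# callers can branch on:
+#
+#     try:
+#         await client.sendmail(sender, recipients, message)
+#     except SMTPRecipientsRefused as exc:
+#         for refusal in exc.recipients:
+#             print(refusal.recipient, refusal.code, refusal.message)
+#     except SMTPResponseException as exc:
+#         print("server said", exc.code, exc.message)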
diff --git a/Backend/venv/lib/python3.12/site-packages/aiosmtplib/esmtp.py b/Backend/venv/lib/python3.12/site-packages/aiosmtplib/esmtp.py
new file mode 100644
index 00000000..8610f322
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/aiosmtplib/esmtp.py
@@ -0,0 +1,72 @@
+"""
+ESMTP utils
+"""
+import re
+from typing import Dict, List, Tuple
+
+
+__all__ = ("parse_esmtp_extensions",)
+
+
+OLDSTYLE_AUTH_REGEX = re.compile(r"auth=(?P<auth>.*)", flags=re.I)
+EXTENSIONS_REGEX = re.compile(r"(?P<ext>[A-Za-z0-9][A-Za-z0-9\-]*) ?")
+
+
+def parse_esmtp_extensions(message: str) -> Tuple[Dict[str, str], List[str]]:
+ """
+ Parse an EHLO response from the server into a dict of {extension: params}
+ and a list of auth method names.
+
+ It might look something like:
+
+ 220 size.does.matter.af.MIL (More ESMTP than Crappysoft!)
+ EHLO heaven.af.mil
+ 250-size.does.matter.af.MIL offers FIFTEEN extensions:
+ 250-8BITMIME
+ 250-PIPELINING
+ 250-DSN
+ 250-ENHANCEDSTATUSCODES
+ 250-EXPN
+ 250-HELP
+ 250-SAML
+ 250-SEND
+ 250-SOML
+ 250-TURN
+ 250-XADR
+ 250-XSTA
+ 250-ETRN
+ 250-XGEN
+ 250 SIZE 51200000
+ """
+ esmtp_extensions = {}
+ auth_types: List[str] = []
+
+ response_lines = message.split("\n")
+
+ # ignore the first line
+ for line in response_lines[1:]:
+ # To be able to communicate with as many SMTP servers as possible,
+ # we have to take the old-style auth advertisement into account,
+ # because:
+ # 1) Else our SMTP feature parser gets confused.
+ # 2) There are some servers that only advertise the auth methods we
+ # support using the old style.
+ auth_match = OLDSTYLE_AUTH_REGEX.match(line)
+ if auth_match is not None:
+ auth_type = auth_match.group("auth")
+ auth_types.append(auth_type.lower().strip())
+
+ # RFC 1869 requires a space between ehlo keyword and parameters.
+ # It's actually stricter, in that only spaces are allowed between
+ # parameters, but we're not going to check for that here. Note
+ # that the space isn't present if there are no parameters.
+ extensions = EXTENSIONS_REGEX.match(line)
+ if extensions is not None:
+ extension = extensions.group("ext").lower()
+ params = extensions.string[extensions.end("ext") :].strip()
+ esmtp_extensions[extension] = params
+
+ if extension == "auth":
+ auth_types.extend([param.strip().lower() for param in params.split()])
+
+ return esmtp_extensions, auth_types
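A quick standalone illustration of the parser above; the banner line is skipped, ``AUTH`` parameters are folded into the auth method list, and expected output is shown in comments:

```python
# Quick illustration of parse_esmtp_extensions (not part of the patch).
from aiosmtplib.esmtp import parse_esmtp_extensions

ehlo_message = "\n".join(
    [
        "smtp.example.com greets client",  # banner line, ignored
        "8BITMIME",
        "SIZE 51200000",
        "AUTH PLAIN LOGIN CRAM-MD5",
        "STARTTLS",
    ]
)
extensions, auth_methods = parse_esmtp_extensions(ehlo_message)
print(extensions)
# {'8bitmime': '', 'size': '51200000', 'auth': 'PLAIN LOGIN CRAM-MD5', 'starttls': ''}
print(auth_methods)
# ['plain', 'login', 'cram-md5']
```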
diff --git a/Backend/venv/lib/python3.12/site-packages/aiosmtplib/protocol.py b/Backend/venv/lib/python3.12/site-packages/aiosmtplib/protocol.py
new file mode 100644
index 00000000..e7a4bc36
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/aiosmtplib/protocol.py
@@ -0,0 +1,373 @@
+"""
+An ``asyncio.Protocol`` subclass for lower level IO handling.
+"""
+import asyncio
+import collections
+import re
+import ssl
+from typing import Deque, Optional, cast
+
+from .errors import (
+ SMTPDataError,
+ SMTPReadTimeoutError,
+ SMTPResponseException,
+ SMTPServerDisconnected,
+ SMTPTimeoutError,
+)
+from .response import SMTPResponse
+from .typing import SMTPStatus
+
+
+__all__ = ("SMTPProtocol",)
+
+
+MAX_LINE_LENGTH = 8192
+LINE_ENDINGS_REGEX = re.compile(rb"(?:\r\n|\n|\r(?!\n))")
+PERIOD_REGEX = re.compile(rb"(?m)^\.")
+
+
+class FlowControlMixin(asyncio.Protocol):
+ """
+ Reusable flow control logic for StreamWriter.drain().
+ This implements the protocol methods pause_writing(),
+ resume_writing() and connection_lost(). If the subclass overrides
+ these it must call the super methods.
+ StreamWriter.drain() must wait for _drain_helper() coroutine.
+
+ Copied from stdlib as per recommendation: https://bugs.python.org/msg343685.
+ Logging and asserts removed, type annotations added.
+ """
+
+ def __init__(self, loop: Optional[asyncio.AbstractEventLoop] = None):
+ if loop is None:
+ self._loop = asyncio.get_event_loop()
+ else:
+ self._loop = loop
+
+ self._paused = False
+ self._drain_waiters: Deque[asyncio.Future[None]] = collections.deque()
+ self._connection_lost = False
+
+ def pause_writing(self) -> None:
+ self._paused = True
+
+ def resume_writing(self) -> None:
+ self._paused = False
+
+ for waiter in self._drain_waiters:
+ if not waiter.done():
+ waiter.set_result(None)
+
+ def connection_lost(self, exc: Optional[Exception]) -> None:
+ self._connection_lost = True
+ # Wake up the writer(s) if currently paused.
+ if not self._paused:
+ return
+
+ for waiter in self._drain_waiters:
+ if not waiter.done():
+ if exc is None:
+ waiter.set_result(None)
+ else:
+ waiter.set_exception(exc)
+
+ async def _drain_helper(self) -> None:
+ if self._connection_lost:
+ raise ConnectionResetError("Connection lost")
+ if not self._paused:
+ return
+ waiter = self._loop.create_future()
+ self._drain_waiters.append(waiter)
+ try:
+ await waiter
+ finally:
+ self._drain_waiters.remove(waiter)
+
+ def _get_close_waiter(self, stream: asyncio.StreamWriter) -> "asyncio.Future[None]":
+ raise NotImplementedError
+
+
+class SMTPProtocol(FlowControlMixin, asyncio.BaseProtocol):
+ def __init__(
+ self,
+ loop: Optional[asyncio.AbstractEventLoop] = None,
+ ) -> None:
+ super().__init__(loop=loop)
+ self._over_ssl = False
+ self._buffer = bytearray()
+ self._response_waiter: Optional[asyncio.Future[SMTPResponse]] = None
+
+ self.transport: Optional[asyncio.BaseTransport] = None
+ self._command_lock: Optional[asyncio.Lock] = None
+ self._closed: "asyncio.Future[None]" = self._loop.create_future()
+ self._quit_sent = False
+
+ def _get_close_waiter(self, stream: asyncio.StreamWriter) -> "asyncio.Future[None]":
+ return self._closed
+
+ def __del__(self) -> None:
+ # Avoid 'Future exception was never retrieved' warnings
+ # Some unknown race conditions can sometimes trigger these :(
+ self._retrieve_response_exception()
+
+ @property
+ def is_connected(self) -> bool:
+ """
+ Check if our transport is still connected.
+ """
+ return bool(self.transport is not None and not self.transport.is_closing())
+
+ def connection_made(self, transport: asyncio.BaseTransport) -> None:
+ self.transport = cast(asyncio.Transport, transport)
+ self._over_ssl = transport.get_extra_info("sslcontext") is not None
+ self._response_waiter = self._loop.create_future()
+ self._command_lock = asyncio.Lock()
+ self._quit_sent = False
+
+ def connection_lost(self, exc: Optional[Exception]) -> None:
+ super().connection_lost(exc)
+
+ if not self._quit_sent:
+ smtp_exc = SMTPServerDisconnected("Connection lost")
+ if exc:
+ smtp_exc.__cause__ = exc
+
+ if self._response_waiter and not self._response_waiter.done():
+ self._response_waiter.set_exception(smtp_exc)
+
+ self.transport = None
+ self._command_lock = None
+
+ def data_received(self, data: bytes) -> None:
+ if self._response_waiter is None:
+ raise RuntimeError(
+ f"data_received called without a response waiter set: {data!r}"
+ )
+ elif self._response_waiter.done():
+ # We got a response without issuing a command; ignore it.
+ return
+
+ self._buffer.extend(data)
+
+ # If we got an obvious partial message, don't try to parse the buffer
+ last_linebreak = data.rfind(b"\n")
+ if (
+ last_linebreak == -1
+ or data[last_linebreak + 3 : last_linebreak + 4] == b"-"
+ ):
+ return
+
+ try:
+ response = self._read_response_from_buffer()
+ except Exception as exc:
+ self._response_waiter.set_exception(exc)
+ else:
+ if response is not None:
+ self._response_waiter.set_result(response)
+
+ def eof_received(self) -> bool:
+ exc = SMTPServerDisconnected("Unexpected EOF received")
+ if self._response_waiter and not self._response_waiter.done():
+ self._response_waiter.set_exception(exc)
+
+ # Returning false closes the transport
+ return False
+
+ def _retrieve_response_exception(self) -> Optional[BaseException]:
+ """
+ Return any exception that has been set on the response waiter.
+
+ Used to avoid 'Future exception was never retrieved' warnings
+ """
+ if (
+ self._response_waiter
+ and self._response_waiter.done()
+ and not self._response_waiter.cancelled()
+ ):
+ return self._response_waiter.exception()
+
+ return None
+
+ def _read_response_from_buffer(self) -> Optional[SMTPResponse]:
+ """Parse the actual response (if any) from the data buffer"""
+ code = -1
+ message = bytearray()
+ offset = 0
+ message_complete = False
+
+ while True:
+ line_end_index = self._buffer.find(b"\n", offset)
+ if line_end_index == -1:
+ break
+
+ line = bytes(self._buffer[offset : line_end_index + 1])
+
+ if len(line) > MAX_LINE_LENGTH:
+ raise SMTPResponseException(
+ SMTPStatus.unrecognized_command, "Response too long"
+ )
+
+ try:
+ code = int(line[:3])
+ except ValueError:
+ raise SMTPResponseException(
+ SMTPStatus.invalid_response.value,
+ f"Malformed SMTP response line: {line!r}",
+ ) from None
+
+ offset += len(line)
+ if len(message):
+ message.extend(b"\n")
+ message.extend(line[4:].strip(b" \t\r\n"))
+ if line[3:4] != b"-":
+ message_complete = True
+ break
+
+ if message_complete:
+ response = SMTPResponse(
+ code, bytes(message).decode("utf-8", "surrogateescape")
+ )
+ del self._buffer[:offset]
+ return response
+ else:
+ return None
+
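The parsing rule above can be exercised in isolation: every reply line starts with a three-digit code, a ``-`` in the fourth column marks a continuation, and a space marks the final line. A standalone sketch mirroring the loop:

```python
# Standalone sketch of the reply-parsing rule used above (not part of
# the patch); the sample buffer is a typical multiline EHLO reply.
buffer = bytearray(b"250-smtp.example.com\r\n250-8BITMIME\r\n250 SIZE 51200000\r\n")

code = -1
message = bytearray()
offset = 0
while True:
    line_end = buffer.find(b"\n", offset)
    if line_end == -1:
        break  # partial reply: wait for more data before parsing
    line = bytes(buffer[offset : line_end + 1])
    code = int(line[:3])  # int() accepts bytes directly
    offset += len(line)
    if message:
        message.extend(b"\n")
    message.extend(line[4:].strip(b" \t\r\n"))
    if line[3:4] != b"-":
        break  # a space after the code marks the final line

print(code)              # 250
print(message.decode())  # smtp.example.com / 8BITMIME / SIZE 51200000 (three lines)
```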
+ async def read_response(self, timeout: Optional[float] = None) -> SMTPResponse:
+ """
+ Get a status response from the server.
+
+ This method must be awaited once per command sent; if multiple commands
+ are written to the transport without awaiting, response data will be lost.
+
+ Returns an :class:`.response.SMTPResponse` namedtuple consisting of:
+ - server response code (e.g. 250, or such, if all goes well)
+ - server response string (multiline responses are converted to a
+ single, multiline string).
+ """
+ if self._response_waiter is None:
+ raise SMTPServerDisconnected("Connection lost")
+
+ try:
+ result = await asyncio.wait_for(self._response_waiter, timeout)
+ except (TimeoutError, asyncio.TimeoutError) as exc:
+ raise SMTPReadTimeoutError("Timed out waiting for server response") from exc
+ finally:
+ # If we were disconnected, don't create a new waiter
+ if self.transport is None:
+ self._response_waiter = None
+ else:
+ self._response_waiter = self._loop.create_future()
+
+ return result
+
+ def write(self, data: bytes) -> None:
+ if self.transport is None or self.transport.is_closing():
+ raise SMTPServerDisconnected("Connection lost")
+ if not hasattr(self.transport, "write"):
+ raise RuntimeError(
+ f"Transport {self.transport!r} does not support writing."
+ )
+
+ self.transport.write(data) # type: ignore
+
+ async def execute_command(
+ self, *args: bytes, timeout: Optional[float] = None
+ ) -> SMTPResponse:
+ """
+ Sends an SMTP command along with any args to the server, and returns
+ a response.
+ """
+ if self._command_lock is None:
+ raise SMTPServerDisconnected("Server not connected")
+ command = b" ".join(args) + b"\r\n"
+
+ async with self._command_lock:
+ self.write(command)
+
+ if command == b"QUIT\r\n":
+ self._quit_sent = True
+
+ response = await self.read_response(timeout=timeout)
+
+ return response
+
+ async def execute_data_command(
+ self, message: bytes, timeout: Optional[float] = None
+ ) -> SMTPResponse:
+ """
+ Sends an SMTP DATA command to the server, followed by encoded message content.
+
+ Automatically quotes lines beginning with a period per RFC 821.
+ Lone \\\\r and \\\\n characters are converted to \\\\r\\\\n
+ characters.
+ """
+ if self._command_lock is None:
+ raise SMTPServerDisconnected("Server not connected")
+
+ message = LINE_ENDINGS_REGEX.sub(b"\r\n", message)
+ message = PERIOD_REGEX.sub(b"..", message)
+ if not message.endswith(b"\r\n"):
+ message += b"\r\n"
+ message += b".\r\n"
+
+ async with self._command_lock:
+ self.write(b"DATA\r\n")
+ start_response = await self.read_response(timeout=timeout)
+ if start_response.code != SMTPStatus.start_input:
+ raise SMTPDataError(start_response.code, start_response.message)
+
+ self.write(message)
+ response = await self.read_response(timeout=timeout)
+ if response.code != SMTPStatus.completed:
+ raise SMTPDataError(response.code, response.message)
+
+ return response
+
+ async def start_tls(
+ self,
+ tls_context: ssl.SSLContext,
+ server_hostname: Optional[str] = None,
+ timeout: Optional[float] = None,
+ ) -> SMTPResponse:
+ """
+ Puts the connection to the SMTP server into TLS mode.
+ """
+ if self._over_ssl:
+ raise RuntimeError("Already using TLS.")
+ if self._command_lock is None:
+ raise SMTPServerDisconnected("Server not connected")
+
+ async with self._command_lock:
+ self.write(b"STARTTLS\r\n")
+ response = await self.read_response(timeout=timeout)
+ if response.code != SMTPStatus.ready:
+ raise SMTPResponseException(response.code, response.message)
+
+ # Check for disconnect after response
+ if self.transport is None or self.transport.is_closing():
+ raise SMTPServerDisconnected("Connection lost")
+
+ try:
+ tls_transport = await self._loop.start_tls(
+ self.transport,
+ self,
+ tls_context,
+ server_side=False,
+ server_hostname=server_hostname,
+ ssl_handshake_timeout=timeout,
+ )
+ except (TimeoutError, asyncio.TimeoutError) as exc:
+ raise SMTPTimeoutError("Timed out while upgrading transport") from exc
+ # SSLProtocol only raises ConnectionAbortedError on timeout
+ except ConnectionAbortedError as exc:
+ raise SMTPTimeoutError(exc.args[0]) from exc
+ except ConnectionResetError as exc:
+ if exc.args:
+ message = exc.args[0]
+ else:
+ message = "Connection was reset while upgrading transport"
+ raise SMTPServerDisconnected(message) from exc
+
+ self.transport = tls_transport
+
+ return response
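The payload normalization performed by ``execute_data_command`` above is easy to verify standalone; a sketch of the line-ending conversion and dot-stuffing:

```python
# Standalone sketch of the normalization in execute_data_command above:
# bare \r or \n become \r\n, leading periods are doubled ("dot-stuffed"),
# and the terminating ".\r\n" sequence is appended.
import re

LINE_ENDINGS_REGEX = re.compile(rb"(?:\r\n|\n|\r(?!\n))")
PERIOD_REGEX = re.compile(rb"(?m)^\.")

message = b"Subject: test\n\n.hidden line\rlast line"
message = LINE_ENDINGS_REGEX.sub(b"\r\n", message)
message = PERIOD_REGEX.sub(b"..", message)
if not message.endswith(b"\r\n"):
    message += b"\r\n"
message += b".\r\n"

print(message)
# b'Subject: test\r\n\r\n..hidden line\r\nlast line\r\n.\r\n'
```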
diff --git a/Backend/venv/lib/python3.12/site-packages/aiosmtplib/py.typed b/Backend/venv/lib/python3.12/site-packages/aiosmtplib/py.typed
new file mode 100644
index 00000000..527f7352
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/aiosmtplib/py.typed
@@ -0,0 +1 @@
+This file exists to help mypy (and other tools) find inline type hints. See PR #141 and PEP 561.
diff --git a/Backend/venv/lib/python3.12/site-packages/aiosmtplib/response.py b/Backend/venv/lib/python3.12/site-packages/aiosmtplib/response.py
new file mode 100644
index 00000000..e965ad3a
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/aiosmtplib/response.py
@@ -0,0 +1,33 @@
+"""
+SMTPResponse class, a simple namedtuple of (code, message).
+"""
+from typing import NamedTuple
+
+
+__all__ = ("SMTPResponse",)
+
+
+class SMTPResponse(NamedTuple):
+ """
+ NamedTuple of server response code and server response message.
+
+ ``code`` and ``message`` can be accessed via attributes or indexes:
+
+ >>> response = SMTPResponse(200, "OK")
+ >>> response.message
+ 'OK'
+ >>> response[0]
+ 200
+ >>> response.code
+ 200
+
+ """
+
+ code: int
+ message: str
+
+ def __repr__(self) -> str:
+ return f"({self.code}, {self.message})"
+
+ def __str__(self) -> str:
+ return f"{self.code} {self.message}"
diff --git a/Backend/venv/lib/python3.12/site-packages/aiosmtplib/smtp.py b/Backend/venv/lib/python3.12/site-packages/aiosmtplib/smtp.py
new file mode 100644
index 00000000..e91b2f52
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/aiosmtplib/smtp.py
@@ -0,0 +1,1459 @@
+"""
+Main SMTP client class.
+
+Implements SMTP, ESMTP & Auth methods.
+"""
+import asyncio
+import email.message
+import socket
+import ssl
+from typing import Any, Dict, Iterable, List, Optional, Sequence, Tuple, Type, Union
+
+from .auth import auth_crammd5_verify, auth_login_encode, auth_plain_encode
+from .email import (
+ extract_recipients,
+ extract_sender,
+ flatten_message,
+ parse_address,
+ quote_address,
+)
+from .errors import (
+ SMTPAuthenticationError,
+ SMTPConnectError,
+ SMTPConnectTimeoutError,
+ SMTPException,
+ SMTPHeloError,
+ SMTPNotSupported,
+ SMTPRecipientRefused,
+ SMTPRecipientsRefused,
+ SMTPResponseException,
+ SMTPSenderRefused,
+ SMTPServerDisconnected,
+ SMTPTimeoutError,
+ SMTPConnectResponseError,
+)
+from .esmtp import parse_esmtp_extensions
+from .protocol import SMTPProtocol
+from .response import SMTPResponse
+from .typing import Default, SMTPStatus, SocketPathType, _default
+
+
+__all__ = ("SMTP", "SMTP_PORT", "SMTP_TLS_PORT", "SMTP_STARTTLS_PORT")
+
+SMTP_PORT = 25
+SMTP_TLS_PORT = 465
+SMTP_STARTTLS_PORT = 587
+DEFAULT_TIMEOUT = 60
+
+
+class SMTP:
+ """
+ Main SMTP client class.
+
+ Basic usage:
+
+ >>> event_loop = asyncio.get_event_loop()
+ >>> smtp = aiosmtplib.SMTP(hostname="127.0.0.1", port=1025)
+ >>> event_loop.run_until_complete(smtp.connect())
+ (220, ...)
+ >>> sender = "root@localhost"
+ >>> recipients = ["somebody@localhost"]
+ >>> message = "Hello World"
+ >>> send = smtp.sendmail(sender, recipients, "Hello World")
+ >>> event_loop.run_until_complete(send)
+ ({}, 'OK')
+
+ Keyword arguments can be provided either on :meth:`__init__` or when
+ calling the :meth:`connect` method. Note that in both cases these options,
+ except for ``timeout``, are saved for later use; subsequent calls to
+ :meth:`connect` will use the same options, unless new ones are provided.
+ ``timeout`` is saved for later use when provided on :meth:`__init__`, but
+ not when calling the :meth:`connect` method.
+ """
+
+ # Preferred methods first
+ AUTH_METHODS: Tuple[str, ...] = (
+ "cram-md5",
+ "plain",
+ "login",
+ )
+
+ def __init__(
+ self,
+ *,
+ hostname: Optional[str] = "localhost",
+ port: Optional[int] = None,
+ username: Optional[Union[str, bytes]] = None,
+ password: Optional[Union[str, bytes]] = None,
+ local_hostname: Optional[str] = None,
+ source_address: Optional[Tuple[str, int]] = None,
+ timeout: Optional[float] = DEFAULT_TIMEOUT,
+ use_tls: bool = False,
+ start_tls: Optional[bool] = None,
+ validate_certs: bool = True,
+ client_cert: Optional[str] = None,
+ client_key: Optional[str] = None,
+ tls_context: Optional[ssl.SSLContext] = None,
+ cert_bundle: Optional[str] = None,
+ socket_path: Optional[SocketPathType] = None,
+ sock: Optional[socket.socket] = None,
+ ) -> None:
+ """
+ :keyword hostname: Server name (or IP) to connect to. Defaults to "localhost".
+ :keyword port: Server port. Defaults to ``465`` if ``use_tls`` is ``True``,
+ ``587`` if ``start_tls`` is ``True``, or ``25`` otherwise.
+ :keyword username: Username to login as after connect.
+ :keyword password: Password for login after connect.
+ :keyword local_hostname: The hostname of the client. If specified, used as the
+ FQDN of the local host in the HELO/EHLO command. Otherwise, the result of
+ :func:`socket.getfqdn`. **Note that getfqdn will block the event loop.**
+ :keyword source_address: Takes a 2-tuple (host, port) for the socket to bind to
+ as its source address before connecting. If the host is '' and port is 0,
+ the OS default behavior will be used.
+ :keyword timeout: Default timeout value for the connection, in seconds.
+ Defaults to 60.
+ :keyword use_tls: If True, make the initial connection to the server
+ over TLS/SSL. Mutually exclusive with ``start_tls``; if the server uses
+ STARTTLS, ``use_tls`` should be ``False``.
+ :keyword start_tls: Flag to initiate a STARTTLS upgrade on connect.
+ If ``None`` (the default), upgrade will be initiated if supported by the
+ server.
+ If ``True``, an upgrade will be initiated regardless of server support.
+ If ``False``, no upgrade will occur.
+ Mutually exclusive with ``use_tls``.
+ :keyword validate_certs: Determines if server certificates are
+ validated. Defaults to ``True``.
+ :keyword client_cert: Path to client side certificate, for TLS.
+ :keyword client_key: Path to client side key, for TLS.
+ :keyword tls_context: An existing :py:class:`ssl.SSLContext`, for TLS.
+ Mutually exclusive with ``client_cert``/``client_key``.
+ :keyword cert_bundle: Path to certificate bundle, for TLS verification.
+ :keyword socket_path: Path to a Unix domain socket. Not compatible with
+ hostname or port. Accepts str, bytes, or a pathlike object.
+ :keyword sock: An existing, connected socket object. If given, none of
+ hostname, port, or socket_path should be provided.
+
+ :raises ValueError: mutually exclusive options provided
+ """
+ self.protocol: Optional[SMTPProtocol] = None
+ self.transport: Optional[asyncio.BaseTransport] = None
+
+ # Kwarg defaults are provided here, and saved for connect.
+ self.hostname = hostname
+ self.port = port
+ self._login_username = username
+ self._login_password = password
+ self._local_hostname = local_hostname
+ self.timeout = timeout
+ self.use_tls = use_tls
+ self._start_tls_on_connect = start_tls
+ self.validate_certs = validate_certs
+ self.client_cert = client_cert
+ self.client_key = client_key
+ self.tls_context = tls_context
+ self.cert_bundle = cert_bundle
+ self.socket_path = socket_path
+ self.sock = sock
+ self.source_address = source_address
+
+ self.loop: Optional[asyncio.AbstractEventLoop] = None
+ self._connect_lock: Optional[asyncio.Lock] = None
+ self.last_helo_response: Optional[SMTPResponse] = None
+ self._last_ehlo_response: Optional[SMTPResponse] = None
+ self.esmtp_extensions: Dict[str, str] = {}
+ self.supports_esmtp = False
+ self.server_auth_methods: List[str] = []
+ self._sendmail_lock: Optional[asyncio.Lock] = None
+
+ self._validate_config()
+
+ async def __aenter__(self) -> "SMTP":
+ if not self.is_connected:
+ await self.connect()
+
+ return self
+
+ async def __aexit__(
+ self, exc_type: Type[BaseException], exc: BaseException, traceback: Any
+ ) -> None:
+ if isinstance(exc, (ConnectionError, TimeoutError)):
+ self.close()
+ return
+
+ try:
+ await self.quit()
+ except (SMTPServerDisconnected, SMTPResponseException, SMTPTimeoutError):
+ pass
+
+ @property
+ def is_connected(self) -> bool:
+ """
+ Check if our transport is still connected.
+ """
+ return bool(self.protocol is not None and self.protocol.is_connected)
+
+ @property
+ def local_hostname(self) -> str:
+ """
+ Get the system hostname to be sent to the SMTP server.
+ Simply caches the result of :func:`socket.getfqdn`.
+ """
+ if self._local_hostname is None:
+ self._local_hostname = socket.getfqdn()
+
+ return self._local_hostname
+
+ @property
+ def last_ehlo_response(self) -> Union[SMTPResponse, None]:
+ return self._last_ehlo_response
+
+ @last_ehlo_response.setter
+ def last_ehlo_response(self, response: SMTPResponse) -> None:
+ """
+ When setting the last EHLO response, parse the message for supported
+ extensions and auth methods.
+ """
+ extensions, auth_methods = parse_esmtp_extensions(response.message)
+ self._last_ehlo_response = response
+ self.esmtp_extensions = extensions
+ self.server_auth_methods = auth_methods
+ self.supports_esmtp = True
+
+ @property
+ def is_ehlo_or_helo_needed(self) -> bool:
+ """
+ Check if we've already received a response to an EHLO or HELO command.
+ """
+ return self.last_ehlo_response is None and self.last_helo_response is None
+
+ @property
+ def supported_auth_methods(self) -> List[str]:
+ """
+ Get all AUTH methods supported by both the server and this client.
+ """
+ return [auth for auth in self.AUTH_METHODS if auth in self.server_auth_methods]
+
+ def _update_settings_from_kwargs(
+ self,
+ hostname: Optional[Union[str, Default]] = _default,
+ port: Optional[Union[int, Default]] = _default,
+ username: Optional[Union[str, bytes, Default]] = _default,
+ password: Optional[Union[str, bytes, Default]] = _default,
+ local_hostname: Optional[Union[str, Default]] = _default,
+ source_address: Optional[Union[Tuple[str, int], Default]] = _default,
+ use_tls: Optional[bool] = None,
+ start_tls: Optional[Union[bool, Default]] = _default,
+ validate_certs: Optional[bool] = None,
+ client_cert: Optional[Union[str, Default]] = _default,
+ client_key: Optional[Union[str, Default]] = _default,
+ tls_context: Optional[Union[ssl.SSLContext, Default]] = _default,
+ cert_bundle: Optional[Union[str, Default]] = _default,
+ socket_path: Optional[Union[SocketPathType, Default]] = _default,
+ sock: Optional[Union[socket.socket, Default]] = _default,
+ ) -> None:
+ """Update our configuration from the kwargs provided.
+
+ This method can be called multiple times.
+ """
+ if hostname is not _default:
+ self.hostname = hostname
+ if use_tls is not None:
+ self.use_tls = use_tls
+ if start_tls is not _default:
+ self._start_tls_on_connect = start_tls
+ if validate_certs is not None:
+ self.validate_certs = validate_certs
+ if port is not _default:
+ self.port = port
+ if username is not _default:
+ self._login_username = username
+ if password is not _default:
+ self._login_password = password
+
+ if local_hostname is not _default:
+ self._local_hostname = local_hostname
+ if source_address is not _default:
+ self.source_address = source_address
+ if client_cert is not _default:
+ self.client_cert = client_cert
+ if client_key is not _default:
+ self.client_key = client_key
+ if tls_context is not _default:
+ self.tls_context = tls_context
+ if cert_bundle is not _default:
+ self.cert_bundle = cert_bundle
+ if socket_path is not _default:
+ self.socket_path = socket_path
+ if sock is not _default:
+ self.sock = sock
+
+ def _validate_config(self) -> None:
+ if self._start_tls_on_connect and self.use_tls:
+ raise ValueError("The start_tls and use_tls options are not compatible.")
+
+ if self.tls_context is not None and self.client_cert is not None:
+ raise ValueError(
+ "Either a TLS context or a certificate/key must be provided"
+ )
+
+ if self.sock is not None and any([self.hostname, self.port, self.socket_path]):
+ raise ValueError(
+ "The socket option is not compatible with hostname, port or socket_path"
+ )
+
+ if self.socket_path is not None and any([self.hostname, self.port]):
+ raise ValueError(
+ "The socket_path option is not compatible with hostname/port"
+ )
+
+ if self._local_hostname is not None and (
+ "\r" in self._local_hostname or "\n" in self._local_hostname
+ ):
+ raise ValueError(
+ "The local_hostname param contains prohibited newline characters"
+ )
+
+ if self.hostname is not None and (
+ "\r" in self.hostname or "\n" in self.hostname
+ ):
+ raise ValueError(
+ "The hostname param contains prohibited newline characters"
+ )
+
+ def _get_default_port(self) -> int:
+ """
+ Return an appropriate default port, based on options selected.
+ """
+ if self.use_tls:
+ return SMTP_TLS_PORT
+ elif self._start_tls_on_connect:
+ return SMTP_STARTTLS_PORT
+
+ return SMTP_PORT
+
+ async def connect(
+ self,
+ *,
+ hostname: Optional[Union[str, Default]] = _default,
+ port: Optional[Union[int, Default]] = _default,
+ username: Optional[Union[str, bytes, Default]] = _default,
+ password: Optional[Union[str, bytes, Default]] = _default,
+ local_hostname: Optional[Union[str, Default]] = _default,
+ source_address: Optional[Union[Tuple[str, int], Default]] = _default,
+ timeout: Optional[Union[float, Default]] = _default,
+ use_tls: Optional[bool] = None,
+ start_tls: Optional[Union[bool, Default]] = _default,
+ validate_certs: Optional[bool] = None,
+ client_cert: Optional[Union[str, Default]] = _default,
+ client_key: Optional[Union[str, Default]] = _default,
+ tls_context: Optional[Union[ssl.SSLContext, Default]] = _default,
+ cert_bundle: Optional[Union[str, Default]] = _default,
+ socket_path: Optional[Union[SocketPathType, Default]] = _default,
+ sock: Optional[Union[socket.socket, Default]] = _default,
+ ) -> SMTPResponse:
+ """
+ Initialize a connection to the server. Options provided to
+ :meth:`.connect` take precedence over those used to initialize the
+ class.
+
+ :keyword hostname: Server name (or IP) to connect to. Defaults to "localhost".
+ :keyword port: Server port. Defaults to ``465`` if ``use_tls`` is ``True``,
+ ``587`` if ``start_tls`` is ``True``, or ``25`` otherwise.
+ :keyword username: Username to login as after connect.
+ :keyword password: Password for login after connect.
+ :keyword local_hostname: The hostname of the client. If specified, used as the
+ FQDN of the local host in the HELO/EHLO command. Otherwise, the result of
+ :func:`socket.getfqdn`. **Note that getfqdn will block the event loop.**
+ :keyword source_address: Takes a 2-tuple (host, port) for the socket to bind to
+ as its source address before connecting. If the host is '' and port is 0,
+ the OS default behavior will be used.
+ :keyword timeout: Default timeout value for the connection, in seconds.
+ Defaults to 60.
+ :keyword use_tls: If True, make the initial connection to the server
+ over TLS/SSL. Mutually exclusive with ``start_tls``; if the server uses
+ STARTTLS, ``use_tls`` should be ``False``.
+ :keyword start_tls: Flag to initiate a STARTTLS upgrade on connect.
+ If ``None`` (the default), upgrade will be initiated if supported by the
+ server.
+ If ``True``, an upgrade will be initiated regardless of server support.
+ If ``False``, no upgrade will occur.
+ Mutually exclusive with ``use_tls``.
+ :keyword validate_certs: Determines if server certificates are
+ validated. Defaults to ``True``.
+ :keyword client_cert: Path to client side certificate, for TLS.
+ :keyword client_key: Path to client side key, for TLS.
+ :keyword tls_context: An existing :py:class:`ssl.SSLContext`, for TLS.
+ Mutually exclusive with ``client_cert``/``client_key``.
+ :keyword cert_bundle: Path to certificate bundle, for TLS verification.
+ :keyword socket_path: Path to a Unix domain socket. Not compatible with
+ hostname or port. Accepts str, bytes, or a pathlike object.
+ :keyword sock: An existing, connected socket object. If given, none of
+ hostname, port, or socket_path should be provided.
+
+ :raises ValueError: mutually exclusive options provided
+ """
+ self._update_settings_from_kwargs(
+ hostname=hostname,
+ port=port,
+ local_hostname=local_hostname,
+ source_address=source_address,
+ use_tls=use_tls,
+ start_tls=start_tls,
+ validate_certs=validate_certs,
+ client_cert=client_cert,
+ client_key=client_key,
+ tls_context=tls_context,
+ cert_bundle=cert_bundle,
+ socket_path=socket_path,
+ sock=sock,
+ username=username,
+ password=password,
+ )
+ self._validate_config()
+
+ self.loop = asyncio.get_running_loop()
+ if self._connect_lock is None:
+ self._connect_lock = asyncio.Lock()
+ await self._connect_lock.acquire()
+
+ # Set default port last in case use_tls or start_tls is provided,
+ # and only if we're not using a socket.
+ if self.port is None and self.sock is None and self.socket_path is None:
+ self.port = self._get_default_port()
+
+ try:
+ response = await self._create_connection(
+ timeout=self.timeout if timeout is _default else timeout
+ )
+ except Exception as exc:
+ self.close() # Reset our state to disconnected
+ raise exc
+
+ await self._maybe_start_tls_on_connect()
+ await self._maybe_login_on_connect()
+
+ return response
+
+ async def _create_connection(self, timeout: Optional[float]) -> SMTPResponse:
+ if self.loop is None:
+ raise RuntimeError("No event loop set")
+
+ protocol = SMTPProtocol(loop=self.loop)
+
+ tls_context: Optional[ssl.SSLContext] = None
+ ssl_handshake_timeout: Optional[float] = None
+ if self.use_tls:
+ tls_context = self._get_tls_context()
+ ssl_handshake_timeout = timeout
+
+ if self.sock is not None:
+ connect_coro = self.loop.create_connection(
+ lambda: protocol,
+ sock=self.sock,
+ ssl=tls_context,
+ ssl_handshake_timeout=ssl_handshake_timeout,
+ )
+ elif self.socket_path is not None:
+ connect_coro = self.loop.create_unix_connection(
+ lambda: protocol,
+ path=self.socket_path, # type: ignore
+ ssl=tls_context,
+ ssl_handshake_timeout=ssl_handshake_timeout,
+ )
+ else:
+ if self.hostname is None:
+ raise RuntimeError("No hostname provided; default should have been set")
+ if self.port is None:
+ raise RuntimeError("No port provided; default should have been set")
+
+ connect_coro = self.loop.create_connection(
+ lambda: protocol,
+ host=self.hostname,
+ port=self.port,
+ ssl=tls_context,
+ ssl_handshake_timeout=ssl_handshake_timeout,
+ local_addr=self.source_address,
+ )
+
+ try:
+ transport, _ = await asyncio.wait_for(connect_coro, timeout=timeout)
+ except (TimeoutError, asyncio.TimeoutError) as exc:
+ raise SMTPConnectTimeoutError(
+ f"Timed out connecting to {self.hostname} on port {self.port}"
+ ) from exc
+ except OSError as exc:
+ raise SMTPConnectError(
+ f"Error connecting to {self.hostname} on port {self.port}: {exc}"
+ ) from exc
+
+ self.protocol = protocol
+ self.transport = transport
+
+ try:
+ response = await protocol.read_response(timeout=timeout)
+ except SMTPServerDisconnected as exc:
+ raise SMTPConnectError(
+ f"Error connecting to {self.hostname} on port {self.port}: {exc}"
+ ) from exc
+ except SMTPTimeoutError as exc:
+ raise SMTPConnectTimeoutError(
+ "Timed out waiting for server ready message"
+ ) from exc
+
+ if response.code != SMTPStatus.ready:
+ raise SMTPConnectResponseError(response.code, response.message)
+
+ return response
+
+ def _connection_lost(self, waiter: "asyncio.Future[None]") -> None:
+ self.close()
+
+ async def _maybe_start_tls_on_connect(self) -> None:
+ """
+ Depending on config, upgrade the connection via STARTTLS.
+ """
+ if self._start_tls_on_connect is True:
+ await self.starttls()
+ # If _start_tls_on_connect hasn't been set either way,
+ # try to STARTTLS if supported, with graceful failure handling
+ elif self._start_tls_on_connect is None:
+ already_using_tls = self.get_transport_info("sslcontext") is not None
+ if not (self.use_tls or already_using_tls):
+ await self._ehlo_or_helo_if_needed()
+ if self.supports_extension("starttls"):
+ await self.starttls()
+
+ async def _maybe_login_on_connect(self) -> None:
+ """
+ Depending on config, login after connecting.
+ """
+ if self._login_username is not None:
+ login_password = (
+ self._login_password if self._login_password is not None else ""
+ )
+ await self.login(self._login_username, login_password)
+
+ async def execute_command(
+ self, *args: bytes, timeout: Optional[Union[float, Default]] = _default
+ ) -> SMTPResponse:
+ """
+ Check that we're connected, apply the timeout if one was given, and then
+ pass the command to the protocol.
+
+ :raises SMTPServerDisconnected: connection lost
+ """
+ if self.protocol is None:
+ raise SMTPServerDisconnected("Server not connected")
+
+ response = await self.protocol.execute_command(
+ *args, timeout=self.timeout if timeout is _default else timeout
+ )
+
+ # If the server is unavailable, be nice and close the connection
+ if response.code == SMTPStatus.domain_unavailable:
+ self.close()
+
+ return response
+
+ def _get_tls_context(self) -> ssl.SSLContext:
+ """
+ Build an SSLContext object from the options we've been given.
+ """
+ if self.tls_context is not None:
+ context = self.tls_context
+ else:
+ # SERVER_AUTH is what we want for a client side socket
+ context = ssl.create_default_context(ssl.Purpose.SERVER_AUTH)
+ context.check_hostname = bool(self.validate_certs)
+ if self.validate_certs:
+ context.verify_mode = ssl.CERT_REQUIRED
+ else:
+ context.verify_mode = ssl.CERT_NONE
+
+ if self.cert_bundle is not None:
+ context.load_verify_locations(cafile=self.cert_bundle)
+
+ if self.client_cert is not None:
+ context.load_cert_chain(self.client_cert, keyfile=self.client_key)
+
+ return context
+
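A hedged sketch of passing a prebuilt context via ``tls_context`` (the bundle path is a placeholder); per ``_validate_config`` above, this option is mutually exclusive with ``client_cert``/``client_key``:

```python
# Hedged sketch: implicit-TLS connection with a caller-supplied context,
# mirroring what _get_tls_context builds by default. Paths are placeholders.
import ssl

import aiosmtplib

context = ssl.create_default_context(ssl.Purpose.SERVER_AUTH)
context.load_verify_locations(cafile="/path/to/ca-bundle.pem")

smtp = aiosmtplib.SMTP(
    hostname="smtp.example.com",
    port=465,
    use_tls=True,         # implicit TLS from the first byte
    tls_context=context,  # mutually exclusive with client_cert/client_key
)
```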
+ def close(self) -> None:
+ """
+ Closes the connection.
+ """
+ if self.transport is not None and not self.transport.is_closing():
+ self.transport.close()
+
+ if self._connect_lock is not None and self._connect_lock.locked():
+ self._connect_lock.release()
+
+ self.protocol = None
+ self.transport = None
+
+ # Reset ESMTP state
+ self._reset_server_state()
+
+ def get_transport_info(self, key: str) -> Any:
+ """
+ Get extra info from the transport.
+ Supported keys:
+
+ - ``peername``
+ - ``socket``
+ - ``sockname``
+ - ``compression``
+ - ``cipher``
+ - ``peercert``
+ - ``sslcontext``
+ - ``sslobject``
+
+ :raises SMTPServerDisconnected: connection lost
+ """
+ if not (self.is_connected and self.transport):
+ raise SMTPServerDisconnected("Server not connected")
+
+ return self.transport.get_extra_info(key)
+
+ # Base SMTP commands #
+
+ async def helo(
+ self,
+ *,
+ hostname: Optional[str] = None,
+ timeout: Optional[Union[float, Default]] = _default,
+ ) -> SMTPResponse:
+ """
+ Send the SMTP HELO command.
+ Hostname to send for this command defaults to the FQDN of the local
+ host.
+
+ :raises SMTPHeloError: on unexpected server response code
+ """
+ response = self.last_helo_response = await self.execute_command(
+ b"HELO", (hostname or self.local_hostname).encode("ascii"), timeout=timeout
+ )
+
+ if response.code != SMTPStatus.completed:
+ raise SMTPHeloError(response.code, response.message)
+
+ return response
+
+ async def help(self, *, timeout: Optional[Union[float, Default]] = _default) -> str:
+ """
+ Send the SMTP HELP command, which responds with help text.
+
+ :raises SMTPResponseException: on unexpected server response code
+ """
+ await self._ehlo_or_helo_if_needed()
+
+ response = await self.execute_command(b"HELP", timeout=timeout)
+ if response.code not in (
+ SMTPStatus.system_status_ok,
+ SMTPStatus.help_message,
+ SMTPStatus.completed,
+ ):
+ raise SMTPResponseException(response.code, response.message)
+
+ return response.message
+
+ async def rset(
+ self, *, timeout: Optional[Union[float, Default]] = _default
+ ) -> SMTPResponse:
+ """
+ Send an SMTP RSET command, which resets the server's envelope
+ (the envelope contains the sender, recipient, and mail data).
+
+ :raises SMTPResponseException: on unexpected server response code
+ """
+ await self._ehlo_or_helo_if_needed()
+
+ response = await self.execute_command(b"RSET", timeout=timeout)
+ if response.code != SMTPStatus.completed:
+ raise SMTPResponseException(response.code, response.message)
+
+ return response
+
+ async def noop(
+ self, *, timeout: Optional[Union[float, Default]] = _default
+ ) -> SMTPResponse:
+ """
+ Send an SMTP NOOP command, which does nothing.
+
+ :raises SMTPResponseException: on unexpected server response code
+ """
+ await self._ehlo_or_helo_if_needed()
+
+ response = await self.execute_command(b"NOOP", timeout=timeout)
+ if response.code != SMTPStatus.completed:
+ raise SMTPResponseException(response.code, response.message)
+
+ return response
+
+ async def vrfy(
+ self,
+ address: str,
+ /,
+ *,
+ options: Optional[Iterable[str]] = None,
+ timeout: Optional[Union[float, Default]] = _default,
+ ) -> SMTPResponse:
+ """
+ Send an SMTP VRFY command, which tests an address for validity.
+ Not many servers support this command.
+
+ :raises SMTPResponseException: on unexpected server response code
+ """
+ await self._ehlo_or_helo_if_needed()
+
+ if options is None:
+ options = []
+
+ parsed_address = parse_address(address)
+ if any(option.lower() == "smtputf8" for option in options):
+ if not self.supports_extension("smtputf8"):
+ raise SMTPNotSupported("SMTPUTF8 is not supported by this server")
+ addr_bytes = parsed_address.encode("utf-8")
+ else:
+ addr_bytes = parsed_address.encode("ascii")
+ options_bytes = [option.encode("ascii") for option in options]
+
+ response = await self.execute_command(
+ b"VRFY", addr_bytes, *options_bytes, timeout=timeout
+ )
+
+ if response.code not in (
+ SMTPStatus.completed,
+ SMTPStatus.will_forward,
+ SMTPStatus.cannot_vrfy,
+ ):
+ raise SMTPResponseException(response.code, response.message)
+
+ return response
+
+ async def expn(
+ self,
+ address: str,
+ /,
+ *,
+ options: Optional[Iterable[str]] = None,
+ timeout: Optional[Union[float, Default]] = _default,
+ ) -> SMTPResponse:
+ """
+ Send an SMTP EXPN command, which expands a mailing list.
+ Not many servers support this command.
+
+ :raises SMTPResponseException: on unexpected server response code
+ """
+ await self._ehlo_or_helo_if_needed()
+
+ if options is None:
+ options = []
+
+ parsed_address = parse_address(address)
+ if any(option.lower() == "smtputf8" for option in options):
+ if not self.supports_extension("smtputf8"):
+ raise SMTPNotSupported("SMTPUTF8 is not supported by this server")
+ addr_bytes = parsed_address.encode("utf-8")
+ else:
+ addr_bytes = parsed_address.encode("ascii")
+ options_bytes = [option.encode("ascii") for option in options]
+
+ response = await self.execute_command(
+ b"EXPN", addr_bytes, *options_bytes, timeout=timeout
+ )
+
+ if response.code != SMTPStatus.completed:
+ raise SMTPResponseException(response.code, response.message)
+
+ return response
+
+ async def quit(
+ self, *, timeout: Optional[Union[float, Default]] = _default
+ ) -> SMTPResponse:
+ """
+ Send the SMTP QUIT command, which closes the connection.
+ Also closes the connection from our side after a response is received.
+
+ :raises SMTPResponseException: on unexpected server response code
+ """
+ response = await self.execute_command(b"QUIT", timeout=timeout)
+ if response.code != SMTPStatus.closing:
+ raise SMTPResponseException(response.code, response.message)
+
+ self.close()
+
+ return response
+
+ async def mail(
+ self,
+ sender: str,
+ /,
+ *,
+ options: Optional[Iterable[str]] = None,
+ encoding: str = "ascii",
+ timeout: Optional[Union[float, Default]] = _default,
+ ) -> SMTPResponse:
+ """
+ Send an SMTP MAIL command, which specifies the message sender and
+ begins a new mail transfer session ("envelope").
+
+ :raises SMTPSenderRefused: on unexpected server response code
+ """
+ await self._ehlo_or_helo_if_needed()
+
+ if options is None:
+ options = []
+
+ quoted_sender = quote_address(sender)
+ addr_bytes = quoted_sender.encode(encoding)
+ options_bytes = [option.encode("ascii") for option in options]
+
+ response = await self.execute_command(
+ b"MAIL", b"FROM:" + addr_bytes, *options_bytes, timeout=timeout
+ )
+
+ if response.code != SMTPStatus.completed:
+ raise SMTPSenderRefused(response.code, response.message, sender)
+
+ return response
+
+ async def rcpt(
+ self,
+ recipient: str,
+ /,
+ *,
+ options: Optional[Iterable[str]] = None,
+ encoding: str = "ascii",
+ timeout: Optional[Union[float, Default]] = _default,
+ ) -> SMTPResponse:
+ """
+ Send an SMTP RCPT command, which specifies a single recipient for
+ the message. This command is sent once per recipient and must be
+ preceded by 'MAIL'.
+
+ :raises SMTPRecipientRefused: on unexpected server response code
+ """
+ await self._ehlo_or_helo_if_needed()
+
+ if options is None:
+ options = []
+
+ quoted_recipient = quote_address(recipient)
+ addr_bytes = quoted_recipient.encode(encoding)
+ options_bytes = [option.encode("ascii") for option in options]
+
+ response = await self.execute_command(
+ b"RCPT", b"TO:" + addr_bytes, *options_bytes, timeout=timeout
+ )
+
+ if response.code not in (SMTPStatus.completed, SMTPStatus.will_forward):
+ raise SMTPRecipientRefused(response.code, response.message, recipient)
+
+ return response
+
+ async def data(
+ self,
+ message: Union[str, bytes],
+ /,
+ *,
+ timeout: Optional[Union[float, Default]] = _default,
+ ) -> SMTPResponse:
+ """
+ Send an SMTP DATA command, followed by the message given.
+ This method transfers the actual email content to the server.
+
+ :raises SMTPDataError: on unexpected server response code
+ :raises SMTPServerDisconnected: connection lost
+ """
+ await self._ehlo_or_helo_if_needed()
+
+ # As data accesses protocol directly, some handling is required
+ if self.protocol is None:
+ raise SMTPServerDisconnected("Connection lost")
+
+ if timeout is _default:
+ timeout = self.timeout
+
+ if isinstance(message, str):
+ message = message.encode("ascii")
+
+ return await self.protocol.execute_data_command(message, timeout=timeout)
+
+ # ESMTP commands #
+
+ async def ehlo(
+ self,
+ *,
+ hostname: Optional[str] = None,
+ timeout: Optional[Union[float, Default]] = _default,
+ ) -> SMTPResponse:
+ """
+ Send the SMTP EHLO command.
+ Hostname to send for this command defaults to the FQDN of the local
+ host.
+
+ :raises SMTPHeloError: on unexpected server response code
+ """
+ if hostname is None:
+ hostname = self.local_hostname
+
+ response = await self.execute_command(
+ b"EHLO", hostname.encode("ascii"), timeout=timeout
+ )
+ self.last_ehlo_response = response
+
+ if response.code != SMTPStatus.completed:
+ raise SMTPHeloError(response.code, response.message)
+
+ return response
+
+ def supports_extension(self, extension: str, /) -> bool:
+ """
+ Tests if the server supports the ESMTP service extension given.
+ """
+ return extension.lower() in self.esmtp_extensions
+
+ async def _ehlo_or_helo_if_needed(self) -> None:
+ """
+ Call self.ehlo() and/or self.helo() if needed.
+
+ If there has been no previous EHLO or HELO command this session, this
+ method tries ESMTP EHLO first.
+ """
+ if self.is_ehlo_or_helo_needed:
+ try:
+ await self.ehlo()
+ except SMTPHeloError as exc:
+ if self.is_connected:
+ await self.helo()
+ else:
+ raise exc
+
+ def _reset_server_state(self) -> None:
+ """
+ Clear stored information about the server.
+ """
+ self.last_helo_response = None
+ self._last_ehlo_response = None
+ self.esmtp_extensions = {}
+ self.supports_esmtp = False
+ self.server_auth_methods = []
+
+ async def starttls(
+ self,
+ *,
+ server_hostname: Optional[str] = None,
+ validate_certs: Optional[bool] = None,
+ client_cert: Optional[Union[str, Default]] = _default,
+ client_key: Optional[Union[str, Default]] = _default,
+ cert_bundle: Optional[Union[str, Default]] = _default,
+ tls_context: Optional[Union[ssl.SSLContext, Default]] = _default,
+ timeout: Optional[Union[float, Default]] = _default,
+ ) -> SMTPResponse:
+ """
+ Puts the connection to the SMTP server into TLS mode.
+
+ If there has been no previous EHLO or HELO command this session, this
+ method tries ESMTP EHLO first.
+
+ If the server supports TLS, this will encrypt the rest of the SMTP
+ session. If you provide the keyfile and certfile parameters,
+ the identity of the SMTP server and client can be checked (if
+ validate_certs is True). You can also provide a custom SSLContext
+ object. If no certs or SSLContext is given, and TLS config was
+ provided when initializing the class, STARTTLS will use that;
+ otherwise it will use the Python defaults.
+
+ :raises SMTPException: server does not support STARTTLS
+ :raises SMTPServerDisconnected: connection lost
+ :raises ValueError: invalid options provided
+ """
+ await self._ehlo_or_helo_if_needed()
+
+ if self.protocol is None:
+ raise SMTPServerDisconnected("Server not connected")
+
+ if self.get_transport_info("sslcontext") is not None:
+ raise SMTPException("Connection already using TLS")
+
+ self._update_settings_from_kwargs(
+ validate_certs=validate_certs,
+ client_cert=client_cert,
+ client_key=client_key,
+ cert_bundle=cert_bundle,
+ tls_context=tls_context,
+ )
+ self._validate_config()
+
+ if server_hostname is None:
+ server_hostname = self.hostname
+
+ if timeout is _default:
+ timeout = self.timeout
+
+ tls_context = self._get_tls_context()
+
+ if not self.supports_extension("starttls"):
+ raise SMTPException("SMTP STARTTLS extension not supported by server.")
+
+ response = await self.protocol.start_tls(
+ tls_context, server_hostname=server_hostname, timeout=timeout
+ )
+ if self.protocol is None:
+ raise SMTPServerDisconnected("Connection lost")
+ # Update our transport reference
+ self.transport = self.protocol.transport
+
+ # RFC 3207 part 4.2:
+ # The client MUST discard any knowledge obtained from the server, such
+ # as the list of SMTP service extensions, which was not obtained from
+ # the TLS negotiation itself.
+ self._reset_server_state()
+
+ return response
+
+ # Auth commands
+
+ async def login(
+ self,
+ username: Union[str, bytes],
+ password: Union[str, bytes],
+ /,
+ timeout: Optional[Union[float, Default]] = _default,
+ ) -> SMTPResponse:
+ """
+ Tries to login with supported auth methods.
+
+ Some servers advertise authentication methods they don't really
+ support, so if authentication fails, we continue until we've tried
+ all methods.
+ """
+ await self._ehlo_or_helo_if_needed()
+
+ if not self.supports_extension("auth"):
+ if self.is_connected and self.get_transport_info("sslcontext") is None:
+ raise SMTPException(
+ "The SMTP AUTH extension is not supported by this server. Try "
+ "connecting via TLS (or STARTTLS)."
+ )
+ raise SMTPException(
+ "The SMTP AUTH extension is not supported by this server."
+ )
+
+ response: Optional[SMTPResponse] = None
+ exception: Optional[SMTPAuthenticationError] = None
+ for auth_name in self.supported_auth_methods:
+ method_name = f'auth_{auth_name.replace("-", "")}'
+ try:
+ auth_method = getattr(self, method_name)
+ except AttributeError as err:
+ raise RuntimeError(
+ f"Missing handler for auth method {auth_name}"
+ ) from err
+ try:
+ response = await auth_method(username, password, timeout=timeout)
+ except SMTPAuthenticationError as exc:
+ exception = exc
+ else:
+ # No exception means we're good
+ break
+
+ if response is None:
+ raise exception or SMTPException("No suitable authentication method found.")
+
+ return response
+
+ async def auth_crammd5(
+ self,
+ username: Union[str, bytes],
+ password: Union[str, bytes],
+ /,
+ *,
+ timeout: Optional[Union[float, Default]] = _default,
+ ) -> SMTPResponse:
+ """
+ CRAM-MD5 auth uses the password as a shared secret to MD5 the server's
+ response.
+
+ Example::
+
+ 250 AUTH CRAM-MD5
+ auth cram-md5
+ 334 PDI0NjA5LjEwNDc5MTQwNDZAcG9wbWFpbC5TcGFjZS5OZXQ+
+ dGltIGI5MTNhNjAyYzdlZGE3YTQ5NWI0ZTZlNzMzNGQzODkw
+
+ """
+ initial_response = await self.execute_command(
+ b"AUTH", b"CRAM-MD5", timeout=timeout
+ )
+
+ if initial_response.code != SMTPStatus.auth_continue:
+ raise SMTPAuthenticationError(
+ initial_response.code, initial_response.message
+ )
+
+ verification_bytes = auth_crammd5_verify(
+ username, password, initial_response.message
+ )
+ response = await self.execute_command(verification_bytes)
+
+ if response.code != SMTPStatus.auth_successful:
+ raise SMTPAuthenticationError(response.code, response.message)
+
+ return response
+
+ async def auth_plain(
+ self,
+ username: Union[str, bytes],
+ password: Union[str, bytes],
+ /,
+ *,
+ timeout: Optional[Union[float, Default]] = _default,
+ ) -> SMTPResponse:
+ """
+ PLAIN auth encodes the username and password in one Base64 encoded
+ string. No verification message is required.
+
+ Example::
+
+ 220-esmtp.example.com
+ AUTH PLAIN dGVzdAB0ZXN0AHRlc3RwYXNz
+ 235 ok, go ahead (#2.0.0)
+
+ """
+ encoded = auth_plain_encode(username, password)
+ response = await self.execute_command(
+ b"AUTH", b"PLAIN", encoded, timeout=timeout
+ )
+
+ if response.code != SMTPStatus.auth_successful:
+ raise SMTPAuthenticationError(response.code, response.message)
+
+ return response
+
+ async def auth_login(
+ self,
+ username: Union[str, bytes],
+ password: Union[str, bytes],
+ /,
+ *,
+ timeout: Optional[Union[float, Default]] = _default,
+ ) -> SMTPResponse:
+ """
+ LOGIN auth sends the Base64 encoded username and password in sequence.
+
+ Example::
+
+ 250 AUTH LOGIN PLAIN CRAM-MD5
+ auth login avlsdkfj
+ 334 UGFzc3dvcmQ6
+ avlsdkfj
+
+ Note that there is an alternate version that sends the username
+ as a separate command::
+
+ 250 AUTH LOGIN PLAIN CRAM-MD5
+ auth login
+ 334 VXNlcm5hbWU6
+ avlsdkfj
+ 334 UGFzc3dvcmQ6
+ avlsdkfj
+
+ However, since most servers seem to support both, we send the username
+ with the initial request.
+ """
+ encoded_username, encoded_password = auth_login_encode(username, password)
+ initial_response = await self.execute_command(
+ b"AUTH", b"LOGIN", encoded_username, timeout=timeout
+ )
+
+ if initial_response.code != SMTPStatus.auth_continue:
+ raise SMTPAuthenticationError(
+ initial_response.code, initial_response.message
+ )
+
+ response = await self.execute_command(encoded_password, timeout=timeout)
+
+ if response.code != SMTPStatus.auth_successful:
+ raise SMTPAuthenticationError(response.code, response.message)
+
+ return response
+
+ async def sendmail(
+ self,
+ sender: str,
+ recipients: Union[str, Sequence[str]],
+ message: Union[str, bytes],
+ /,
+ *,
+ mail_options: Optional[Iterable[str]] = None,
+ rcpt_options: Optional[Iterable[str]] = None,
+ timeout: Optional[Union[float, Default]] = _default,
+ ) -> Tuple[Dict[str, SMTPResponse], str]:
+ """
+ This command performs an entire mail transaction.
+
+ The arguments are:
+ - sender: The address sending this mail.
+ - recipients: A list of addresses to send this mail to. A bare
+ string will be treated as a list with 1 address.
+ - message: The message string to send.
+ - mail_options: List of options (such as ESMTP 8bitmime) for the
+ MAIL command.
+ - rcpt_options: List of options (such as DSN commands) for all the
+ RCPT commands.
+
+ message must be a string containing characters in the ASCII range.
+ The string is encoded to bytes using the ascii codec, and lone \\\\r
+ and \\\\n characters are converted to \\\\r\\\\n characters.
+
+ If there has been no previous HELO or EHLO command this session, this
+ method tries EHLO first.
+
+ This method will return normally if the mail is accepted for at least
+ one recipient. It returns a tuple consisting of:
+
+ - an error dictionary, with one entry for each recipient that was
+ refused. Each entry contains a tuple of the SMTP error code
+ and the accompanying error message sent by the server.
+ - the message sent by the server in response to the DATA command
+ (often containing a message id)
+
+ Example:
+
+ >>> event_loop = asyncio.get_event_loop()
+ >>> smtp = aiosmtplib.SMTP(hostname="127.0.0.1", port=1025)
+ >>> event_loop.run_until_complete(smtp.connect())
+ (220, ...)
+ >>> recipients = ["one@one.org", "two@two.org", "3@three.org"]
+ >>> message = "From: Me@my.org\\nSubject: testing\\nHello World"
+ >>> send_coro = smtp.sendmail("me@my.org", recipients, message)
+ >>> event_loop.run_until_complete(send_coro)
+ ({}, 'OK')
+ >>> event_loop.run_until_complete(smtp.quit())
+ (221, Bye)
+
+ In the above example, the message was accepted for delivery for all
+ three addresses. If delivery had been only successful to two
+ of the three addresses, and one was rejected, the response would look
+ something like::
+
+ (
+ {"nobody@three.org": (550, "User unknown")},
+ "Written safely to disk. #902487694.289148.12219.",
+ )
+
+
+ If delivery is not successful to any addresses,
+ :exc:`.SMTPRecipientsRefused` is raised.
+
+ If :exc:`.SMTPResponseException` is raised by this method, we try to
+ send an RSET command to reset the server envelope automatically for
+ the next attempt.
+
+ :raises SMTPRecipientsRefused: delivery to all recipients failed
+ :raises SMTPResponseException: on invalid response
+ """
+ if isinstance(recipients, str):
+ recipients = [recipients]
+ if mail_options is None:
+ mail_options = []
+ else:
+ mail_options = list(mail_options)
+ if rcpt_options is None:
+ rcpt_options = []
+ else:
+ rcpt_options = list(rcpt_options)
+
+ if any(option.lower() == "smtputf8" for option in mail_options):
+ mailbox_encoding = "utf-8"
+ else:
+ mailbox_encoding = "ascii"
+
+ if self._sendmail_lock is None:
+ self._sendmail_lock = asyncio.Lock()
+
+ async with self._sendmail_lock:
+ # Make sure we've done an EHLO for extension checks
+ await self._ehlo_or_helo_if_needed()
+
+ if mailbox_encoding == "utf-8" and not self.supports_extension("smtputf8"):
+ raise SMTPNotSupported("SMTPUTF8 is not supported by this server")
+
+ if self.supports_extension("size"):
+ message_len = len(message)
+ size_option = f"size={message_len}"
+ mail_options.insert(0, size_option)
+
+ try:
+ await self.mail(
+ sender,
+ options=mail_options,
+ encoding=mailbox_encoding,
+ timeout=timeout,
+ )
+ recipient_errors = await self._send_recipients(
+ recipients, rcpt_options, encoding=mailbox_encoding, timeout=timeout
+ )
+ response = await self.data(message, timeout=timeout)
+ except (SMTPResponseException, SMTPRecipientsRefused) as exc:
+ # If we got an error, reset the envelope.
+ try:
+ await self.rset(timeout=timeout)
+ except (ConnectionError, SMTPResponseException):
+ # If we're disconnected on the reset, or we get a bad
+ # status, don't raise that as it's confusing
+ pass
+ raise exc
+
+ return recipient_errors, response.message
+
+ async def _send_recipients(
+ self,
+ recipients: Sequence[str],
+ options: Iterable[str],
+ encoding: str = "ascii",
+ timeout: Optional[Union[float, Default]] = _default,
+ ) -> Dict[str, SMTPResponse]:
+ """
+ Send the recipients given to the server. Used as part of
+ :meth:`.sendmail`.
+ """
+ recipient_errors = []
+ for address in recipients:
+ try:
+ await self.rcpt(
+ address, options=options, encoding=encoding, timeout=timeout
+ )
+ except SMTPRecipientRefused as exc:
+ recipient_errors.append(exc)
+
+ if len(recipient_errors) == len(recipients):
+ raise SMTPRecipientsRefused(recipient_errors)
+
+ formatted_errors = {
+ err.recipient: SMTPResponse(err.code, err.message)
+ for err in recipient_errors
+ }
+
+ return formatted_errors
+
+ async def send_message(
+ self,
+ message: Union[email.message.EmailMessage, email.message.Message],
+ /,
+ *,
+ sender: Optional[str] = None,
+ recipients: Optional[Union[str, Sequence[str]]] = None,
+ mail_options: Optional[Iterable[str]] = None,
+ rcpt_options: Optional[Iterable[str]] = None,
+ timeout: Optional[Union[float, Default]] = _default,
+ ) -> Tuple[Dict[str, SMTPResponse], str]:
+ r"""
+ Sends an :py:class:`email.message.EmailMessage` object.
+
+ Arguments are as for :meth:`.sendmail`, except that message is an
+ :py:class:`email.message.EmailMessage` object. If sender is None or
+ recipients is None, these arguments are taken from the headers of the
+ EmailMessage as described in RFC 2822. Regardless of the values of sender
+ and recipients, any Bcc field (or Resent-Bcc field, when the message is a
+ resent) of the EmailMessage object will not be transmitted. The EmailMessage
+ object is then serialized using :py:class:`email.generator.Generator` and
+ :meth:`.sendmail` is called to transmit the message.
+
+ 'Resent-Date' is a mandatory field if the message is resent (RFC 2822
+ Section 3.6.6). In such a case, we use the 'Resent-\*' fields.
+ However, if there is more than one 'Resent-' block there's no way to
+ unambiguously determine which one is the most recent in all cases,
+ so rather than guess we raise a ``ValueError`` in that case.
+
+ :raises ValueError:
+ on more than one Resent header block
+ on no sender kwarg or From header in message
+ on no recipients kwarg or To, Cc or Bcc header in message
+ :raises SMTPRecipientsRefused: delivery to all recipients failed
+ :raises SMTPResponseException: on invalid response
+ """
+ if mail_options is None:
+ mail_options = []
+ else:
+ mail_options = list(mail_options)
+
+ if sender is None:
+ sender = extract_sender(message)
+ if sender is None:
+ raise ValueError("No From header provided in message")
+
+ if isinstance(recipients, str):
+ recipients = [recipients]
+ elif recipients is None:
+ recipients = extract_recipients(message)
+ if not recipients:
+ raise ValueError("No recipient headers provided in message")
+
+ # Make sure we've done an EHLO for extension checks
+ await self._ehlo_or_helo_if_needed()
+
+ try:
+ sender.encode("ascii")
+ "".join(recipients).encode("ascii")
+ except UnicodeEncodeError:
+ utf8_required = True
+ else:
+ utf8_required = False
+
+ if utf8_required:
+ if not self.supports_extension("smtputf8"):
+ raise SMTPNotSupported(
+ "An address containing non-ASCII characters was provided, but "
+ "SMTPUTF8 is not supported by this server"
+ )
+ elif "smtputf8" not in [option.lower() for option in mail_options]:
+ mail_options.append("SMTPUTF8")
+
+ if self.supports_extension("8BITMIME"):
+ if "body=8bitmime" not in [option.lower() for option in mail_options]:
+ mail_options.append("BODY=8BITMIME")
+ cte_type = "8bit"
+ else:
+ cte_type = "7bit"
+
+ flat_message = flatten_message(message, utf8=utf8_required, cte_type=cte_type)
+
+ return await self.sendmail(
+ sender,
+ recipients,
+ flat_message,
+ mail_options=mail_options,
+ rcpt_options=rcpt_options,
+ timeout=timeout,
+ )
+
+ def sendmail_sync(
+ self, *args: Any, **kwargs: Any
+ ) -> Tuple[Dict[str, SMTPResponse], str]:
+ """
+ Synchronous version of :meth:`.sendmail`. This method starts
+ an event loop to connect, send the message, and disconnect.
+ """
+
+ async def sendmail_coroutine() -> Tuple[Dict[str, SMTPResponse], str]:
+ async with self:
+ return await self.sendmail(*args, **kwargs)
+
+ return asyncio.run(sendmail_coroutine())
+
+ def send_message_sync(
+ self, *args: Any, **kwargs: Any
+ ) -> Tuple[Dict[str, SMTPResponse], str]:
+ """
+ Synchronous version of :meth:`.send_message`. This method
+ starts an event loop to connect, send the message, and disconnect.
+ """
+
+ async def send_message_coroutine() -> Tuple[Dict[str, SMTPResponse], str]:
+ async with self:
+ return await self.send_message(*args, **kwargs)
+
+ return asyncio.run(send_message_coroutine())
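The sync wrappers above make the client usable from blocking code. As a minimal sketch (hostname and addresses are placeholders, not part of this package):

    from email.message import EmailMessage

    import aiosmtplib

    msg = EmailMessage()
    msg["From"] = "sender@example.org"
    msg["To"] = "recipient@example.org"
    msg["Subject"] = "Hello"
    msg.set_content("Sent without managing an event loop by hand.")

    # send_message_sync() runs asyncio.run() internally: it connects
    # (the `async with self` above), sends, and disconnects.
    client = aiosmtplib.SMTP(hostname="smtp.example.org", port=25)
    errors, response = client.send_message_sync(msg)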
diff --git a/Backend/venv/lib/python3.12/site-packages/aiosmtplib/status.py b/Backend/venv/lib/python3.12/site-packages/aiosmtplib/status.py
new file mode 100644
index 00000000..dd317d5a
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/aiosmtplib/status.py
@@ -0,0 +1,5 @@
+from .typing import SMTPStatus
+
+
+# alias SMTPStatus for backwards compatibility
+__all__ = ("SMTPStatus",)
diff --git a/Backend/venv/lib/python3.12/site-packages/aiosmtplib/typing.py b/Backend/venv/lib/python3.12/site-packages/aiosmtplib/typing.py
new file mode 100644
index 00000000..df33025e
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/aiosmtplib/typing.py
@@ -0,0 +1,60 @@
+import enum
+import os
+from typing import Union
+
+
+__all__ = ("Default", "SMTPStatus", "SocketPathType", "_default")
+
+
+SocketPathType = Union[str, bytes, os.PathLike]
+
+
+class Default(enum.Enum):
+ """
+ Used for type hinting kwarg defaults.
+ """
+
+ token = 0
+
+
+_default = Default.token
+
+
+@enum.unique
+class SMTPStatus(enum.IntEnum):
+ """
+ Defines SMTP statuses for code readability.
+
+ See also: http://www.greenend.org.uk/rjk/tech/smtpreplies.html
+ """
+
+ invalid_response = -1
+ system_status_ok = 211
+ help_message = 214
+ ready = 220
+ closing = 221
+ auth_successful = 235
+ completed = 250
+ will_forward = 251
+ cannot_vrfy = 252
+ auth_continue = 334
+ start_input = 354
+ domain_unavailable = 421
+ mailbox_unavailable = 450
+ error_processing = 451
+ insufficient_storage = 452
+ tls_not_available = 454
+ unrecognized_command = 500
+ unrecognized_parameters = 501
+ command_not_implemented = 502
+ bad_command_sequence = 503
+ parameter_not_implemented = 504
+ domain_does_not_accept_mail = 521
+ access_denied = 530 # Sendmail specific
+ auth_failed = 535
+ mailbox_does_not_exist = 550
+ user_not_local = 551
+ storage_exceeded = 552
+ mailbox_name_invalid = 553
+ transaction_failed = 554
+ syntax_error = 555
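The enum above exists purely for readability: handlers can compare reply codes against named members instead of bare integers, while ``Default``/``_default`` give a typed sentinel so that ``None`` remains a meaningful argument value. A hedged sketch (the helper function is hypothetical, not part of this module):

    from aiosmtplib import SMTPResponseException
    from aiosmtplib.typing import SMTPStatus

    def raise_unless_completed(code: int, message: str) -> None:
        # A bare 250 is opaque; SMTPStatus.completed says what it means.
        if code != SMTPStatus.completed:
            raise SMTPResponseException(code, message)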
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic-1.12.1.dist-info/INSTALLER b/Backend/venv/lib/python3.12/site-packages/alembic-1.12.1.dist-info/INSTALLER
new file mode 100644
index 00000000..a1b589e3
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/alembic-1.12.1.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic-1.12.1.dist-info/LICENSE b/Backend/venv/lib/python3.12/site-packages/alembic-1.12.1.dist-info/LICENSE
new file mode 100644
index 00000000..74b9ce34
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/alembic-1.12.1.dist-info/LICENSE
@@ -0,0 +1,19 @@
+Copyright 2009-2023 Michael Bayer.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+of the Software, and to permit persons to whom the Software is furnished to do
+so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
\ No newline at end of file
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic-1.12.1.dist-info/METADATA b/Backend/venv/lib/python3.12/site-packages/alembic-1.12.1.dist-info/METADATA
new file mode 100644
index 00000000..0d019742
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/alembic-1.12.1.dist-info/METADATA
@@ -0,0 +1,141 @@
+Metadata-Version: 2.1
+Name: alembic
+Version: 1.12.1
+Summary: A database migration tool for SQLAlchemy.
+Home-page: https://alembic.sqlalchemy.org
+Author: Mike Bayer
+Author-email: mike_mp@zzzcomputing.com
+License: MIT
+Project-URL: Documentation, https://alembic.sqlalchemy.org/en/latest/
+Project-URL: Changelog, https://alembic.sqlalchemy.org/en/latest/changelog.html
+Project-URL: Source, https://github.com/sqlalchemy/alembic/
+Project-URL: Issue Tracker, https://github.com/sqlalchemy/alembic/issues/
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: Environment :: Console
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Topic :: Database :: Front-Ends
+Requires-Python: >=3.7
+Description-Content-Type: text/x-rst
+License-File: LICENSE
+Requires-Dist: SQLAlchemy >=1.3.0
+Requires-Dist: Mako
+Requires-Dist: typing-extensions >=4
+Requires-Dist: importlib-metadata ; python_version < "3.9"
+Requires-Dist: importlib-resources ; python_version < "3.9"
+Provides-Extra: tz
+Requires-Dist: python-dateutil ; extra == 'tz'
+
+Alembic is a database migrations tool written by the author
+of `SQLAlchemy <https://www.sqlalchemy.org>`_. A migrations tool
+offers the following functionality:
+
+* Can emit ALTER statements to a database in order to change
+ the structure of tables and other constructs
+* Provides a system whereby "migration scripts" may be constructed;
+ each script indicates a particular series of steps that can "upgrade" a
+ target database to a new version, and optionally a series of steps that can
+ "downgrade" similarly, doing the same steps in reverse.
+* Allows the scripts to execute in some sequential manner.
+
+The goals of Alembic are:
+
+* Very open ended and transparent configuration and operation. A new
+ Alembic environment is generated from a set of templates which is selected
+ among a set of options when setup first occurs. The templates then deposit a
+ series of scripts that define fully how database connectivity is established
+ and how migration scripts are invoked; the migration scripts themselves are
+ generated from a template within that series of scripts. The scripts can
+ then be further customized to define exactly how databases will be
+ interacted with and what structure new migration files should take.
+* Full support for transactional DDL. The default scripts ensure that all
+ migrations occur within a transaction - for those databases which support
+ this (Postgresql, Microsoft SQL Server), migrations can be tested with no
+ need to manually undo changes upon failure.
+* Minimalist script construction. Basic operations like renaming
+ tables/columns, adding/removing columns, changing column attributes can be
+ performed through one line commands like alter_column(), rename_table(),
+ add_constraint(). There is no need to recreate full SQLAlchemy Table
+ structures for simple operations like these - the functions themselves
+ generate minimalist schema structures behind the scenes to achieve the given
+ DDL sequence.
+* "auto generation" of migrations. While real world migrations are far more
+ complex than what can be automatically determined, Alembic can still
+ eliminate the initial grunt work in generating new migration directives
+ from an altered schema. The ``--autogenerate`` feature will inspect the
+ current status of a database using SQLAlchemy's schema inspection
+ capabilities, compare it to the current state of the database model as
+ specified in Python, and generate a series of "candidate" migrations,
+ rendering them into a new migration script as Python directives. The
+ developer then edits the new file, adding additional directives and data
+ migrations as needed, to produce a finished migration. Table and column
+ level changes can be detected, with constraints and indexes to follow as
+ well.
+* Full support for migrations generated as SQL scripts. Those of us who
+ work in corporate environments know that direct access to DDL commands on a
+ production database is a rare privilege, and DBAs want textual SQL scripts.
+ Alembic's usage model and commands are oriented towards being able to run a
+ series of migrations into a textual output file as easily as it runs them
+ directly to a database. Care must be taken in this mode to not invoke other
+ operations that rely upon in-memory SELECTs of rows - Alembic tries to
+ provide helper constructs like bulk_insert() to help with data-oriented
+ operations that are compatible with script-based DDL.
+* Non-linear, dependency-graph versioning. Scripts are given UUID
+ identifiers similarly to a DVCS, and the linkage of one script to the next
+ is achieved via human-editable markers within the scripts themselves.
+ The structure of a set of migration files is considered as a
+ directed-acyclic graph, meaning any migration file can be dependent
+ on any other arbitrary set of migration files, or none at
+ all. Through this open-ended system, migration files can be organized
+ into branches, multiple roots, and mergepoints, without restriction.
+ Commands are provided to produce new branches, roots, and merges of
+ branches automatically.
+* Provide a library of ALTER constructs that can be used by any SQLAlchemy
+ application. The DDL constructs build upon SQLAlchemy's own DDLElement base
+ and can be used standalone by any application or script.
+* At long last, bring SQLite and its inability to ALTER things into the fold,
+ but in such a way that SQLite's very special workflow needs are accommodated
+ in an explicit way that makes the most of a bad situation, through the
+ concept of a "batch" migration, where multiple changes to a table can
+ be batched together to form a series of instructions for a single, subsequent
+ "move-and-copy" workflow. You can even use "move-and-copy" workflow for
+ other databases, if you want to recreate a table in the background
+ on a busy system.
+
+Documentation and status of Alembic is at https://alembic.sqlalchemy.org/
+
+The SQLAlchemy Project
+======================
+
+Alembic is part of the `SQLAlchemy Project <https://www.sqlalchemy.org>`_ and
+adheres to the same standards and conventions as the core project.
+
+Development / Bug reporting / Pull requests
+___________________________________________
+
+Please refer to the
+`SQLAlchemy Community Guide <https://www.sqlalchemy.org/develop.html>`_ for
+guidelines on coding and participating in this project.
+
+Code of Conduct
+_______________
+
+Above all, SQLAlchemy places great emphasis on polite, thoughtful, and
+constructive communication between users and developers.
+Please see our current Code of Conduct at
+`Code of Conduct <https://www.sqlalchemy.org/codeofconduct.html>`_.
+
+License
+=======
+
+Alembic is distributed under the `MIT license
+<https://opensource.org/licenses/MIT>`_.
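The "minimalist script construction" described above means a generated revision file stays short. A sketch of the shape such a file takes (revision identifiers and table names are placeholders):

    from alembic import op
    import sqlalchemy as sa

    revision = "a1b2c3d4e5f6"
    down_revision = None

    def upgrade():
        # One-line directives; no full Table definitions required.
        op.add_column("account", sa.Column("last_seen", sa.DateTime()))
        op.alter_column("account", "name", nullable=False)

    def downgrade():
        op.alter_column("account", "name", nullable=True)
        op.drop_column("account", "last_seen")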
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic-1.12.1.dist-info/RECORD b/Backend/venv/lib/python3.12/site-packages/alembic-1.12.1.dist-info/RECORD
new file mode 100644
index 00000000..ad8362d6
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/alembic-1.12.1.dist-info/RECORD
@@ -0,0 +1,149 @@
+../../../bin/alembic,sha256=tvDcYG3JNkV34TcJzWSyPCW82k9I5FmOjqmP6ZNfOCE,226
+alembic-1.12.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+alembic-1.12.1.dist-info/LICENSE,sha256=soUmiob0QW6vTQWyrjiAwVb3xZqPk1pAK8BW6vszrwg,1058
+alembic-1.12.1.dist-info/METADATA,sha256=D9-LeKL0unLPg2JKmlFMB5NAxt9N9y-8oVEGOUHbQnU,7306
+alembic-1.12.1.dist-info/RECORD,,
+alembic-1.12.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+alembic-1.12.1.dist-info/WHEEL,sha256=yQN5g4mg4AybRjkgi-9yy4iQEFibGQmlz78Pik5Or-A,92
+alembic-1.12.1.dist-info/entry_points.txt,sha256=aykM30soxwGN0pB7etLc1q0cHJbL9dy46RnK9VX4LLw,48
+alembic-1.12.1.dist-info/top_level.txt,sha256=FwKWd5VsPFC8iQjpu1u9Cn-JnK3-V1RhUCmWqz1cl-s,8
+alembic/__init__.py,sha256=gczqgDgBRw3aV70aNeH6WGu0WdASQf_YiChV12qCRRI,75
+alembic/__main__.py,sha256=373m7-TBh72JqrSMYviGrxCHZo-cnweM8AGF8A22PmY,78
+alembic/__pycache__/__init__.cpython-312.pyc,,
+alembic/__pycache__/__main__.cpython-312.pyc,,
+alembic/__pycache__/command.cpython-312.pyc,,
+alembic/__pycache__/config.cpython-312.pyc,,
+alembic/__pycache__/context.cpython-312.pyc,,
+alembic/__pycache__/environment.cpython-312.pyc,,
+alembic/__pycache__/migration.cpython-312.pyc,,
+alembic/__pycache__/op.cpython-312.pyc,,
+alembic/autogenerate/__init__.py,sha256=4IHgWH89pForRq-yCDZhGjjVtsfGX5ECWNPuUs8nGUk,351
+alembic/autogenerate/__pycache__/__init__.cpython-312.pyc,,
+alembic/autogenerate/__pycache__/api.cpython-312.pyc,,
+alembic/autogenerate/__pycache__/compare.cpython-312.pyc,,
+alembic/autogenerate/__pycache__/render.cpython-312.pyc,,
+alembic/autogenerate/__pycache__/rewriter.cpython-312.pyc,,
+alembic/autogenerate/api.py,sha256=MNn0Xtmj44aMFjfiR0LMkbxOynHyiyaRBnrj5EkImm4,21967
+alembic/autogenerate/compare.py,sha256=gSCjxrkQAl0rJD6o9Ln8wNxGVNU6FrWzKZYVkH5Tmac,47042
+alembic/autogenerate/render.py,sha256=Fik2aPZEIxOlTCrBd0UiPxnX5SFG__CvfXqMWoJr6lw,34475
+alembic/autogenerate/rewriter.py,sha256=Osba8GFVeqiX1ypGJW7Axt0ui2EROlaFtVZdMFbhzZ0,7384
+alembic/command.py,sha256=ze4pYvKpB-FtF8rduY6F6n3XHqeA-15iXaaEDeNHVzI,21588
+alembic/config.py,sha256=68e1nmYU5Nfh0bNRqRWUygSilDl1p0G_U1zZ8ifgmD8,21931
+alembic/context.py,sha256=hK1AJOQXJ29Bhn276GYcosxeG7pC5aZRT5E8c4bMJ4Q,195
+alembic/context.pyi,sha256=FLsT0be_vO_ozlC05EJkWR5olDPoTVq-7tgtoM5wSAw,31463
+alembic/ddl/__init__.py,sha256=xXr1W6PePe0gCLwR42ude0E6iru9miUFc1fCeQN4YP8,137
+alembic/ddl/__pycache__/__init__.cpython-312.pyc,,
+alembic/ddl/__pycache__/base.cpython-312.pyc,,
+alembic/ddl/__pycache__/impl.cpython-312.pyc,,
+alembic/ddl/__pycache__/mssql.cpython-312.pyc,,
+alembic/ddl/__pycache__/mysql.cpython-312.pyc,,
+alembic/ddl/__pycache__/oracle.cpython-312.pyc,,
+alembic/ddl/__pycache__/postgresql.cpython-312.pyc,,
+alembic/ddl/__pycache__/sqlite.cpython-312.pyc,,
+alembic/ddl/base.py,sha256=cCY3NldMRggrKd9bZ0mFRBE9GNDaAy0UJcM3ey4Utgw,9638
+alembic/ddl/impl.py,sha256=Z3GpNM2KwBpfl1UCam1YsYbSd0mQzRigOKQhUCLIPgE,25564
+alembic/ddl/mssql.py,sha256=0k26xnUSZNj3qCHEMzRFbaWgUzKcV07I3_-Ns47VhO0,14105
+alembic/ddl/mysql.py,sha256=ff8OE0zQ8YYjAgltBbtjQkDR-g9z65DNeFjEMm4sX6c,16675
+alembic/ddl/oracle.py,sha256=E0VaZaUM_5mwqNiJVA3zOAK-cuHVVIv_-NmUbH1JuGQ,6097
+alembic/ddl/postgresql.py,sha256=aO8pcVN5ycw1wG2m1RRt8dQUD1KgRa6T4rSzg9FPCkU,26457
+alembic/ddl/sqlite.py,sha256=9q7NAxyeFwn9kWwQSc9RLeMFSos8waM7x9lnXdByh44,7613
+alembic/environment.py,sha256=MM5lPayGT04H3aeng1H7GQ8HEAs3VGX5yy6mDLCPLT4,43
+alembic/migration.py,sha256=MV6Fju6rZtn2fTREKzXrCZM6aIBGII4OMZFix0X-GLs,41
+alembic/op.py,sha256=flHtcsVqOD-ZgZKK2pv-CJ5Cwh-KJ7puMUNXzishxLw,167
+alembic/op.pyi,sha256=ldQBwAfzm_-ZsC3nizMuGoD34hjMKb4V_-Q1rR8q8LI,48591
+alembic/operations/__init__.py,sha256=e0KQSZAgLpTWvyvreB7DWg7RJV_MWSOPVDgCqsd2FzY,318
+alembic/operations/__pycache__/__init__.cpython-312.pyc,,
+alembic/operations/__pycache__/base.cpython-312.pyc,,
+alembic/operations/__pycache__/batch.cpython-312.pyc,,
+alembic/operations/__pycache__/ops.cpython-312.pyc,,
+alembic/operations/__pycache__/schemaobj.cpython-312.pyc,,
+alembic/operations/__pycache__/toimpl.cpython-312.pyc,,
+alembic/operations/base.py,sha256=2so4KisDNuOLw0CRiZqorIHrhuenpVoFbn3B0sNvDic,72471
+alembic/operations/batch.py,sha256=uMvGJDlcTs0GSHasg4Gsdv1YcXeLOK_1lkRl3jk1ezY,26954
+alembic/operations/ops.py,sha256=aP9Uz36k98O_Y-njKIAifyvyhi0g2zU6_igKMos91_s,93539
+alembic/operations/schemaobj.py,sha256=-tWad8pgWUNWucbpTnPuFK_EEl913C0RADJhlBnrjhc,9393
+alembic/operations/toimpl.py,sha256=K8nUmojtL94tyLSWdDD-e94IbghZ19k55iBIMvzMm5E,6993
+alembic/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+alembic/runtime/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+alembic/runtime/__pycache__/__init__.cpython-312.pyc,,
+alembic/runtime/__pycache__/environment.cpython-312.pyc,,
+alembic/runtime/__pycache__/migration.cpython-312.pyc,,
+alembic/runtime/environment.py,sha256=qaerrw5jB7zYliNnCvIziaju4-tvQ451MuGW8PHnfvw,41019
+alembic/runtime/migration.py,sha256=5UtTI_T0JtYzt6ZpeUhannMZOvXWiEymKFOpeCefaPY,49407
+alembic/script/__init__.py,sha256=lSj06O391Iy5avWAiq8SPs6N8RBgxkSPjP8wpXcNDGg,100
+alembic/script/__pycache__/__init__.cpython-312.pyc,,
+alembic/script/__pycache__/base.cpython-312.pyc,,
+alembic/script/__pycache__/revision.cpython-312.pyc,,
+alembic/script/__pycache__/write_hooks.cpython-312.pyc,,
+alembic/script/base.py,sha256=90SpT8wyTMTUuS0Svsy5YIoqJSrR-6CtYSzStmRvFT0,37174
+alembic/script/revision.py,sha256=DE0nwvDOzdFo843brvnhs1DfP0jRC5EVQHrNihC7PUQ,61471
+alembic/script/write_hooks.py,sha256=Nqj4zz3sm97kAPOpK1m-i2znJchiybO_TWT50oljlJw,4917
+alembic/templates/async/README,sha256=ISVtAOvqvKk_5ThM5ioJE-lMkvf9IbknFUFVU_vPma4,58
+alembic/templates/async/__pycache__/env.cpython-312.pyc,,
+alembic/templates/async/alembic.ini.mako,sha256=k3IyGDG15Rp1JDweC0TiDauaKYNvj3clrGfhw6oV6MI,3505
+alembic/templates/async/env.py,sha256=zbOCf3Y7w2lg92hxSwmG1MM_7y56i_oRH4AKp0pQBYo,2389
+alembic/templates/async/script.py.mako,sha256=MEqL-2qATlST9TAOeYgscMn1uy6HUS9NFvDgl93dMj8,635
+alembic/templates/generic/README,sha256=MVlc9TYmr57RbhXET6QxgyCcwWP7w-vLkEsirENqiIQ,38
+alembic/templates/generic/__pycache__/env.cpython-312.pyc,,
+alembic/templates/generic/alembic.ini.mako,sha256=gZWFmH2A9sP0i7cxEDhJFkjGtTKUXaVna8QAbIaRqxk,3614
+alembic/templates/generic/env.py,sha256=TLRWOVW3Xpt_Tpf8JFzlnoPn_qoUu8UV77Y4o9XD6yI,2103
+alembic/templates/generic/script.py.mako,sha256=MEqL-2qATlST9TAOeYgscMn1uy6HUS9NFvDgl93dMj8,635
+alembic/templates/multidb/README,sha256=dWLDhnBgphA4Nzb7sNlMfCS3_06YqVbHhz-9O5JNqyI,606
+alembic/templates/multidb/__pycache__/env.cpython-312.pyc,,
+alembic/templates/multidb/alembic.ini.mako,sha256=j_Y0yuZVoHy7sTPgSPd8DmbT2ItvAdWs7trYZSOmFnw,3708
+alembic/templates/multidb/env.py,sha256=6zNjnW8mXGUk7erTsAvrfhvqoczJ-gagjVq1Ypg2YIQ,4230
+alembic/templates/multidb/script.py.mako,sha256=N06nMtNSwHkgl0EBXDyMt8njp9tlOesR583gfq21nbY,1090
+alembic/testing/__init__.py,sha256=kOxOh5nwmui9d-_CCq9WA4Udwy7ITjm453w74CTLZDo,1159
+alembic/testing/__pycache__/__init__.cpython-312.pyc,,
+alembic/testing/__pycache__/assertions.cpython-312.pyc,,
+alembic/testing/__pycache__/env.cpython-312.pyc,,
+alembic/testing/__pycache__/fixtures.cpython-312.pyc,,
+alembic/testing/__pycache__/requirements.cpython-312.pyc,,
+alembic/testing/__pycache__/schemacompare.cpython-312.pyc,,
+alembic/testing/__pycache__/util.cpython-312.pyc,,
+alembic/testing/__pycache__/warnings.cpython-312.pyc,,
+alembic/testing/assertions.py,sha256=1CbJk8c8-WO9eJ0XJ0jJvMsNRLUrXV41NOeIJUAlOBk,5015
+alembic/testing/env.py,sha256=zJacVb_z6uLs2U1TtkmnFH9P3_F-3IfYbVv4UEPOvfo,10754
+alembic/testing/fixtures.py,sha256=NyP4wE_dFN9ZzSGiBagRu1cdzkka03nwJYJYHYrrkSY,9112
+alembic/testing/plugin/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+alembic/testing/plugin/__pycache__/__init__.cpython-312.pyc,,
+alembic/testing/plugin/__pycache__/bootstrap.cpython-312.pyc,,
+alembic/testing/plugin/bootstrap.py,sha256=9C6wtjGrIVztZ928w27hsQE0KcjDLIUtUN3dvZKsMVk,50
+alembic/testing/requirements.py,sha256=WByOiJxn2crazIXPq6-0cfqV95cfd9vP_ZQ1Cf2l8hY,4841
+alembic/testing/schemacompare.py,sha256=7_4_0Y4UvuMiZ66pz1RC_P8Z1kYOP-R4Y5qUcNmcMKA,4535
+alembic/testing/suite/__init__.py,sha256=MvE7-hwbaVN1q3NM-ztGxORU9dnIelUCINKqNxewn7Y,288
+alembic/testing/suite/__pycache__/__init__.cpython-312.pyc,,
+alembic/testing/suite/__pycache__/_autogen_fixtures.cpython-312.pyc,,
+alembic/testing/suite/__pycache__/test_autogen_comments.cpython-312.pyc,,
+alembic/testing/suite/__pycache__/test_autogen_computed.cpython-312.pyc,,
+alembic/testing/suite/__pycache__/test_autogen_diffs.cpython-312.pyc,,
+alembic/testing/suite/__pycache__/test_autogen_fks.cpython-312.pyc,,
+alembic/testing/suite/__pycache__/test_autogen_identity.cpython-312.pyc,,
+alembic/testing/suite/__pycache__/test_environment.cpython-312.pyc,,
+alembic/testing/suite/__pycache__/test_op.cpython-312.pyc,,
+alembic/testing/suite/_autogen_fixtures.py,sha256=cDq1pmzHe15S6dZPGNC6sqFaCQ3hLT_oPV2IDigUGQ0,9880
+alembic/testing/suite/test_autogen_comments.py,sha256=aEGqKUDw4kHjnDk298aoGcQvXJWmZXcIX_2FxH4cJK8,6283
+alembic/testing/suite/test_autogen_computed.py,sha256=qJeBpc8urnwTFvbwWrSTIbHVkRUuCXP-dKaNbUK2U2U,6077
+alembic/testing/suite/test_autogen_diffs.py,sha256=T4SR1n_kmcOKYhR4W1-dA0e5sddJ69DSVL2HW96kAkE,8394
+alembic/testing/suite/test_autogen_fks.py,sha256=AqFmb26Buex167HYa9dZWOk8x-JlB1OK3bwcvvjDFaU,32927
+alembic/testing/suite/test_autogen_identity.py,sha256=kcuqngG7qXAKPJDX4U8sRzPKHEJECHuZ0DtuaS6tVkk,5824
+alembic/testing/suite/test_environment.py,sha256=w9F0xnLEbALeR8k6_-Tz6JHvy91IqiTSypNasVzXfZQ,11877
+alembic/testing/suite/test_op.py,sha256=2XQCdm_NmnPxHGuGj7hmxMzIhKxXNotUsKdACXzE1mM,1343
+alembic/testing/util.py,sha256=CQrcQDA8fs_7ME85z5ydb-Bt70soIIID-qNY1vbR2dg,3350
+alembic/testing/warnings.py,sha256=RxA7x_8GseANgw07Us8JN_1iGbANxaw6_VitX2ZGQH4,1078
+alembic/util/__init__.py,sha256=cPF_jjFx7YRBByHHDqW3wxCIHsqnGfncEr_i238aduY,1202
+alembic/util/__pycache__/__init__.cpython-312.pyc,,
+alembic/util/__pycache__/compat.cpython-312.pyc,,
+alembic/util/__pycache__/editor.cpython-312.pyc,,
+alembic/util/__pycache__/exc.cpython-312.pyc,,
+alembic/util/__pycache__/langhelpers.cpython-312.pyc,,
+alembic/util/__pycache__/messaging.cpython-312.pyc,,
+alembic/util/__pycache__/pyfiles.cpython-312.pyc,,
+alembic/util/__pycache__/sqla_compat.cpython-312.pyc,,
+alembic/util/compat.py,sha256=WN8jPPFB9ri_uuEM1HEaN1ak3RJc_H3x8NqvtFkoXuM,2279
+alembic/util/editor.py,sha256=JIz6_BdgV8_oKtnheR6DZoB7qnrHrlRgWjx09AsTsUw,2546
+alembic/util/exc.py,sha256=KQTru4zcgAmN4IxLMwLFS56XToUewaXB7oOLcPNjPwg,98
+alembic/util/langhelpers.py,sha256=ZFGyGygHRbztOeajpajppyhd-Gp4PB5slMuvCFVrnmg,8591
+alembic/util/messaging.py,sha256=B6T-loMhIOY3OTbG47Ywp1Df9LZn18PgjwpwLrD1VNg,3042
+alembic/util/pyfiles.py,sha256=95J01FChN0j2uP3p72mjaOQvh5wC6XbdGtTDK8oEzsQ,3373
+alembic/util/sqla_compat.py,sha256=94MHlkj43y-QQySz5dCUiJUNOPr3BF9TQ_BrP6ey-8w,18906
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic-1.12.1.dist-info/REQUESTED b/Backend/venv/lib/python3.12/site-packages/alembic-1.12.1.dist-info/REQUESTED
new file mode 100644
index 00000000..e69de29b
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic-1.12.1.dist-info/WHEEL b/Backend/venv/lib/python3.12/site-packages/alembic-1.12.1.dist-info/WHEEL
new file mode 100644
index 00000000..7e688737
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/alembic-1.12.1.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.41.2)
+Root-Is-Purelib: true
+Tag: py3-none-any
+
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic-1.12.1.dist-info/entry_points.txt b/Backend/venv/lib/python3.12/site-packages/alembic-1.12.1.dist-info/entry_points.txt
new file mode 100644
index 00000000..59452681
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/alembic-1.12.1.dist-info/entry_points.txt
@@ -0,0 +1,2 @@
+[console_scripts]
+alembic = alembic.config:main
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic-1.12.1.dist-info/top_level.txt b/Backend/venv/lib/python3.12/site-packages/alembic-1.12.1.dist-info/top_level.txt
new file mode 100644
index 00000000..b5bd98d3
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/alembic-1.12.1.dist-info/top_level.txt
@@ -0,0 +1 @@
+alembic
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/__init__.py b/Backend/venv/lib/python3.12/site-packages/alembic/__init__.py
new file mode 100644
index 00000000..c5870fb1
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/alembic/__init__.py
@@ -0,0 +1,6 @@
+import sys
+
+from . import context
+from . import op
+
+__version__ = "1.12.1"
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/__main__.py b/Backend/venv/lib/python3.12/site-packages/alembic/__main__.py
new file mode 100644
index 00000000..af1b8e87
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/alembic/__main__.py
@@ -0,0 +1,4 @@
+from .config import main
+
+if __name__ == "__main__":
+ main(prog="alembic")
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/alembic/__pycache__/__init__.cpython-312.pyc
new file mode 100644
index 00000000..f81453fe
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/alembic/__pycache__/__init__.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/__pycache__/__main__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/alembic/__pycache__/__main__.cpython-312.pyc
new file mode 100644
index 00000000..eb2d9c8f
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/alembic/__pycache__/__main__.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/__pycache__/command.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/alembic/__pycache__/command.cpython-312.pyc
new file mode 100644
index 00000000..4d187d60
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/alembic/__pycache__/command.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/__pycache__/config.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/alembic/__pycache__/config.cpython-312.pyc
new file mode 100644
index 00000000..2d91df5f
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/alembic/__pycache__/config.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/__pycache__/context.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/alembic/__pycache__/context.cpython-312.pyc
new file mode 100644
index 00000000..75113e7f
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/alembic/__pycache__/context.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/__pycache__/environment.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/alembic/__pycache__/environment.cpython-312.pyc
new file mode 100644
index 00000000..ae6aa5a2
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/alembic/__pycache__/environment.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/__pycache__/migration.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/alembic/__pycache__/migration.cpython-312.pyc
new file mode 100644
index 00000000..bb5ec359
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/alembic/__pycache__/migration.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/__pycache__/op.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/alembic/__pycache__/op.cpython-312.pyc
new file mode 100644
index 00000000..3e2d1e87
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/alembic/__pycache__/op.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/autogenerate/__init__.py b/Backend/venv/lib/python3.12/site-packages/alembic/autogenerate/__init__.py
new file mode 100644
index 00000000..cd2ed1c1
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/alembic/autogenerate/__init__.py
@@ -0,0 +1,10 @@
+from .api import _render_migration_diffs
+from .api import compare_metadata
+from .api import produce_migrations
+from .api import render_python_code
+from .api import RevisionContext
+from .compare import _produce_net_changes
+from .compare import comparators
+from .render import render_op_text
+from .render import renderers
+from .rewriter import Rewriter
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/autogenerate/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/alembic/autogenerate/__pycache__/__init__.cpython-312.pyc
new file mode 100644
index 00000000..784442a4
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/alembic/autogenerate/__pycache__/__init__.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/autogenerate/__pycache__/api.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/alembic/autogenerate/__pycache__/api.cpython-312.pyc
new file mode 100644
index 00000000..dbe4d2cc
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/alembic/autogenerate/__pycache__/api.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/autogenerate/__pycache__/compare.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/alembic/autogenerate/__pycache__/compare.cpython-312.pyc
new file mode 100644
index 00000000..e4c9184b
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/alembic/autogenerate/__pycache__/compare.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/autogenerate/__pycache__/render.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/alembic/autogenerate/__pycache__/render.cpython-312.pyc
new file mode 100644
index 00000000..cadd1cad
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/alembic/autogenerate/__pycache__/render.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/autogenerate/__pycache__/rewriter.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/alembic/autogenerate/__pycache__/rewriter.cpython-312.pyc
new file mode 100644
index 00000000..24dadd82
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/alembic/autogenerate/__pycache__/rewriter.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/autogenerate/api.py b/Backend/venv/lib/python3.12/site-packages/alembic/autogenerate/api.py
new file mode 100644
index 00000000..7282487b
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/alembic/autogenerate/api.py
@@ -0,0 +1,647 @@
+from __future__ import annotations
+
+import contextlib
+from typing import Any
+from typing import Dict
+from typing import Iterator
+from typing import List
+from typing import Optional
+from typing import Sequence
+from typing import Set
+from typing import TYPE_CHECKING
+from typing import Union
+
+from sqlalchemy import inspect
+
+from . import compare
+from . import render
+from .. import util
+from ..operations import ops
+
+"""Provide the 'autogenerate' feature which can produce migration operations
+automatically."""
+
+if TYPE_CHECKING:
+ from sqlalchemy.engine import Connection
+ from sqlalchemy.engine import Dialect
+ from sqlalchemy.engine import Inspector
+ from sqlalchemy.sql.schema import MetaData
+ from sqlalchemy.sql.schema import SchemaItem
+
+ from ..config import Config
+ from ..operations.ops import DowngradeOps
+ from ..operations.ops import MigrationScript
+ from ..operations.ops import UpgradeOps
+ from ..runtime.environment import NameFilterParentNames
+ from ..runtime.environment import NameFilterType
+ from ..runtime.environment import ProcessRevisionDirectiveFn
+ from ..runtime.environment import RenderItemFn
+ from ..runtime.migration import MigrationContext
+ from ..script.base import Script
+ from ..script.base import ScriptDirectory
+ from ..script.revision import _GetRevArg
+
+
+def compare_metadata(context: MigrationContext, metadata: MetaData) -> Any:
+ """Compare a database schema to that given in a
+ :class:`~sqlalchemy.schema.MetaData` instance.
+
+ The database connection is presented in the context
+ of a :class:`.MigrationContext` object, which
+ provides database connectivity as well as optional
+ comparison functions to use for datatypes and
+ server defaults - see the "autogenerate" arguments
+ at :meth:`.EnvironmentContext.configure`
+ for details on these.
+
+ The return format is a list of "diff" directives,
+ each representing individual differences::
+
+ from alembic.migration import MigrationContext
+ from alembic.autogenerate import compare_metadata
+ from sqlalchemy import (
+ create_engine,
+ MetaData,
+ Column,
+ Integer,
+ String,
+ Table,
+ text,
+ )
+ import pprint
+
+ engine = create_engine("sqlite://")
+
+ with engine.begin() as conn:
+ conn.execute(
+ text(
+ '''
+ create table foo (
+ id integer not null primary key,
+ old_data varchar,
+ x integer
+ )
+ '''
+ )
+ )
+ conn.execute(text("create table bar (data varchar)"))
+
+ metadata = MetaData()
+ Table(
+ "foo",
+ metadata,
+ Column("id", Integer, primary_key=True),
+ Column("data", Integer),
+ Column("x", Integer, nullable=False),
+ )
+ Table("bat", metadata, Column("info", String))
+
+ mc = MigrationContext.configure(engine.connect())
+
+ diff = compare_metadata(mc, metadata)
+ pprint.pprint(diff, indent=2, width=20)
+
+ Output::
+
+ [
+ (
+ "add_table",
+ Table(
+ "bat",
+ MetaData(),
+ Column("info", String(), table=),
+ schema=None,
+ ),
+ ),
+ (
+ "remove_table",
+ Table(
+ "bar",
+ MetaData(),
+ Column("data", VARCHAR(), table=),
+ schema=None,
+ ),
+ ),
+ (
+ "add_column",
+ None,
+ "foo",
+ Column("data", Integer(), table=),
+ ),
+ [
+ (
+ "modify_nullable",
+ None,
+ "foo",
+ "x",
+ {
+ "existing_comment": None,
+ "existing_server_default": False,
+ "existing_type": INTEGER(),
+ },
+ True,
+ False,
+ )
+ ],
+ (
+ "remove_column",
+ None,
+ "foo",
+ Column("old_data", VARCHAR(), table=),
+ ),
+ ]
+
+ :param context: a :class:`.MigrationContext`
+ instance.
+ :param metadata: a :class:`~sqlalchemy.schema.MetaData`
+ instance.
+
+ .. seealso::
+
+ :func:`.produce_migrations` - produces a :class:`.MigrationScript`
+ structure based on metadata comparison.
+
+ """
+
+ migration_script = produce_migrations(context, metadata)
+ return migration_script.upgrade_ops.as_diffs()
+
+
+def produce_migrations(
+ context: MigrationContext, metadata: MetaData
+) -> MigrationScript:
+ """Produce a :class:`.MigrationScript` structure based on schema
+ comparison.
+
+ This function does essentially what :func:`.compare_metadata` does,
+ but then runs the resulting list of diffs to produce the full
+ :class:`.MigrationScript` object. For an example of what this looks like,
+ see the example in :ref:`customizing_revision`.
+
+ .. seealso::
+
+ :func:`.compare_metadata` - returns more fundamental "diff"
+ data from comparing a schema.
+
+ """
+
+ autogen_context = AutogenContext(context, metadata=metadata)
+
+ migration_script = ops.MigrationScript(
+ rev_id=None,
+ upgrade_ops=ops.UpgradeOps([]),
+ downgrade_ops=ops.DowngradeOps([]),
+ )
+
+ compare._populate_migration_script(autogen_context, migration_script)
+
+ return migration_script
+
+
+def render_python_code(
+ up_or_down_op: Union[UpgradeOps, DowngradeOps],
+ sqlalchemy_module_prefix: str = "sa.",
+ alembic_module_prefix: str = "op.",
+ render_as_batch: bool = False,
+ imports: Sequence[str] = (),
+ render_item: Optional[RenderItemFn] = None,
+ migration_context: Optional[MigrationContext] = None,
+ user_module_prefix: Optional[str] = None,
+) -> str:
+ """Render Python code given an :class:`.UpgradeOps` or
+ :class:`.DowngradeOps` object.
+
+ This is a convenience function that can be used to test the
+ autogenerate output of a user-defined :class:`.MigrationScript` structure.
+
+ :param up_or_down_op: :class:`.UpgradeOps` or :class:`.DowngradeOps` object
+ :param sqlalchemy_module_prefix: module prefix for SQLAlchemy objects
+ :param alembic_module_prefix: module prefix for Alembic constructs
+ :param render_as_batch: use "batch operations" style for rendering
+ :param imports: sequence of import symbols to add
+ :param render_item: callable to render items
+ :param migration_context: optional :class:`.MigrationContext`
+ :param user_module_prefix: optional string prefix for user-defined types
+
+ .. versionadded:: 1.11.0
+
+ """
+ opts = {
+ "sqlalchemy_module_prefix": sqlalchemy_module_prefix,
+ "alembic_module_prefix": alembic_module_prefix,
+ "render_item": render_item,
+ "render_as_batch": render_as_batch,
+ "user_module_prefix": user_module_prefix,
+ }
+
+ if migration_context is None:
+ from ..runtime.migration import MigrationContext
+ from sqlalchemy.engine.default import DefaultDialect
+
+ migration_context = MigrationContext.configure(
+ dialect=DefaultDialect()
+ )
+
+ autogen_context = AutogenContext(migration_context, opts=opts)
+ autogen_context.imports = set(imports)
+ return render._indent(
+ render._render_cmd_body(up_or_down_op, autogen_context)
+ )
+
+
+def _render_migration_diffs(
+ context: MigrationContext, template_args: Dict[Any, Any]
+) -> None:
+ """legacy, used by test_autogen_composition at the moment"""
+
+ autogen_context = AutogenContext(context)
+
+ upgrade_ops = ops.UpgradeOps([])
+ compare._produce_net_changes(autogen_context, upgrade_ops)
+
+ migration_script = ops.MigrationScript(
+ rev_id=None,
+ upgrade_ops=upgrade_ops,
+ downgrade_ops=upgrade_ops.reverse(),
+ )
+
+ render._render_python_into_templatevars(
+ autogen_context, migration_script, template_args
+ )
+
+
+class AutogenContext:
+ """Maintains configuration and state that's specific to an
+ autogenerate operation."""
+
+ metadata: Optional[MetaData] = None
+ """The :class:`~sqlalchemy.schema.MetaData` object
+ representing the destination.
+
+ This object is the one that is passed within ``env.py``
+ to the :paramref:`.EnvironmentContext.configure.target_metadata`
+ parameter. It represents the structure of :class:`.Table` and other
+ objects as stated in the current database model, and represents the
+ destination structure for the database being examined.
+
+ While the :class:`~sqlalchemy.schema.MetaData` object is primarily
+ known as a collection of :class:`~sqlalchemy.schema.Table` objects,
+ it also has an :attr:`~sqlalchemy.schema.MetaData.info` dictionary
+ that may be used by end-user schemes to store additional schema-level
+ objects that are to be compared in custom autogeneration schemes.
+
+ """
+
+ connection: Optional[Connection] = None
+ """The :class:`~sqlalchemy.engine.base.Connection` object currently
+ connected to the database backend being compared.
+
+ This is obtained from the :attr:`.MigrationContext.bind` and is
+ ultimately set up in the ``env.py`` script.
+
+ """
+
+ dialect: Optional[Dialect] = None
+ """The :class:`~sqlalchemy.engine.Dialect` object currently in use.
+
+ This is normally obtained from the
+ :attr:`~sqlalchemy.engine.base.Connection.dialect` attribute.
+
+ """
+
+ imports: Set[str] = None # type: ignore[assignment]
+ """A ``set()`` which contains string Python import directives.
+
+ The directives are to be rendered into the ``${imports}`` section
+ of a script template. The set is normally empty and can be modified
+ within hooks such as the
+ :paramref:`.EnvironmentContext.configure.render_item` hook.
+
+ .. seealso::
+
+ :ref:`autogen_render_types`
+
+ """
+
+ migration_context: MigrationContext = None # type: ignore[assignment]
+ """The :class:`.MigrationContext` established by the ``env.py`` script."""
+
+ def __init__(
+ self,
+ migration_context: MigrationContext,
+ metadata: Optional[MetaData] = None,
+ opts: Optional[dict] = None,
+ autogenerate: bool = True,
+ ) -> None:
+ if (
+ autogenerate
+ and migration_context is not None
+ and migration_context.as_sql
+ ):
+ raise util.CommandError(
+ "autogenerate can't use as_sql=True as it prevents querying "
+ "the database for schema information"
+ )
+
+ if opts is None:
+ opts = migration_context.opts
+
+ self.metadata = metadata = (
+ opts.get("target_metadata", None) if metadata is None else metadata
+ )
+
+ if (
+ autogenerate
+ and metadata is None
+ and migration_context is not None
+ and migration_context.script is not None
+ ):
+ raise util.CommandError(
+ "Can't proceed with --autogenerate option; environment "
+ "script %s does not provide "
+ "a MetaData object or sequence of objects to the context."
+ % (migration_context.script.env_py_location)
+ )
+
+ include_object = opts.get("include_object", None)
+ include_name = opts.get("include_name", None)
+
+ object_filters = []
+ name_filters = []
+ if include_object:
+ object_filters.append(include_object)
+ if include_name:
+ name_filters.append(include_name)
+
+ self._object_filters = object_filters
+ self._name_filters = name_filters
+
+ self.migration_context = migration_context
+ if self.migration_context is not None:
+ self.connection = self.migration_context.bind
+ self.dialect = self.migration_context.dialect
+
+ self.imports = set()
+ self.opts: Dict[str, Any] = opts
+ self._has_batch: bool = False
+
+ @util.memoized_property
+ def inspector(self) -> Inspector:
+ if self.connection is None:
+ raise TypeError(
+ "can't return inspector as this "
+ "AutogenContext has no database connection"
+ )
+ return inspect(self.connection)
+
+ @contextlib.contextmanager
+ def _within_batch(self) -> Iterator[None]:
+ self._has_batch = True
+ yield
+ self._has_batch = False
+
+ def run_name_filters(
+ self,
+ name: Optional[str],
+ type_: NameFilterType,
+ parent_names: NameFilterParentNames,
+ ) -> bool:
+ """Run the context's name filters and return True if the targets
+ should be part of the autogenerate operation.
+
+ This method should be run for every kind of name encountered within the
+ reflection side of an autogenerate operation, giving the environment
+ the chance to filter what names should be reflected as database
+ objects. The filters here are produced directly via the
+ :paramref:`.EnvironmentContext.configure.include_name` parameter.
+
+ """
+ if "schema_name" in parent_names:
+ if type_ == "table":
+ table_name = name
+ else:
+ table_name = parent_names.get("table_name", None)
+ if table_name:
+ schema_name = parent_names["schema_name"]
+ if schema_name:
+ parent_names["schema_qualified_table_name"] = "%s.%s" % (
+ schema_name,
+ table_name,
+ )
+ else:
+ parent_names["schema_qualified_table_name"] = table_name
+
+ for fn in self._name_filters:
+ if not fn(name, type_, parent_names):
+ return False
+ else:
+ return True
+
+ def run_object_filters(
+ self,
+ object_: SchemaItem,
+ name: Optional[str],
+ type_: NameFilterType,
+ reflected: bool,
+ compare_to: Optional[SchemaItem],
+ ) -> bool:
+ """Run the context's object filters and return True if the targets
+ should be part of the autogenerate operation.
+
+ This method should be run for every kind of object encountered within
+ an autogenerate operation, giving the environment the chance
+ to filter what objects should be included in the comparison.
+ The filters here are produced directly via the
+ :paramref:`.EnvironmentContext.configure.include_object` parameter.
+
+ """
+ for fn in self._object_filters:
+ if not fn(object_, name, type_, reflected, compare_to):
+ return False
+ else:
+ return True
+
+ run_filters = run_object_filters
+
+ @util.memoized_property
+ def sorted_tables(self):
+ """Return an aggregate of the :attr:`.MetaData.sorted_tables`
+ collection(s).
+
+ For a sequence of :class:`.MetaData` objects, this
+ concatenates the :attr:`.MetaData.sorted_tables` collection
+ for each individual :class:`.MetaData` in the order of the
+ sequence. It does **not** collate the sorted tables collections.
+
+ """
+ result = []
+ for m in util.to_list(self.metadata):
+ result.extend(m.sorted_tables)
+ return result
+
+ @util.memoized_property
+ def table_key_to_table(self):
+ """Return an aggregate of the :attr:`.MetaData.tables` dictionaries.
+
+ The :attr:`.MetaData.tables` collection is a dictionary of table key
+ to :class:`.Table`; this method aggregates the dictionary across
+ multiple :class:`.MetaData` objects into one dictionary.
+
+ Duplicate table keys are **not** supported; if two :class:`.MetaData`
+ objects contain the same table key, an exception is raised.
+
+ """
+ result = {}
+ for m in util.to_list(self.metadata):
+ intersect = set(result).intersection(set(m.tables))
+ if intersect:
+ raise ValueError(
+ "Duplicate table keys across multiple "
+ "MetaData objects: %s"
+ % (", ".join('"%s"' % key for key in sorted(intersect)))
+ )
+
+ result.update(m.tables)
+ return result
+
+
+class RevisionContext:
+ """Maintains configuration and state that's specific to a revision
+ file generation operation."""
+
+ generated_revisions: List[MigrationScript]
+ process_revision_directives: Optional[ProcessRevisionDirectiveFn]
+
+ def __init__(
+ self,
+ config: Config,
+ script_directory: ScriptDirectory,
+ command_args: Dict[str, Any],
+ process_revision_directives: Optional[
+ ProcessRevisionDirectiveFn
+ ] = None,
+ ) -> None:
+ self.config = config
+ self.script_directory = script_directory
+ self.command_args = command_args
+ self.process_revision_directives = process_revision_directives
+ self.template_args = {
+ "config": config # Let templates use config for
+ # e.g. multiple databases
+ }
+ self.generated_revisions = [self._default_revision()]
+
+ def _to_script(
+ self, migration_script: MigrationScript
+ ) -> Optional[Script]:
+ template_args: Dict[str, Any] = self.template_args.copy()
+
+ if getattr(migration_script, "_needs_render", False):
+ autogen_context = self._last_autogen_context
+
+ # clear out existing imports if we are doing multiple
+ # renders
+ autogen_context.imports = set()
+ if migration_script.imports:
+ autogen_context.imports.update(migration_script.imports)
+ render._render_python_into_templatevars(
+ autogen_context, migration_script, template_args
+ )
+
+ assert migration_script.rev_id is not None
+ return self.script_directory.generate_revision(
+ migration_script.rev_id,
+ migration_script.message,
+ refresh=True,
+ head=migration_script.head,
+ splice=migration_script.splice,
+ branch_labels=migration_script.branch_label,
+ version_path=migration_script.version_path,
+ depends_on=migration_script.depends_on,
+ **template_args,
+ )
+
+ def run_autogenerate(
+ self, rev: _GetRevArg, migration_context: MigrationContext
+ ) -> None:
+ self._run_environment(rev, migration_context, True)
+
+ def run_no_autogenerate(
+ self, rev: _GetRevArg, migration_context: MigrationContext
+ ) -> None:
+ self._run_environment(rev, migration_context, False)
+
+ def _run_environment(
+ self,
+ rev: _GetRevArg,
+ migration_context: MigrationContext,
+ autogenerate: bool,
+ ) -> None:
+ if autogenerate:
+ if self.command_args["sql"]:
+ raise util.CommandError(
+ "Using --sql with --autogenerate does not make any sense"
+ )
+ if set(self.script_directory.get_revisions(rev)) != set(
+ self.script_directory.get_revisions("heads")
+ ):
+ raise util.CommandError("Target database is not up to date.")
+
+ upgrade_token = migration_context.opts["upgrade_token"]
+ downgrade_token = migration_context.opts["downgrade_token"]
+
+ migration_script = self.generated_revisions[-1]
+ if not getattr(migration_script, "_needs_render", False):
+ migration_script.upgrade_ops_list[-1].upgrade_token = upgrade_token
+ migration_script.downgrade_ops_list[
+ -1
+ ].downgrade_token = downgrade_token
+ migration_script._needs_render = True
+ else:
+ migration_script._upgrade_ops.append(
+ ops.UpgradeOps([], upgrade_token=upgrade_token)
+ )
+ migration_script._downgrade_ops.append(
+ ops.DowngradeOps([], downgrade_token=downgrade_token)
+ )
+
+ autogen_context = AutogenContext(
+ migration_context, autogenerate=autogenerate
+ )
+ self._last_autogen_context: AutogenContext = autogen_context
+
+ if autogenerate:
+ compare._populate_migration_script(
+ autogen_context, migration_script
+ )
+
+ if self.process_revision_directives:
+ self.process_revision_directives(
+ migration_context, rev, self.generated_revisions
+ )
+
+ hook = migration_context.opts["process_revision_directives"]
+ if hook:
+ hook(migration_context, rev, self.generated_revisions)
+
+ for migration_script in self.generated_revisions:
+ migration_script._needs_render = True
+
+ def _default_revision(self) -> MigrationScript:
+ command_args: Dict[str, Any] = self.command_args
+ op = ops.MigrationScript(
+ rev_id=command_args["rev_id"] or util.rev_id(),
+ message=command_args["message"],
+ upgrade_ops=ops.UpgradeOps([]),
+ downgrade_ops=ops.DowngradeOps([]),
+ head=command_args["head"],
+ splice=command_args["splice"],
+ branch_label=command_args["branch_label"],
+ version_path=command_args["version_path"],
+ depends_on=command_args["depends_on"],
+ )
+ return op
+
+ def generate_scripts(self) -> Iterator[Optional[Script]]:
+ for generated_revision in self.generated_revisions:
+ yield self._to_script(generated_revision)
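Taken together, the API in this module supports programmatic autogeneration outside the ``alembic revision`` command. A hedged sketch combining :func:`.produce_migrations` with :func:`.render_python_code` (the in-memory SQLite engine and empty MetaData are placeholders):

    from sqlalchemy import create_engine, MetaData
    from alembic.migration import MigrationContext
    from alembic.autogenerate import produce_migrations, render_python_code

    engine = create_engine("sqlite://")
    metadata = MetaData()  # would normally carry the application's tables

    with engine.connect() as conn:
        mc = MigrationContext.configure(conn)
        script = produce_migrations(mc, metadata)

    # Render the upgrade operations as the Python body of a revision file.
    print(render_python_code(script.upgrade_ops))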
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/autogenerate/compare.py b/Backend/venv/lib/python3.12/site-packages/alembic/autogenerate/compare.py
new file mode 100644
index 00000000..a24a75d1
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/alembic/autogenerate/compare.py
@@ -0,0 +1,1394 @@
+from __future__ import annotations
+
+import contextlib
+import logging
+import re
+from typing import Any
+from typing import cast
+from typing import Dict
+from typing import Iterator
+from typing import List
+from typing import Mapping
+from typing import Optional
+from typing import Set
+from typing import Tuple
+from typing import TYPE_CHECKING
+from typing import Union
+
+from sqlalchemy import event
+from sqlalchemy import inspect
+from sqlalchemy import schema as sa_schema
+from sqlalchemy import text
+from sqlalchemy import types as sqltypes
+from sqlalchemy.sql import expression
+from sqlalchemy.util import OrderedSet
+
+from alembic.ddl.base import _fk_spec
+from .. import util
+from ..operations import ops
+from ..util import sqla_compat
+
+if TYPE_CHECKING:
+ from typing import Literal
+
+ from sqlalchemy.engine.reflection import Inspector
+ from sqlalchemy.sql.elements import quoted_name
+ from sqlalchemy.sql.elements import TextClause
+ from sqlalchemy.sql.schema import Column
+ from sqlalchemy.sql.schema import ForeignKeyConstraint
+ from sqlalchemy.sql.schema import Index
+ from sqlalchemy.sql.schema import Table
+ from sqlalchemy.sql.schema import UniqueConstraint
+
+ from alembic.autogenerate.api import AutogenContext
+ from alembic.ddl.impl import DefaultImpl
+ from alembic.operations.ops import AlterColumnOp
+ from alembic.operations.ops import MigrationScript
+ from alembic.operations.ops import ModifyTableOps
+ from alembic.operations.ops import UpgradeOps
+
+log = logging.getLogger(__name__)
+
+
+def _populate_migration_script(
+ autogen_context: AutogenContext, migration_script: MigrationScript
+) -> None:
+ upgrade_ops = migration_script.upgrade_ops_list[-1]
+ downgrade_ops = migration_script.downgrade_ops_list[-1]
+
+ _produce_net_changes(autogen_context, upgrade_ops)
+ upgrade_ops.reverse_into(downgrade_ops)
+
+
+comparators = util.Dispatcher(uselist=True)
+
+
+def _produce_net_changes(
+ autogen_context: AutogenContext, upgrade_ops: UpgradeOps
+) -> None:
+ connection = autogen_context.connection
+ assert connection is not None
+ include_schemas = autogen_context.opts.get("include_schemas", False)
+
+ inspector: Inspector = inspect(connection)
+
+ default_schema = connection.dialect.default_schema_name
+ schemas: Set[Optional[str]]
+ if include_schemas:
+ schemas = set(inspector.get_schema_names())
+ # replace default schema name with None
+ schemas.discard("information_schema")
+ # replace the "default" schema with None
+ schemas.discard(default_schema)
+ schemas.add(None)
+ else:
+ schemas = {None}
+
+ schemas = {
+ s for s in schemas if autogen_context.run_name_filters(s, "schema", {})
+ }
+
+ assert autogen_context.dialect is not None
+ comparators.dispatch("schema", autogen_context.dialect.name)(
+ autogen_context, upgrade_ops, schemas
+ )
+
+
+@comparators.dispatch_for("schema")
+def _autogen_for_tables(
+ autogen_context: AutogenContext,
+ upgrade_ops: UpgradeOps,
+ schemas: Union[Set[None], Set[Optional[str]]],
+) -> None:
+ inspector = autogen_context.inspector
+
+ conn_table_names: Set[Tuple[Optional[str], str]] = set()
+
+ version_table_schema = (
+ autogen_context.migration_context.version_table_schema
+ )
+ version_table = autogen_context.migration_context.version_table
+
+ for schema_name in schemas:
+ tables = set(inspector.get_table_names(schema=schema_name))
+ if schema_name == version_table_schema:
+ tables = tables.difference(
+ [autogen_context.migration_context.version_table]
+ )
+
+ conn_table_names.update(
+ (schema_name, tname)
+ for tname in tables
+ if autogen_context.run_name_filters(
+ tname, "table", {"schema_name": schema_name}
+ )
+ )
+
+ metadata_table_names = OrderedSet(
+ [(table.schema, table.name) for table in autogen_context.sorted_tables]
+ ).difference([(version_table_schema, version_table)])
+
+ _compare_tables(
+ conn_table_names,
+ metadata_table_names,
+ inspector,
+ upgrade_ops,
+ autogen_context,
+ )
+
+
+def _compare_tables(
+ conn_table_names: set,
+ metadata_table_names: set,
+ inspector: Inspector,
+ upgrade_ops: UpgradeOps,
+ autogen_context: AutogenContext,
+) -> None:
+ default_schema = inspector.bind.dialect.default_schema_name
+
+ # tables coming from the connection will not have "schema"
+ # set if it matches default_schema_name; so we need a list
+ # of table names from local metadata that also have "None" if schema
+ # == default_schema_name. Most setups will be like this anyway but
+ # some are not (see #170)
+ metadata_table_names_no_dflt_schema = OrderedSet(
+ [
+ (schema if schema != default_schema else None, tname)
+ for schema, tname in metadata_table_names
+ ]
+ )
+
+ # to adjust for the MetaData collection storing the tables either
+ # as "schemaname.tablename" or just "tablename", create a new lookup
+ # which will match the "non-default-schema" keys to the Table object.
+ tname_to_table = {
+ no_dflt_schema: autogen_context.table_key_to_table[
+ sa_schema._get_table_key(tname, schema)
+ ]
+ for no_dflt_schema, (schema, tname) in zip(
+ metadata_table_names_no_dflt_schema, metadata_table_names
+ )
+ }
+ metadata_table_names = metadata_table_names_no_dflt_schema
+
+ for s, tname in metadata_table_names.difference(conn_table_names):
+ name = "%s.%s" % (s, tname) if s else tname
+ metadata_table = tname_to_table[(s, tname)]
+ if autogen_context.run_object_filters(
+ metadata_table, tname, "table", False, None
+ ):
+ upgrade_ops.ops.append(
+ ops.CreateTableOp.from_table(metadata_table)
+ )
+ log.info("Detected added table %r", name)
+ modify_table_ops = ops.ModifyTableOps(tname, [], schema=s)
+
+ comparators.dispatch("table")(
+ autogen_context,
+ modify_table_ops,
+ s,
+ tname,
+ None,
+ metadata_table,
+ )
+ if not modify_table_ops.is_empty():
+ upgrade_ops.ops.append(modify_table_ops)
+
+ removal_metadata = sa_schema.MetaData()
+ for s, tname in conn_table_names.difference(metadata_table_names):
+ name = sa_schema._get_table_key(tname, s)
+ exists = name in removal_metadata.tables
+ t = sa_schema.Table(tname, removal_metadata, schema=s)
+
+ if not exists:
+ event.listen(
+ t,
+ "column_reflect",
+ # fmt: off
+ autogen_context.migration_context.impl.
+                _compat_autogen_column_reflect(inspector),
+ # fmt: on
+ )
+ sqla_compat._reflect_table(inspector, t)
+ if autogen_context.run_object_filters(t, tname, "table", True, None):
+ modify_table_ops = ops.ModifyTableOps(tname, [], schema=s)
+
+ comparators.dispatch("table")(
+ autogen_context, modify_table_ops, s, tname, t, None
+ )
+ if not modify_table_ops.is_empty():
+ upgrade_ops.ops.append(modify_table_ops)
+
+ upgrade_ops.ops.append(ops.DropTableOp.from_table(t))
+ log.info("Detected removed table %r", name)
+
+ existing_tables = conn_table_names.intersection(metadata_table_names)
+
+ existing_metadata = sa_schema.MetaData()
+ conn_column_info = {}
+ for s, tname in existing_tables:
+ name = sa_schema._get_table_key(tname, s)
+ exists = name in existing_metadata.tables
+ t = sa_schema.Table(tname, existing_metadata, schema=s)
+ if not exists:
+ event.listen(
+ t,
+ "column_reflect",
+ # fmt: off
+ autogen_context.migration_context.impl.
+ _compat_autogen_column_reflect(inspector),
+ # fmt: on
+ )
+ sqla_compat._reflect_table(inspector, t)
+ conn_column_info[(s, tname)] = t
+
+ for s, tname in sorted(existing_tables, key=lambda x: (x[0] or "", x[1])):
+ s = s or None
+ name = "%s.%s" % (s, tname) if s else tname
+ metadata_table = tname_to_table[(s, tname)]
+ conn_table = existing_metadata.tables[name]
+
+ if autogen_context.run_object_filters(
+ metadata_table, tname, "table", False, conn_table
+ ):
+ modify_table_ops = ops.ModifyTableOps(tname, [], schema=s)
+ with _compare_columns(
+ s,
+ tname,
+ conn_table,
+ metadata_table,
+ modify_table_ops,
+ autogen_context,
+ inspector,
+ ):
+ comparators.dispatch("table")(
+ autogen_context,
+ modify_table_ops,
+ s,
+ tname,
+ conn_table,
+ metadata_table,
+ )
+
+ if not modify_table_ops.is_empty():
+ upgrade_ops.ops.append(modify_table_ops)
+
+
+_IndexColumnSortingOps: Mapping[str, Any] = util.immutabledict(
+ {
+ "asc": expression.asc,
+ "desc": expression.desc,
+ "nulls_first": expression.nullsfirst,
+ "nulls_last": expression.nullslast,
+ "nullsfirst": expression.nullsfirst, # 1_3 name
+ "nullslast": expression.nullslast, # 1_3 name
+ }
+)
+
+
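+# e.g. a reflected index with column_sorting={"x": ("desc", "nulls_last")}
+# leads _make_index() below to wrap the column as
+# expression.nullslast(expression.desc(col)).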
+def _make_index(
+ impl: DefaultImpl, params: Dict[str, Any], conn_table: Table
+) -> Optional[Index]:
+ exprs: list[Union[Column[Any], TextClause]] = []
+ sorting = params.get("column_sorting")
+
+ for num, col_name in enumerate(params["column_names"]):
+ item: Union[Column[Any], TextClause]
+ if col_name is None:
+ assert "expressions" in params
+ name = params["expressions"][num]
+ item = text(name)
+ else:
+ name = col_name
+ item = conn_table.c[col_name]
+ if sorting and name in sorting:
+ for operator in sorting[name]:
+ if operator in _IndexColumnSortingOps:
+ item = _IndexColumnSortingOps[operator](item)
+ exprs.append(item)
+ ix = sa_schema.Index(
+ params["name"],
+ *exprs,
+ unique=params["unique"],
+ _table=conn_table,
+ **impl.adjust_reflected_dialect_options(params, "index"),
+ )
+ if "duplicates_constraint" in params:
+ ix.info["duplicates_constraint"] = params["duplicates_constraint"]
+ return ix
+
+
+def _make_unique_constraint(
+ impl: DefaultImpl, params: Dict[str, Any], conn_table: Table
+) -> UniqueConstraint:
+ uq = sa_schema.UniqueConstraint(
+ *[conn_table.c[cname] for cname in params["column_names"]],
+ name=params["name"],
+ **impl.adjust_reflected_dialect_options(params, "unique_constraint"),
+ )
+ if "duplicates_index" in params:
+ uq.info["duplicates_index"] = params["duplicates_index"]
+
+ return uq
+
+
+def _make_foreign_key(
+ params: Dict[str, Any], conn_table: Table
+) -> ForeignKeyConstraint:
+ tname = params["referred_table"]
+ if params["referred_schema"]:
+ tname = "%s.%s" % (params["referred_schema"], tname)
+
+ options = params.get("options", {})
+
+ const = sa_schema.ForeignKeyConstraint(
+ [conn_table.c[cname] for cname in params["constrained_columns"]],
+ ["%s.%s" % (tname, n) for n in params["referred_columns"]],
+ onupdate=options.get("onupdate"),
+ ondelete=options.get("ondelete"),
+ deferrable=options.get("deferrable"),
+ initially=options.get("initially"),
+ name=params["name"],
+ )
+ # needed by 0.7
+ conn_table.append_constraint(const)
+ return const
+
+
+@contextlib.contextmanager
+def _compare_columns(
+ schema: Optional[str],
+ tname: Union[quoted_name, str],
+ conn_table: Table,
+ metadata_table: Table,
+ modify_table_ops: ModifyTableOps,
+ autogen_context: AutogenContext,
+ inspector: Inspector,
+) -> Iterator[None]:
+ name = "%s.%s" % (schema, tname) if schema else tname
+ metadata_col_names = OrderedSet(
+ c.name for c in metadata_table.c if not c.system
+ )
+ metadata_cols_by_name = {
+ c.name: c for c in metadata_table.c if not c.system
+ }
+
+ conn_col_names = {
+ c.name: c
+ for c in conn_table.c
+ if autogen_context.run_name_filters(
+ c.name, "column", {"table_name": tname, "schema_name": schema}
+ )
+ }
+
+ for cname in metadata_col_names.difference(conn_col_names):
+ if autogen_context.run_object_filters(
+ metadata_cols_by_name[cname], cname, "column", False, None
+ ):
+ modify_table_ops.ops.append(
+ ops.AddColumnOp.from_column_and_tablename(
+ schema, tname, metadata_cols_by_name[cname]
+ )
+ )
+ log.info("Detected added column '%s.%s'", name, cname)
+
+ for colname in metadata_col_names.intersection(conn_col_names):
+ metadata_col = metadata_cols_by_name[colname]
+ conn_col = conn_table.c[colname]
+ if not autogen_context.run_object_filters(
+ metadata_col, colname, "column", False, conn_col
+ ):
+ continue
+ alter_column_op = ops.AlterColumnOp(tname, colname, schema=schema)
+
+ comparators.dispatch("column")(
+ autogen_context,
+ alter_column_op,
+ schema,
+ tname,
+ colname,
+ conn_col,
+ metadata_col,
+ )
+
+ if alter_column_op.has_changes():
+ modify_table_ops.ops.append(alter_column_op)
+
+ yield
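+    # while control is yielded back to the caller, its "with" block runs the
+    # table-level comparators; the removed-column checks below run afterwards.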
+
+ for cname in set(conn_col_names).difference(metadata_col_names):
+ if autogen_context.run_object_filters(
+ conn_table.c[cname], cname, "column", True, None
+ ):
+ modify_table_ops.ops.append(
+ ops.DropColumnOp.from_column_and_tablename(
+ schema, tname, conn_table.c[cname]
+ )
+ )
+ log.info("Detected removed column '%s.%s'", name, cname)
+
+
+class _constraint_sig:
+ const: Union[UniqueConstraint, ForeignKeyConstraint, Index]
+
+ def md_name_to_sql_name(self, context: AutogenContext) -> Optional[str]:
+ return sqla_compat._get_constraint_final_name(
+ self.const, context.dialect
+ )
+
+ def __eq__(self, other):
+ return self.const == other.const
+
+ def __ne__(self, other):
+ return self.const != other.const
+
+ def __hash__(self) -> int:
+ return hash(self.const)
+
+
+class _uq_constraint_sig(_constraint_sig):
+ is_index = False
+ is_unique = True
+
+ def __init__(self, const: UniqueConstraint, impl: DefaultImpl) -> None:
+ self.const = const
+ self.name = const.name
+ self.sig = ("UNIQUE_CONSTRAINT",) + impl.create_unique_constraint_sig(
+ const
+ )
+
+ @property
+ def column_names(self) -> List[str]:
+ return [col.name for col in self.const.columns]
+
+
+class _ix_constraint_sig(_constraint_sig):
+ is_index = True
+
+ def __init__(self, const: Index, impl: DefaultImpl) -> None:
+ self.const = const
+ self.name = const.name
+ self.sig = ("INDEX",) + impl.create_index_sig(const)
+ self.is_unique = bool(const.unique)
+
+ def md_name_to_sql_name(self, context: AutogenContext) -> Optional[str]:
+ return sqla_compat._get_constraint_final_name(
+ self.const, context.dialect
+ )
+
+ @property
+ def column_names(self) -> Union[List[quoted_name], List[None]]:
+ return sqla_compat._get_index_column_names(self.const)
+
+
+class _fk_constraint_sig(_constraint_sig):
+ def __init__(
+ self, const: ForeignKeyConstraint, include_options: bool = False
+ ) -> None:
+ self.const = const
+ self.name = const.name
+
+ (
+ self.source_schema,
+ self.source_table,
+ self.source_columns,
+ self.target_schema,
+ self.target_table,
+ self.target_columns,
+ onupdate,
+ ondelete,
+ deferrable,
+ initially,
+ ) = _fk_spec(const)
+
+ self.sig: Tuple[Any, ...] = (
+ self.source_schema,
+ self.source_table,
+ tuple(self.source_columns),
+ self.target_schema,
+ self.target_table,
+ tuple(self.target_columns),
+ )
+ if include_options:
+ self.sig += (
+ (None if onupdate.lower() == "no action" else onupdate.lower())
+ if onupdate
+ else None,
+ (None if ondelete.lower() == "no action" else ondelete.lower())
+ if ondelete
+ else None,
+ # convert initially + deferrable into one three-state value
+ "initially_deferrable"
+ if initially and initially.lower() == "deferred"
+ else "deferrable"
+ if deferrable
+ else "not deferrable",
+ )
+
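+# e.g. with include_options=True, onupdate="NO ACTION" with no deferrable or
+# initially settings contributes (None, None, "not deferrable"), so such a
+# constraint compares equal to one that omits those options entirely.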
+
+@comparators.dispatch_for("table")
+def _compare_indexes_and_uniques(
+ autogen_context: AutogenContext,
+ modify_ops: ModifyTableOps,
+ schema: Optional[str],
+ tname: Union[quoted_name, str],
+ conn_table: Optional[Table],
+ metadata_table: Optional[Table],
+) -> None:
+ inspector = autogen_context.inspector
+ is_create_table = conn_table is None
+ is_drop_table = metadata_table is None
+ impl = autogen_context.migration_context.impl
+
+ # 1a. get raw indexes and unique constraints from metadata ...
+ if metadata_table is not None:
+ metadata_unique_constraints = {
+ uq
+ for uq in metadata_table.constraints
+ if isinstance(uq, sa_schema.UniqueConstraint)
+ }
+ metadata_indexes = set(metadata_table.indexes)
+ else:
+ metadata_unique_constraints = set()
+ metadata_indexes = set()
+
+ conn_uniques = conn_indexes = frozenset() # type:ignore[var-annotated]
+
+ supports_unique_constraints = False
+
+ unique_constraints_duplicate_unique_indexes = False
+
+ if conn_table is not None:
+ # 1b. ... and from connection, if the table exists
+ if hasattr(inspector, "get_unique_constraints"):
+ try:
+ conn_uniques = inspector.get_unique_constraints( # type:ignore[assignment] # noqa
+ tname, schema=schema
+ )
+ supports_unique_constraints = True
+ except NotImplementedError:
+ pass
+ except TypeError:
+ # number of arguments is off for the base
+ # method in SQLAlchemy due to the cache decorator
+ # not being present
+ pass
+ else:
+ conn_uniques = [ # type:ignore[assignment]
+ uq
+ for uq in conn_uniques
+ if autogen_context.run_name_filters(
+ uq["name"],
+ "unique_constraint",
+ {"table_name": tname, "schema_name": schema},
+ )
+ ]
+ for uq in conn_uniques:
+ if uq.get("duplicates_index"):
+ unique_constraints_duplicate_unique_indexes = True
+ try:
+ conn_indexes = inspector.get_indexes( # type:ignore[assignment]
+ tname, schema=schema
+ )
+ except NotImplementedError:
+ pass
+ else:
+ conn_indexes = [ # type:ignore[assignment]
+ ix
+ for ix in conn_indexes
+ if autogen_context.run_name_filters(
+ ix["name"],
+ "index",
+ {"table_name": tname, "schema_name": schema},
+ )
+ ]
+
+ # 2. convert conn-level objects from raw inspector records
+ # into schema objects
+ if is_drop_table:
+ # for DROP TABLE uniques are inline, don't need them
+ conn_uniques = set() # type:ignore[assignment]
+ else:
+ conn_uniques = { # type:ignore[assignment]
+ _make_unique_constraint(impl, uq_def, conn_table)
+ for uq_def in conn_uniques
+ }
+
+ conn_indexes = { # type:ignore[assignment]
+ index
+ for index in (
+ _make_index(impl, ix, conn_table) for ix in conn_indexes
+ )
+ if index is not None
+ }
+
+ # 2a. if the dialect dupes unique indexes as unique constraints
+ # (mysql and oracle), correct for that
+
+ if unique_constraints_duplicate_unique_indexes:
+ _correct_for_uq_duplicates_uix(
+ conn_uniques,
+ conn_indexes,
+ metadata_unique_constraints,
+ metadata_indexes,
+ autogen_context.dialect,
+ impl,
+ )
+
+ # 3. give the dialect a chance to omit indexes and constraints that
+ # we know are either added implicitly by the DB or that the DB
+ # can't accurately report on
+ autogen_context.migration_context.impl.correct_for_autogen_constraints(
+ conn_uniques, # type: ignore[arg-type]
+ conn_indexes, # type: ignore[arg-type]
+ metadata_unique_constraints,
+ metadata_indexes,
+ )
+
+ # 4. organize the constraints into "signature" collections, the
+ # _constraint_sig() objects provide a consistent facade over both
+ # Index and UniqueConstraint so we can easily work with them
+ # interchangeably
+ metadata_unique_constraints_sig = {
+ _uq_constraint_sig(uq, impl) for uq in metadata_unique_constraints
+ }
+
+ metadata_indexes_sig = {
+ _ix_constraint_sig(ix, impl) for ix in metadata_indexes
+ }
+
+ conn_unique_constraints = {
+ _uq_constraint_sig(uq, impl) for uq in conn_uniques
+ }
+
+ conn_indexes_sig = {_ix_constraint_sig(ix, impl) for ix in conn_indexes}
+
+ # 5. index things by name, for those objects that have names
+ metadata_names = {
+ cast(str, c.md_name_to_sql_name(autogen_context)): c
+ for c in metadata_unique_constraints_sig.union(
+ metadata_indexes_sig # type:ignore[arg-type]
+ )
+ if isinstance(c, _ix_constraint_sig)
+ or sqla_compat._constraint_is_named(c.const, autogen_context.dialect)
+ }
+
+ conn_uniques_by_name: Dict[sqla_compat._ConstraintName, _uq_constraint_sig]
+ conn_indexes_by_name: Dict[sqla_compat._ConstraintName, _ix_constraint_sig]
+
+ conn_uniques_by_name = {c.name: c for c in conn_unique_constraints}
+ conn_indexes_by_name = {c.name: c for c in conn_indexes_sig}
+ conn_names = {
+ c.name: c
+ for c in conn_unique_constraints.union(conn_indexes_sig)
+ if sqla_compat.constraint_name_string(c.name)
+ }
+
+ doubled_constraints = {
+ name: (conn_uniques_by_name[name], conn_indexes_by_name[name])
+ for name in set(conn_uniques_by_name).intersection(
+ conn_indexes_by_name
+ )
+ }
+
+ # 6. index things by "column signature", to help with unnamed unique
+ # constraints.
+ conn_uniques_by_sig = {uq.sig: uq for uq in conn_unique_constraints}
+ metadata_uniques_by_sig = {
+ uq.sig: uq for uq in metadata_unique_constraints_sig
+ }
+ metadata_indexes_by_sig = {ix.sig: ix for ix in metadata_indexes_sig}
+ unnamed_metadata_uniques = {
+ uq.sig: uq
+ for uq in metadata_unique_constraints_sig
+ if not sqla_compat._constraint_is_named(
+ uq.const, autogen_context.dialect
+ )
+ }
+
+ # assumptions:
+ # 1. a unique constraint or an index from the connection *always*
+ # has a name.
+ # 2. an index on the metadata side *always* has a name.
+ # 3. a unique constraint on the metadata side *might* have a name.
+ # 4. The backend may double up indexes as unique constraints and
+    #    vice versa (e.g. MySQL, PostgreSQL)
+
+ def obj_added(obj):
+ if obj.is_index:
+ if autogen_context.run_object_filters(
+ obj.const, obj.name, "index", False, None
+ ):
+ modify_ops.ops.append(ops.CreateIndexOp.from_index(obj.const))
+ log.info(
+ "Detected added index '%s' on %s",
+ obj.name,
+ ", ".join(["'%s'" % obj.column_names]),
+ )
+ else:
+ if not supports_unique_constraints:
+ # can't report unique indexes as added if we don't
+ # detect them
+ return
+ if is_create_table or is_drop_table:
+ # unique constraints are created inline with table defs
+ return
+ if autogen_context.run_object_filters(
+ obj.const, obj.name, "unique_constraint", False, None
+ ):
+ modify_ops.ops.append(
+ ops.AddConstraintOp.from_constraint(obj.const)
+ )
+ log.info(
+ "Detected added unique constraint '%s' on %s",
+ obj.name,
+ ", ".join(["'%s'" % obj.column_names]),
+ )
+
+ def obj_removed(obj):
+ if obj.is_index:
+ if obj.is_unique and not supports_unique_constraints:
+ # many databases double up unique constraints
+ # as unique indexes. without that list we can't
+ # be sure what we're doing here
+ return
+
+ if autogen_context.run_object_filters(
+ obj.const, obj.name, "index", True, None
+ ):
+ modify_ops.ops.append(ops.DropIndexOp.from_index(obj.const))
+ log.info(
+ "Detected removed index '%s' on '%s'", obj.name, tname
+ )
+ else:
+ if is_create_table or is_drop_table:
+ # if the whole table is being dropped, we don't need to
+ # consider unique constraint separately
+ return
+ if autogen_context.run_object_filters(
+ obj.const, obj.name, "unique_constraint", True, None
+ ):
+ modify_ops.ops.append(
+ ops.DropConstraintOp.from_constraint(obj.const)
+ )
+ log.info(
+ "Detected removed unique constraint '%s' on '%s'",
+ obj.name,
+ tname,
+ )
+
+ def obj_changed(old, new, msg):
+ if old.is_index:
+ if autogen_context.run_object_filters(
+ new.const, new.name, "index", False, old.const
+ ):
+ log.info(
+ "Detected changed index '%s' on '%s':%s",
+ old.name,
+ tname,
+ ", ".join(msg),
+ )
+ modify_ops.ops.append(ops.DropIndexOp.from_index(old.const))
+ modify_ops.ops.append(ops.CreateIndexOp.from_index(new.const))
+ else:
+ if autogen_context.run_object_filters(
+ new.const, new.name, "unique_constraint", False, old.const
+ ):
+ log.info(
+ "Detected changed unique constraint '%s' on '%s':%s",
+ old.name,
+ tname,
+ ", ".join(msg),
+ )
+ modify_ops.ops.append(
+ ops.DropConstraintOp.from_constraint(old.const)
+ )
+ modify_ops.ops.append(
+ ops.AddConstraintOp.from_constraint(new.const)
+ )
+
+ for removed_name in sorted(set(conn_names).difference(metadata_names)):
+ conn_obj: Union[_ix_constraint_sig, _uq_constraint_sig] = conn_names[
+ removed_name
+ ]
+ if not conn_obj.is_index and conn_obj.sig in unnamed_metadata_uniques:
+ continue
+ elif removed_name in doubled_constraints:
+ conn_uq, conn_idx = doubled_constraints[removed_name]
+ if (
+ conn_idx.sig not in metadata_indexes_by_sig
+ and conn_uq.sig not in metadata_uniques_by_sig
+ ):
+ obj_removed(conn_uq)
+ obj_removed(conn_idx)
+ else:
+ obj_removed(conn_obj)
+
+ for existing_name in sorted(set(metadata_names).intersection(conn_names)):
+ metadata_obj = metadata_names[existing_name]
+
+ if existing_name in doubled_constraints:
+ conn_uq, conn_idx = doubled_constraints[existing_name]
+ if metadata_obj.is_index:
+ conn_obj = conn_idx
+ else:
+ conn_obj = conn_uq
+ else:
+ conn_obj = conn_names[existing_name]
+
+ if conn_obj.is_index != metadata_obj.is_index:
+ obj_removed(conn_obj)
+ obj_added(metadata_obj)
+ else:
+ msg = []
+ if conn_obj.is_unique != metadata_obj.is_unique:
+ msg.append(
+ " unique=%r to unique=%r"
+ % (conn_obj.is_unique, metadata_obj.is_unique)
+ )
+ if conn_obj.sig != metadata_obj.sig:
+ msg.append(
+ " expression %r to %r" % (conn_obj.sig, metadata_obj.sig)
+ )
+
+ if msg:
+ obj_changed(conn_obj, metadata_obj, msg)
+
+ for added_name in sorted(set(metadata_names).difference(conn_names)):
+ obj = metadata_names[added_name]
+ obj_added(obj)
+
+ for uq_sig in unnamed_metadata_uniques:
+ if uq_sig not in conn_uniques_by_sig:
+ obj_added(unnamed_metadata_uniques[uq_sig])
+
+
+def _correct_for_uq_duplicates_uix(
+ conn_unique_constraints,
+ conn_indexes,
+ metadata_unique_constraints,
+ metadata_indexes,
+ dialect,
+ impl,
+):
+ # dedupe unique indexes vs. constraints, since MySQL / Oracle
+    # don't really have unique constraints as a separate construct.
+ # but look in the metadata and try to maintain constructs
+ # that already seem to be defined one way or the other
+ # on that side. This logic was formerly local to MySQL dialect,
+ # generalized to Oracle and others. See #276
+
+ # resolve final rendered name for unique constraints defined in the
+ # metadata. this includes truncation of long names. naming convention
+ # names currently should already be set as cons.name, however leave this
+ # to the sqla_compat to decide.
+ metadata_cons_names = [
+ (sqla_compat._get_constraint_final_name(cons, dialect), cons)
+ for cons in metadata_unique_constraints
+ ]
+
+ metadata_uq_names = {
+ name for name, cons in metadata_cons_names if name is not None
+ }
+
+ unnamed_metadata_uqs = {
+ _uq_constraint_sig(cons, impl).sig
+ for name, cons in metadata_cons_names
+ if name is None
+ }
+
+ metadata_ix_names = {
+ sqla_compat._get_constraint_final_name(cons, dialect)
+ for cons in metadata_indexes
+ if cons.unique
+ }
+
+ # for reflection side, names are in their final database form
+ # already since they're from the database
+ conn_ix_names = {cons.name: cons for cons in conn_indexes if cons.unique}
+
+ uqs_dupe_indexes = {
+ cons.name: cons
+ for cons in conn_unique_constraints
+ if cons.info["duplicates_index"]
+ }
+
+ for overlap in uqs_dupe_indexes:
+ if overlap not in metadata_uq_names:
+ if (
+ _uq_constraint_sig(uqs_dupe_indexes[overlap], impl).sig
+ not in unnamed_metadata_uqs
+ ):
+ conn_unique_constraints.discard(uqs_dupe_indexes[overlap])
+ elif overlap not in metadata_ix_names:
+ conn_indexes.discard(conn_ix_names[overlap])
+
+
+@comparators.dispatch_for("column")
+def _compare_nullable(
+ autogen_context: AutogenContext,
+ alter_column_op: AlterColumnOp,
+ schema: Optional[str],
+ tname: Union[quoted_name, str],
+ cname: Union[quoted_name, str],
+ conn_col: Column[Any],
+ metadata_col: Column[Any],
+) -> None:
+ metadata_col_nullable = metadata_col.nullable
+ conn_col_nullable = conn_col.nullable
+ alter_column_op.existing_nullable = conn_col_nullable
+
+ if conn_col_nullable is not metadata_col_nullable:
+ if (
+ sqla_compat._server_default_is_computed(
+ metadata_col.server_default, conn_col.server_default
+ )
+ and sqla_compat._nullability_might_be_unset(metadata_col)
+ or (
+ sqla_compat._server_default_is_identity(
+ metadata_col.server_default, conn_col.server_default
+ )
+ )
+ ):
+ log.info(
+ "Ignoring nullable change on identity column '%s.%s'",
+ tname,
+ cname,
+ )
+ else:
+ alter_column_op.modify_nullable = metadata_col_nullable
+ log.info(
+ "Detected %s on column '%s.%s'",
+ "NULL" if metadata_col_nullable else "NOT NULL",
+ tname,
+ cname,
+ )
+
+
+@comparators.dispatch_for("column")
+def _setup_autoincrement(
+ autogen_context: AutogenContext,
+ alter_column_op: AlterColumnOp,
+ schema: Optional[str],
+ tname: Union[quoted_name, str],
+ cname: quoted_name,
+ conn_col: Column[Any],
+ metadata_col: Column[Any],
+) -> None:
+ if metadata_col.table._autoincrement_column is metadata_col:
+ alter_column_op.kw["autoincrement"] = True
+ elif metadata_col.autoincrement is True:
+ alter_column_op.kw["autoincrement"] = True
+ elif metadata_col.autoincrement is False:
+ alter_column_op.kw["autoincrement"] = False
+
+
+@comparators.dispatch_for("column")
+def _compare_type(
+ autogen_context: AutogenContext,
+ alter_column_op: AlterColumnOp,
+ schema: Optional[str],
+ tname: Union[quoted_name, str],
+ cname: Union[quoted_name, str],
+ conn_col: Column[Any],
+ metadata_col: Column[Any],
+) -> None:
+ conn_type = conn_col.type
+ alter_column_op.existing_type = conn_type
+ metadata_type = metadata_col.type
+ if conn_type._type_affinity is sqltypes.NullType:
+ log.info(
+ "Couldn't determine database type " "for column '%s.%s'",
+ tname,
+ cname,
+ )
+ return
+ if metadata_type._type_affinity is sqltypes.NullType:
+ log.info(
+ "Column '%s.%s' has no type within " "the model; can't compare",
+ tname,
+ cname,
+ )
+ return
+
+ isdiff = autogen_context.migration_context._compare_type(
+ conn_col, metadata_col
+ )
+
+ if isdiff:
+ alter_column_op.modify_type = metadata_type
+ log.info(
+ "Detected type change from %r to %r on '%s.%s'",
+ conn_type,
+ metadata_type,
+ tname,
+ cname,
+ )
+
+
+def _render_server_default_for_compare(
+ metadata_default: Optional[Any], autogen_context: AutogenContext
+) -> Optional[str]:
+ if isinstance(metadata_default, sa_schema.DefaultClause):
+ if isinstance(metadata_default.arg, str):
+ metadata_default = metadata_default.arg
+ else:
+ metadata_default = str(
+ metadata_default.arg.compile(
+ dialect=autogen_context.dialect,
+ compile_kwargs={"literal_binds": True},
+ )
+ )
+ if isinstance(metadata_default, str):
+ return metadata_default
+ else:
+ return None
+
+
+def _normalize_computed_default(sqltext: str) -> str:
+ """we want to warn if a computed sql expression has changed. however
+ we don't want false positives and the warning is not that critical.
+ so filter out most forms of variability from the SQL text.
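+
+    e.g. both "(x + 42)" and "X + 42" normalize to "x+42".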
+
+ """
+
+ return re.sub(r"[ \(\)'\"`\[\]]", "", sqltext).lower()
+
+
+def _compare_computed_default(
+ autogen_context: AutogenContext,
+ alter_column_op: AlterColumnOp,
+ schema: Optional[str],
+ tname: str,
+ cname: str,
+ conn_col: Column[Any],
+ metadata_col: Column[Any],
+) -> None:
+ rendered_metadata_default = str(
+ cast(sa_schema.Computed, metadata_col.server_default).sqltext.compile(
+ dialect=autogen_context.dialect,
+ compile_kwargs={"literal_binds": True},
+ )
+ )
+
+ # since we cannot change computed columns, we do only a crude comparison
+ # here where we try to eliminate syntactical differences in order to
+ # get a minimal comparison just to emit a warning.
+
+ rendered_metadata_default = _normalize_computed_default(
+ rendered_metadata_default
+ )
+
+ if isinstance(conn_col.server_default, sa_schema.Computed):
+ rendered_conn_default = str(
+ conn_col.server_default.sqltext.compile(
+ dialect=autogen_context.dialect,
+ compile_kwargs={"literal_binds": True},
+ )
+ )
+ if rendered_conn_default is None:
+ rendered_conn_default = ""
+ else:
+ rendered_conn_default = _normalize_computed_default(
+ rendered_conn_default
+ )
+ else:
+ rendered_conn_default = ""
+
+ if rendered_metadata_default != rendered_conn_default:
+ _warn_computed_not_supported(tname, cname)
+
+
+def _warn_computed_not_supported(tname: str, cname: str) -> None:
+ util.warn("Computed default on %s.%s cannot be modified" % (tname, cname))
+
+
+def _compare_identity_default(
+ autogen_context,
+ alter_column_op,
+ schema,
+ tname,
+ cname,
+ conn_col,
+ metadata_col,
+):
+ impl = autogen_context.migration_context.impl
+ diff, ignored_attr, is_alter = impl._compare_identity_default(
+ metadata_col.server_default, conn_col.server_default
+ )
+
+ return diff, is_alter
+
+
+@comparators.dispatch_for("column")
+def _compare_server_default(
+ autogen_context: AutogenContext,
+ alter_column_op: AlterColumnOp,
+ schema: Optional[str],
+ tname: Union[quoted_name, str],
+ cname: Union[quoted_name, str],
+ conn_col: Column[Any],
+ metadata_col: Column[Any],
+) -> Optional[bool]:
+ metadata_default = metadata_col.server_default
+ conn_col_default = conn_col.server_default
+ if conn_col_default is None and metadata_default is None:
+ return False
+
+ if sqla_compat._server_default_is_computed(metadata_default):
+ # return False in case of a computed column as the server
+ # default. Note that DDL for adding or removing "GENERATED AS" from
+ # an existing column is not currently known for any backend.
+ # Once SQLAlchemy can reflect "GENERATED" as the "computed" element,
+ # we would also want to ignore and/or warn for changes vs. the
+ # metadata (or support backend specific DDL if applicable).
+ if not sqla_compat.has_computed_reflection:
+ return False
+
+ else:
+ return (
+ _compare_computed_default( # type:ignore[func-returns-value]
+ autogen_context,
+ alter_column_op,
+ schema,
+ tname,
+ cname,
+ conn_col,
+ metadata_col,
+ )
+ )
+ if sqla_compat._server_default_is_computed(conn_col_default):
+ _warn_computed_not_supported(tname, cname)
+ return False
+
+ if sqla_compat._server_default_is_identity(
+ metadata_default, conn_col_default
+ ):
+ alter_column_op.existing_server_default = conn_col_default
+ diff, is_alter = _compare_identity_default(
+ autogen_context,
+ alter_column_op,
+ schema,
+ tname,
+ cname,
+ conn_col,
+ metadata_col,
+ )
+ if is_alter:
+ alter_column_op.modify_server_default = metadata_default
+ if diff:
+ log.info(
+ "Detected server default on column '%s.%s': "
+ "identity options attributes %s",
+ tname,
+ cname,
+ sorted(diff),
+ )
+ else:
+ rendered_metadata_default = _render_server_default_for_compare(
+ metadata_default, autogen_context
+ )
+
+ rendered_conn_default = (
+ cast(Any, conn_col_default).arg.text if conn_col_default else None
+ )
+
+ alter_column_op.existing_server_default = conn_col_default
+
+ is_diff = autogen_context.migration_context._compare_server_default(
+ conn_col,
+ metadata_col,
+ rendered_metadata_default,
+ rendered_conn_default,
+ )
+ if is_diff:
+ alter_column_op.modify_server_default = metadata_default
+ log.info("Detected server default on column '%s.%s'", tname, cname)
+
+ return None
+
+
+@comparators.dispatch_for("column")
+def _compare_column_comment(
+ autogen_context: AutogenContext,
+ alter_column_op: AlterColumnOp,
+ schema: Optional[str],
+ tname: Union[quoted_name, str],
+ cname: quoted_name,
+ conn_col: Column[Any],
+ metadata_col: Column[Any],
+) -> Optional[Literal[False]]:
+ assert autogen_context.dialect is not None
+ if not autogen_context.dialect.supports_comments:
+ return None
+
+ metadata_comment = metadata_col.comment
+ conn_col_comment = conn_col.comment
+ if conn_col_comment is None and metadata_comment is None:
+ return False
+
+ alter_column_op.existing_comment = conn_col_comment
+
+ if conn_col_comment != metadata_comment:
+ alter_column_op.modify_comment = metadata_comment
+ log.info("Detected column comment '%s.%s'", tname, cname)
+
+ return None
+
+
+@comparators.dispatch_for("table")
+def _compare_foreign_keys(
+ autogen_context: AutogenContext,
+ modify_table_ops: ModifyTableOps,
+ schema: Optional[str],
+ tname: Union[quoted_name, str],
+ conn_table: Optional[Table],
+ metadata_table: Optional[Table],
+) -> None:
+ # if we're doing CREATE TABLE, all FKs are created
+ # inline within the table def
+ if conn_table is None or metadata_table is None:
+ return
+
+ inspector = autogen_context.inspector
+ metadata_fks = {
+ fk
+ for fk in metadata_table.constraints
+ if isinstance(fk, sa_schema.ForeignKeyConstraint)
+ }
+
+ conn_fks_list = [
+ fk
+ for fk in inspector.get_foreign_keys(tname, schema=schema)
+ if autogen_context.run_name_filters(
+ fk["name"],
+ "foreign_key_constraint",
+ {"table_name": tname, "schema_name": schema},
+ )
+ ]
+
+ backend_reflects_fk_options = bool(
+ conn_fks_list and "options" in conn_fks_list[0]
+ )
+
+ conn_fks = {
+ _make_foreign_key(const, conn_table) # type: ignore[arg-type]
+ for const in conn_fks_list
+ }
+
+ # give the dialect a chance to correct the FKs to match more
+ # closely
+ autogen_context.migration_context.impl.correct_for_autogen_foreignkeys(
+ conn_fks, metadata_fks
+ )
+
+ metadata_fks_sig = {
+ _fk_constraint_sig(fk, include_options=backend_reflects_fk_options)
+ for fk in metadata_fks
+ }
+
+ conn_fks_sig = {
+ _fk_constraint_sig(fk, include_options=backend_reflects_fk_options)
+ for fk in conn_fks
+ }
+
+ conn_fks_by_sig = {c.sig: c for c in conn_fks_sig}
+ metadata_fks_by_sig = {c.sig: c for c in metadata_fks_sig}
+
+ metadata_fks_by_name = {
+ c.name: c for c in metadata_fks_sig if c.name is not None
+ }
+ conn_fks_by_name = {c.name: c for c in conn_fks_sig if c.name is not None}
+
+ def _add_fk(obj, compare_to):
+ if autogen_context.run_object_filters(
+ obj.const, obj.name, "foreign_key_constraint", False, compare_to
+ ):
+ modify_table_ops.ops.append(
+                ops.CreateForeignKeyOp.from_constraint(obj.const)
+ )
+
+ log.info(
+ "Detected added foreign key (%s)(%s) on table %s%s",
+ ", ".join(obj.source_columns),
+ ", ".join(obj.target_columns),
+ "%s." % obj.source_schema if obj.source_schema else "",
+ obj.source_table,
+ )
+
+ def _remove_fk(obj, compare_to):
+ if autogen_context.run_object_filters(
+ obj.const, obj.name, "foreign_key_constraint", True, compare_to
+ ):
+ modify_table_ops.ops.append(
+ ops.DropConstraintOp.from_constraint(obj.const)
+ )
+ log.info(
+ "Detected removed foreign key (%s)(%s) on table %s%s",
+ ", ".join(obj.source_columns),
+ ", ".join(obj.target_columns),
+ "%s." % obj.source_schema if obj.source_schema else "",
+ obj.source_table,
+ )
+
+ # so far it appears we don't need to do this by name at all.
+ # SQLite doesn't preserve constraint names anyway
+
+ for removed_sig in set(conn_fks_by_sig).difference(metadata_fks_by_sig):
+ const = conn_fks_by_sig[removed_sig]
+ if removed_sig not in metadata_fks_by_sig:
+ compare_to = (
+ metadata_fks_by_name[const.name].const
+ if const.name in metadata_fks_by_name
+ else None
+ )
+ _remove_fk(const, compare_to)
+
+ for added_sig in set(metadata_fks_by_sig).difference(conn_fks_by_sig):
+ const = metadata_fks_by_sig[added_sig]
+ if added_sig not in conn_fks_by_sig:
+ compare_to = (
+ conn_fks_by_name[const.name].const
+ if const.name in conn_fks_by_name
+ else None
+ )
+ _add_fk(const, compare_to)
+
+
+@comparators.dispatch_for("table")
+def _compare_table_comment(
+ autogen_context: AutogenContext,
+ modify_table_ops: ModifyTableOps,
+ schema: Optional[str],
+ tname: Union[quoted_name, str],
+ conn_table: Optional[Table],
+ metadata_table: Optional[Table],
+) -> None:
+ assert autogen_context.dialect is not None
+ if not autogen_context.dialect.supports_comments:
+ return
+
+ # if we're doing CREATE TABLE, comments will be created inline
+ # with the create_table op.
+ if conn_table is None or metadata_table is None:
+ return
+
+ if conn_table.comment is None and metadata_table.comment is None:
+ return
+
+ if metadata_table.comment is None and conn_table.comment is not None:
+ modify_table_ops.ops.append(
+ ops.DropTableCommentOp(
+ tname, existing_comment=conn_table.comment, schema=schema
+ )
+ )
+ elif metadata_table.comment != conn_table.comment:
+ modify_table_ops.ops.append(
+ ops.CreateTableCommentOp(
+ tname,
+ metadata_table.comment,
+ existing_comment=conn_table.comment,
+ schema=schema,
+ )
+ )
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/autogenerate/render.py b/Backend/venv/lib/python3.12/site-packages/alembic/autogenerate/render.py
new file mode 100644
index 00000000..9c84cd6c
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/alembic/autogenerate/render.py
@@ -0,0 +1,1082 @@
+from __future__ import annotations
+
+from io import StringIO
+import re
+from typing import Any
+from typing import cast
+from typing import Dict
+from typing import List
+from typing import Optional
+from typing import Tuple
+from typing import TYPE_CHECKING
+from typing import Union
+
+from mako.pygen import PythonPrinter
+from sqlalchemy import schema as sa_schema
+from sqlalchemy import sql
+from sqlalchemy import types as sqltypes
+from sqlalchemy.sql.elements import conv
+from sqlalchemy.sql.elements import quoted_name
+
+from .. import util
+from ..operations import ops
+from ..util import sqla_compat
+
+if TYPE_CHECKING:
+ from typing import Literal
+
+ from sqlalchemy.sql.base import DialectKWArgs
+ from sqlalchemy.sql.elements import ColumnElement
+ from sqlalchemy.sql.elements import TextClause
+ from sqlalchemy.sql.schema import CheckConstraint
+ from sqlalchemy.sql.schema import Column
+ from sqlalchemy.sql.schema import Constraint
+ from sqlalchemy.sql.schema import FetchedValue
+ from sqlalchemy.sql.schema import ForeignKey
+ from sqlalchemy.sql.schema import ForeignKeyConstraint
+ from sqlalchemy.sql.schema import Index
+ from sqlalchemy.sql.schema import MetaData
+ from sqlalchemy.sql.schema import PrimaryKeyConstraint
+ from sqlalchemy.sql.schema import UniqueConstraint
+ from sqlalchemy.sql.sqltypes import ARRAY
+ from sqlalchemy.sql.type_api import TypeEngine
+
+ from alembic.autogenerate.api import AutogenContext
+ from alembic.config import Config
+ from alembic.operations.ops import MigrationScript
+ from alembic.operations.ops import ModifyTableOps
+ from alembic.util.sqla_compat import Computed
+ from alembic.util.sqla_compat import Identity
+
+
+MAX_PYTHON_ARGS = 255
+
+
+def _render_gen_name(
+ autogen_context: AutogenContext,
+ name: sqla_compat._ConstraintName,
+) -> Optional[Union[quoted_name, str, _f_name]]:
+ if isinstance(name, conv):
+ return _f_name(_alembic_autogenerate_prefix(autogen_context), name)
+ else:
+ return sqla_compat.constraint_name_or_none(name)
+
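+# e.g. a constraint name generated by a MetaData naming convention arrives as
+# a conv() instance and renders as op.f('uq_user_email') (illustrative name)
+# via _f_name below; a plain string name passes through unchanged.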
+
+def _indent(text: str) -> str:
+    text = re.compile(r"^", re.M).sub("    ", text).strip()
+ text = re.compile(r" +$", re.M).sub("", text)
+ return text
+
+
+def _render_python_into_templatevars(
+ autogen_context: AutogenContext,
+ migration_script: MigrationScript,
+ template_args: Dict[str, Union[str, Config]],
+) -> None:
+ imports = autogen_context.imports
+
+ for upgrade_ops, downgrade_ops in zip(
+ migration_script.upgrade_ops_list, migration_script.downgrade_ops_list
+ ):
+ template_args[upgrade_ops.upgrade_token] = _indent(
+ _render_cmd_body(upgrade_ops, autogen_context)
+ )
+ template_args[downgrade_ops.downgrade_token] = _indent(
+ _render_cmd_body(downgrade_ops, autogen_context)
+ )
+ template_args["imports"] = "\n".join(sorted(imports))
+
+
+default_renderers = renderers = util.Dispatcher()
+
+
+def _render_cmd_body(
+ op_container: ops.OpContainer,
+ autogen_context: AutogenContext,
+) -> str:
+ buf = StringIO()
+ printer = PythonPrinter(buf)
+
+ printer.writeline(
+ "# ### commands auto generated by Alembic - please adjust! ###"
+ )
+
+ has_lines = False
+ for op in op_container.ops:
+ lines = render_op(autogen_context, op)
+ has_lines = has_lines or bool(lines)
+
+ for line in lines:
+ printer.writeline(line)
+
+ if not has_lines:
+ printer.writeline("pass")
+
+ printer.writeline("# ### end Alembic commands ###")
+
+ return buf.getvalue()
+
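+# The string returned above becomes the body of upgrade()/downgrade() in the
+# generated revision file, e.g. (illustrative):
+#
+#     # ### commands auto generated by Alembic - please adjust! ###
+#     op.drop_table('widgets')
+#     # ### end Alembic commands ###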
+
+def render_op(
+ autogen_context: AutogenContext, op: ops.MigrateOperation
+) -> List[str]:
+ renderer = renderers.dispatch(op)
+ lines = util.to_list(renderer(autogen_context, op))
+ return lines
+
+
+def render_op_text(
+ autogen_context: AutogenContext, op: ops.MigrateOperation
+) -> str:
+ return "\n".join(render_op(autogen_context, op))
+
+
+@renderers.dispatch_for(ops.ModifyTableOps)
+def _render_modify_table(
+ autogen_context: AutogenContext, op: ModifyTableOps
+) -> List[str]:
+ opts = autogen_context.opts
+ render_as_batch = opts.get("render_as_batch", False)
+
+ if op.ops:
+ lines = []
+ if render_as_batch:
+ with autogen_context._within_batch():
+ lines.append(
+ "with op.batch_alter_table(%r, schema=%r) as batch_op:"
+ % (op.table_name, op.schema)
+ )
+ for t_op in op.ops:
+ t_lines = render_op(autogen_context, t_op)
+ lines.extend(t_lines)
+ lines.append("")
+ else:
+ for t_op in op.ops:
+ t_lines = render_op(autogen_context, t_op)
+ lines.extend(t_lines)
+
+ return lines
+ else:
+ return []
+
+
+@renderers.dispatch_for(ops.CreateTableCommentOp)
+def _render_create_table_comment(
+ autogen_context: AutogenContext, op: ops.CreateTableCommentOp
+) -> str:
+ templ = (
+ "{prefix}create_table_comment(\n"
+ "{indent}'{tname}',\n"
+ "{indent}{comment},\n"
+ "{indent}existing_comment={existing},\n"
+ "{indent}schema={schema}\n"
+ ")"
+ )
+ return templ.format(
+ prefix=_alembic_autogenerate_prefix(autogen_context),
+ tname=op.table_name,
+ comment="%r" % op.comment if op.comment is not None else None,
+ existing="%r" % op.existing_comment
+ if op.existing_comment is not None
+ else None,
+ schema="'%s'" % op.schema if op.schema is not None else None,
+ indent=" ",
+ )
+
+
+@renderers.dispatch_for(ops.DropTableCommentOp)
+def _render_drop_table_comment(
+ autogen_context: AutogenContext, op: ops.DropTableCommentOp
+) -> str:
+ templ = (
+ "{prefix}drop_table_comment(\n"
+ "{indent}'{tname}',\n"
+ "{indent}existing_comment={existing},\n"
+ "{indent}schema={schema}\n"
+ ")"
+ )
+ return templ.format(
+ prefix=_alembic_autogenerate_prefix(autogen_context),
+ tname=op.table_name,
+ existing="%r" % op.existing_comment
+ if op.existing_comment is not None
+ else None,
+ schema="'%s'" % op.schema if op.schema is not None else None,
+ indent=" ",
+ )
+
+
+@renderers.dispatch_for(ops.CreateTableOp)
+def _add_table(autogen_context: AutogenContext, op: ops.CreateTableOp) -> str:
+ table = op.to_table()
+
+ args = [
+ col
+ for col in [
+ _render_column(col, autogen_context) for col in table.columns
+ ]
+ if col
+ ] + sorted(
+ [
+ rcons
+ for rcons in [
+ _render_constraint(
+ cons, autogen_context, op._namespace_metadata
+ )
+ for cons in table.constraints
+ ]
+ if rcons is not None
+ ]
+ )
+
+ if len(args) > MAX_PYTHON_ARGS:
+ args_str = "*[" + ",\n".join(args) + "]"
+ else:
+ args_str = ",\n".join(args)
+
+ text = "%(prefix)screate_table(%(tablename)r,\n%(args)s" % {
+ "tablename": _ident(op.table_name),
+ "prefix": _alembic_autogenerate_prefix(autogen_context),
+ "args": args_str,
+ }
+ if op.schema:
+ text += ",\nschema=%r" % _ident(op.schema)
+
+ comment = table.comment
+ if comment:
+ text += ",\ncomment=%r" % _ident(comment)
+
+ info = table.info
+ if info:
+ text += f",\ninfo={info!r}"
+
+ for k in sorted(op.kw):
+ text += ",\n%s=%r" % (k.replace(" ", "_"), op.kw[k])
+
+ if table._prefixes:
+ prefixes = ", ".join("'%s'" % p for p in table._prefixes)
+ text += ",\nprefixes=[%s]" % prefixes
+
+ text += "\n)"
+ return text
+
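+# e.g. with the default "op." / "sa." prefixes, a simple table renders as
+# (illustrative):
+#
+#     op.create_table('user',
+#     sa.Column('id', sa.Integer(), nullable=False),
+#     sa.PrimaryKeyConstraint('id')
+#     )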
+
+@renderers.dispatch_for(ops.DropTableOp)
+def _drop_table(autogen_context: AutogenContext, op: ops.DropTableOp) -> str:
+ text = "%(prefix)sdrop_table(%(tname)r" % {
+ "prefix": _alembic_autogenerate_prefix(autogen_context),
+ "tname": _ident(op.table_name),
+ }
+ if op.schema:
+ text += ", schema=%r" % _ident(op.schema)
+ text += ")"
+ return text
+
+
+def _render_dialect_kwargs_items(
+ autogen_context: AutogenContext, item: DialectKWArgs
+) -> list[str]:
+ return [
+ f"{key}={_render_potential_expr(val, autogen_context)}"
+ for key, val in item.dialect_kwargs.items()
+ ]
+
+
+@renderers.dispatch_for(ops.CreateIndexOp)
+def _add_index(autogen_context: AutogenContext, op: ops.CreateIndexOp) -> str:
+ index = op.to_index()
+
+ has_batch = autogen_context._has_batch
+
+ if has_batch:
+ tmpl = (
+ "%(prefix)screate_index(%(name)r, [%(columns)s], "
+ "unique=%(unique)r%(kwargs)s)"
+ )
+ else:
+ tmpl = (
+ "%(prefix)screate_index(%(name)r, %(table)r, [%(columns)s], "
+ "unique=%(unique)r%(schema)s%(kwargs)s)"
+ )
+
+ assert index.table is not None
+
+ opts = _render_dialect_kwargs_items(autogen_context, index)
+ text = tmpl % {
+ "prefix": _alembic_autogenerate_prefix(autogen_context),
+ "name": _render_gen_name(autogen_context, index.name),
+ "table": _ident(index.table.name),
+ "columns": ", ".join(
+ _get_index_rendered_expressions(index, autogen_context)
+ ),
+ "unique": index.unique or False,
+ "schema": (", schema=%r" % _ident(index.table.schema))
+ if index.table.schema
+ else "",
+ "kwargs": ", " + ", ".join(opts) if opts else "",
+ }
+ return text
+
+
+@renderers.dispatch_for(ops.DropIndexOp)
+def _drop_index(autogen_context: AutogenContext, op: ops.DropIndexOp) -> str:
+ index = op.to_index()
+
+ has_batch = autogen_context._has_batch
+
+ if has_batch:
+ tmpl = "%(prefix)sdrop_index(%(name)r%(kwargs)s)"
+ else:
+ tmpl = (
+ "%(prefix)sdrop_index(%(name)r, "
+ "table_name=%(table_name)r%(schema)s%(kwargs)s)"
+ )
+ opts = _render_dialect_kwargs_items(autogen_context, index)
+ text = tmpl % {
+ "prefix": _alembic_autogenerate_prefix(autogen_context),
+ "name": _render_gen_name(autogen_context, op.index_name),
+ "table_name": _ident(op.table_name),
+ "schema": ((", schema=%r" % _ident(op.schema)) if op.schema else ""),
+ "kwargs": ", " + ", ".join(opts) if opts else "",
+ }
+ return text
+
+
+@renderers.dispatch_for(ops.CreateUniqueConstraintOp)
+def _add_unique_constraint(
+ autogen_context: AutogenContext, op: ops.CreateUniqueConstraintOp
+) -> List[str]:
+ return [_uq_constraint(op.to_constraint(), autogen_context, True)]
+
+
+@renderers.dispatch_for(ops.CreateForeignKeyOp)
+def _add_fk_constraint(
+ autogen_context: AutogenContext, op: ops.CreateForeignKeyOp
+) -> str:
+ args = [repr(_render_gen_name(autogen_context, op.constraint_name))]
+ if not autogen_context._has_batch:
+ args.append(repr(_ident(op.source_table)))
+
+ args.extend(
+ [
+ repr(_ident(op.referent_table)),
+ repr([_ident(col) for col in op.local_cols]),
+ repr([_ident(col) for col in op.remote_cols]),
+ ]
+ )
+ kwargs = [
+ "referent_schema",
+ "onupdate",
+ "ondelete",
+ "initially",
+ "deferrable",
+ "use_alter",
+ "match",
+ ]
+ if not autogen_context._has_batch:
+ kwargs.insert(0, "source_schema")
+
+ for k in kwargs:
+ if k in op.kw:
+ value = op.kw[k]
+ if value is not None:
+ args.append("%s=%r" % (k, value))
+
+ return "%(prefix)screate_foreign_key(%(args)s)" % {
+ "prefix": _alembic_autogenerate_prefix(autogen_context),
+ "args": ", ".join(args),
+ }
+
+
+@renderers.dispatch_for(ops.CreatePrimaryKeyOp)
+def _add_pk_constraint(constraint, autogen_context):
+ raise NotImplementedError()
+
+
+@renderers.dispatch_for(ops.CreateCheckConstraintOp)
+def _add_check_constraint(constraint, autogen_context):
+ raise NotImplementedError()
+
+
+@renderers.dispatch_for(ops.DropConstraintOp)
+def _drop_constraint(
+ autogen_context: AutogenContext, op: ops.DropConstraintOp
+) -> str:
+ prefix = _alembic_autogenerate_prefix(autogen_context)
+ name = _render_gen_name(autogen_context, op.constraint_name)
+ schema = _ident(op.schema) if op.schema else None
+ type_ = _ident(op.constraint_type) if op.constraint_type else None
+
+ params_strs = []
+ params_strs.append(repr(name))
+ if not autogen_context._has_batch:
+ params_strs.append(repr(_ident(op.table_name)))
+ if schema is not None:
+ params_strs.append(f"schema={schema!r}")
+ if type_ is not None:
+ params_strs.append(f"type_={type_!r}")
+
+ return f"{prefix}drop_constraint({', '.join(params_strs)})"
+
+
+@renderers.dispatch_for(ops.AddColumnOp)
+def _add_column(autogen_context: AutogenContext, op: ops.AddColumnOp) -> str:
+ schema, tname, column = op.schema, op.table_name, op.column
+ if autogen_context._has_batch:
+ template = "%(prefix)sadd_column(%(column)s)"
+ else:
+ template = "%(prefix)sadd_column(%(tname)r, %(column)s"
+ if schema:
+ template += ", schema=%(schema)r"
+ template += ")"
+ text = template % {
+ "prefix": _alembic_autogenerate_prefix(autogen_context),
+ "tname": tname,
+ "column": _render_column(column, autogen_context),
+ "schema": schema,
+ }
+ return text
+
+
+@renderers.dispatch_for(ops.DropColumnOp)
+def _drop_column(autogen_context: AutogenContext, op: ops.DropColumnOp) -> str:
+ schema, tname, column_name = op.schema, op.table_name, op.column_name
+
+ if autogen_context._has_batch:
+ template = "%(prefix)sdrop_column(%(cname)r)"
+ else:
+ template = "%(prefix)sdrop_column(%(tname)r, %(cname)r"
+ if schema:
+ template += ", schema=%(schema)r"
+ template += ")"
+
+ text = template % {
+ "prefix": _alembic_autogenerate_prefix(autogen_context),
+ "tname": _ident(tname),
+ "cname": _ident(column_name),
+ "schema": _ident(schema),
+ }
+ return text
+
+
+@renderers.dispatch_for(ops.AlterColumnOp)
+def _alter_column(
+ autogen_context: AutogenContext, op: ops.AlterColumnOp
+) -> str:
+ tname = op.table_name
+ cname = op.column_name
+ server_default = op.modify_server_default
+ type_ = op.modify_type
+ nullable = op.modify_nullable
+ comment = op.modify_comment
+ autoincrement = op.kw.get("autoincrement", None)
+ existing_type = op.existing_type
+ existing_nullable = op.existing_nullable
+ existing_comment = op.existing_comment
+ existing_server_default = op.existing_server_default
+ schema = op.schema
+
+ indent = " " * 11
+
+ if autogen_context._has_batch:
+ template = "%(prefix)salter_column(%(cname)r"
+ else:
+ template = "%(prefix)salter_column(%(tname)r, %(cname)r"
+
+ text = template % {
+ "prefix": _alembic_autogenerate_prefix(autogen_context),
+ "tname": tname,
+ "cname": cname,
+ }
+ if existing_type is not None:
+ text += ",\n%sexisting_type=%s" % (
+ indent,
+ _repr_type(existing_type, autogen_context),
+ )
+ if server_default is not False:
+ rendered = _render_server_default(server_default, autogen_context)
+ text += ",\n%sserver_default=%s" % (indent, rendered)
+
+ if type_ is not None:
+ text += ",\n%stype_=%s" % (indent, _repr_type(type_, autogen_context))
+ if nullable is not None:
+ text += ",\n%snullable=%r" % (indent, nullable)
+ if comment is not False:
+ text += ",\n%scomment=%r" % (indent, comment)
+ if existing_comment is not None:
+ text += ",\n%sexisting_comment=%r" % (indent, existing_comment)
+ if nullable is None and existing_nullable is not None:
+ text += ",\n%sexisting_nullable=%r" % (indent, existing_nullable)
+ if autoincrement is not None:
+ text += ",\n%sautoincrement=%r" % (indent, autoincrement)
+ if server_default is False and existing_server_default:
+ rendered = _render_server_default(
+ existing_server_default, autogen_context
+ )
+ text += ",\n%sexisting_server_default=%s" % (indent, rendered)
+ if schema and not autogen_context._has_batch:
+ text += ",\n%sschema=%r" % (indent, schema)
+ text += ")"
+ return text
+
+
+class _f_name:
+ def __init__(self, prefix: str, name: conv) -> None:
+ self.prefix = prefix
+ self.name = name
+
+ def __repr__(self) -> str:
+ return "%sf(%r)" % (self.prefix, _ident(self.name))
+
+
+def _ident(name: Optional[Union[quoted_name, str]]) -> Optional[str]:
+ """produce a __repr__() object for a string identifier that may
+ use quoted_name() in SQLAlchemy 0.9 and greater.
+
+ The issue worked around here is that quoted_name() doesn't have
+ very good repr() behavior by itself when unicode is involved.
+
+ """
+ if name is None:
+ return name
+ elif isinstance(name, quoted_name):
+ return str(name)
+ elif isinstance(name, str):
+ return name
+
+
+def _render_potential_expr(
+ value: Any,
+ autogen_context: AutogenContext,
+ *,
+ wrap_in_text: bool = True,
+ is_server_default: bool = False,
+ is_index: bool = False,
+) -> str:
+ if isinstance(value, sql.ClauseElement):
+ if wrap_in_text:
+ template = "%(prefix)stext(%(sql)r)"
+ else:
+ template = "%(sql)r"
+
+ return template % {
+ "prefix": _sqlalchemy_autogenerate_prefix(autogen_context),
+ "sql": autogen_context.migration_context.impl.render_ddl_sql_expr(
+ value, is_server_default=is_server_default, is_index=is_index
+ ),
+ }
+
+ else:
+ return repr(value)
+
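+# e.g. a text("now()") server default renders as sa.text('now()') under the
+# default sqlalchemy_module_prefix; non-ClauseElement values fall back to
+# repr().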
+
+def _get_index_rendered_expressions(
+ idx: Index, autogen_context: AutogenContext
+) -> List[str]:
+ return [
+ repr(_ident(getattr(exp, "name", None)))
+ if isinstance(exp, sa_schema.Column)
+ else _render_potential_expr(exp, autogen_context, is_index=True)
+ for exp in idx.expressions
+ ]
+
+
+def _uq_constraint(
+ constraint: UniqueConstraint,
+ autogen_context: AutogenContext,
+ alter: bool,
+) -> str:
+ opts: List[Tuple[str, Any]] = []
+
+ has_batch = autogen_context._has_batch
+
+ if constraint.deferrable:
+ opts.append(("deferrable", str(constraint.deferrable)))
+ if constraint.initially:
+ opts.append(("initially", str(constraint.initially)))
+ if not has_batch and alter and constraint.table.schema:
+ opts.append(("schema", _ident(constraint.table.schema)))
+ if not alter and constraint.name:
+ opts.append(
+ ("name", _render_gen_name(autogen_context, constraint.name))
+ )
+ dialect_options = _render_dialect_kwargs_items(autogen_context, constraint)
+
+ if alter:
+ args = [repr(_render_gen_name(autogen_context, constraint.name))]
+ if not has_batch:
+ args += [repr(_ident(constraint.table.name))]
+ args.append(repr([_ident(col.name) for col in constraint.columns]))
+ args.extend(["%s=%r" % (k, v) for k, v in opts])
+ args.extend(dialect_options)
+ return "%(prefix)screate_unique_constraint(%(args)s)" % {
+ "prefix": _alembic_autogenerate_prefix(autogen_context),
+ "args": ", ".join(args),
+ }
+ else:
+ args = [repr(_ident(col.name)) for col in constraint.columns]
+ args.extend(["%s=%r" % (k, v) for k, v in opts])
+ args.extend(dialect_options)
+ return "%(prefix)sUniqueConstraint(%(args)s)" % {
+ "prefix": _sqlalchemy_autogenerate_prefix(autogen_context),
+ "args": ", ".join(args),
+ }
+
+
+def _user_autogenerate_prefix(autogen_context, target):
+ prefix = autogen_context.opts["user_module_prefix"]
+ if prefix is None:
+ return "%s." % target.__module__
+ else:
+ return prefix
+
+
+def _sqlalchemy_autogenerate_prefix(autogen_context: AutogenContext) -> str:
+ return autogen_context.opts["sqlalchemy_module_prefix"] or ""
+
+
+def _alembic_autogenerate_prefix(autogen_context: AutogenContext) -> str:
+ if autogen_context._has_batch:
+ return "batch_op."
+ else:
+ return autogen_context.opts["alembic_module_prefix"] or ""
+
+
+def _user_defined_render(
+ type_: str, object_: Any, autogen_context: AutogenContext
+) -> Union[str, Literal[False]]:
+ if "render_item" in autogen_context.opts:
+ render = autogen_context.opts["render_item"]
+ if render:
+ rendered = render(type_, object_, autogen_context)
+ if rendered is not False:
+ return rendered
+ return False
+
+
+def _render_column(
+ column: Column[Any], autogen_context: AutogenContext
+) -> str:
+ rendered = _user_defined_render("column", column, autogen_context)
+ if rendered is not False:
+ return rendered
+
+ args: List[str] = []
+ opts: List[Tuple[str, Any]] = []
+
+ if column.server_default:
+ rendered = _render_server_default( # type:ignore[assignment]
+ column.server_default, autogen_context
+ )
+ if rendered:
+ if _should_render_server_default_positionally(
+ column.server_default
+ ):
+ args.append(rendered)
+ else:
+ opts.append(("server_default", rendered))
+
+ if (
+ column.autoincrement is not None
+ and column.autoincrement != sqla_compat.AUTOINCREMENT_DEFAULT
+ ):
+ opts.append(("autoincrement", column.autoincrement))
+
+ if column.nullable is not None:
+ opts.append(("nullable", column.nullable))
+
+ if column.system:
+ opts.append(("system", column.system))
+
+ comment = column.comment
+ if comment:
+ opts.append(("comment", "%r" % comment))
+
+ # TODO: for non-ascii colname, assign a "key"
+ return "%(prefix)sColumn(%(name)r, %(type)s, %(args)s%(kwargs)s)" % {
+ "prefix": _sqlalchemy_autogenerate_prefix(autogen_context),
+ "name": _ident(column.name),
+ "type": _repr_type(column.type, autogen_context),
+ "args": ", ".join([str(arg) for arg in args]) + ", " if args else "",
+ "kwargs": (
+ ", ".join(
+ ["%s=%s" % (kwname, val) for kwname, val in opts]
+ + [
+ "%s=%s"
+ % (key, _render_potential_expr(val, autogen_context))
+ for key, val in sqla_compat._column_kwargs(column).items()
+ ]
+ )
+ ),
+ }
+
+
+def _should_render_server_default_positionally(server_default: Any) -> bool:
+ return sqla_compat._server_default_is_computed(
+ server_default
+ ) or sqla_compat._server_default_is_identity(server_default)
+
+
+def _render_server_default(
+ default: Optional[
+ Union[FetchedValue, str, TextClause, ColumnElement[Any]]
+ ],
+ autogen_context: AutogenContext,
+ repr_: bool = True,
+) -> Optional[str]:
+ rendered = _user_defined_render("server_default", default, autogen_context)
+ if rendered is not False:
+ return rendered
+
+ if sqla_compat._server_default_is_computed(default):
+ return _render_computed(cast("Computed", default), autogen_context)
+ elif sqla_compat._server_default_is_identity(default):
+ return _render_identity(cast("Identity", default), autogen_context)
+ elif isinstance(default, sa_schema.DefaultClause):
+ if isinstance(default.arg, str):
+ default = default.arg
+ else:
+ return _render_potential_expr(
+ default.arg, autogen_context, is_server_default=True
+ )
+
+ if isinstance(default, str) and repr_:
+ default = repr(re.sub(r"^'|'$", "", default))
+
+ return cast(str, default)
+
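+# e.g. a DefaultClause wrapping the plain string "'draft'" has its outer
+# quotes stripped and re-repr'd above, rendering as server_default='draft'.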
+
+def _render_computed(
+ computed: Computed, autogen_context: AutogenContext
+) -> str:
+ text = _render_potential_expr(
+ computed.sqltext, autogen_context, wrap_in_text=False
+ )
+
+ kwargs = {}
+ if computed.persisted is not None:
+ kwargs["persisted"] = computed.persisted
+ return "%(prefix)sComputed(%(text)s, %(kwargs)s)" % {
+ "prefix": _sqlalchemy_autogenerate_prefix(autogen_context),
+ "text": text,
+ "kwargs": (", ".join("%s=%s" % pair for pair in kwargs.items())),
+ }
+
+
+def _render_identity(
+ identity: Identity, autogen_context: AutogenContext
+) -> str:
+ kwargs = sqla_compat._get_identity_options_dict(
+ identity, dialect_kwargs=True
+ )
+
+ return "%(prefix)sIdentity(%(kwargs)s)" % {
+ "prefix": _sqlalchemy_autogenerate_prefix(autogen_context),
+ "kwargs": (", ".join("%s=%s" % pair for pair in kwargs.items())),
+ }
+
+
+def _repr_type(
+ type_: TypeEngine,
+ autogen_context: AutogenContext,
+ _skip_variants: bool = False,
+) -> str:
+ rendered = _user_defined_render("type", type_, autogen_context)
+ if rendered is not False:
+ return rendered
+
+ if hasattr(autogen_context.migration_context, "impl"):
+ impl_rt = autogen_context.migration_context.impl.render_type(
+ type_, autogen_context
+ )
+ else:
+ impl_rt = None
+
+ mod = type(type_).__module__
+ imports = autogen_context.imports
+ if mod.startswith("sqlalchemy.dialects"):
+ match = re.match(r"sqlalchemy\.dialects\.(\w+)", mod)
+ assert match is not None
+ dname = match.group(1)
+ if imports is not None:
+ imports.add("from sqlalchemy.dialects import %s" % dname)
+ if impl_rt:
+ return impl_rt
+ else:
+ return "%s.%r" % (dname, type_)
+ elif impl_rt:
+ return impl_rt
+ elif not _skip_variants and sqla_compat._type_has_variants(type_):
+ return _render_Variant_type(type_, autogen_context)
+ elif mod.startswith("sqlalchemy."):
+ if "_render_%s_type" % type_.__visit_name__ in globals():
+ fn = globals()["_render_%s_type" % type_.__visit_name__]
+ return fn(type_, autogen_context)
+ else:
+ prefix = _sqlalchemy_autogenerate_prefix(autogen_context)
+ return "%s%r" % (prefix, type_)
+ else:
+ prefix = _user_autogenerate_prefix(autogen_context, type_)
+ return "%s%r" % (prefix, type_)
+
+
+def _render_ARRAY_type(type_: ARRAY, autogen_context: AutogenContext) -> str:
+ return cast(
+ str,
+ _render_type_w_subtype(
+ type_, autogen_context, "item_type", r"(.+?\()"
+ ),
+ )
+
+
+def _render_Variant_type(
+ type_: TypeEngine, autogen_context: AutogenContext
+) -> str:
+ base_type, variant_mapping = sqla_compat._get_variant_mapping(type_)
+ base = _repr_type(base_type, autogen_context, _skip_variants=True)
+ assert base is not None and base is not False
+ for dialect in sorted(variant_mapping):
+ typ = variant_mapping[dialect]
+ base += ".with_variant(%s, %r)" % (
+ _repr_type(typ, autogen_context, _skip_variants=True),
+ dialect,
+ )
+ return base
+
+
+def _render_type_w_subtype(
+ type_: TypeEngine,
+ autogen_context: AutogenContext,
+ attrname: str,
+ regexp: str,
+ prefix: Optional[str] = None,
+) -> Union[Optional[str], Literal[False]]:
+ outer_repr = repr(type_)
+ inner_type = getattr(type_, attrname, None)
+ if inner_type is None:
+ return False
+
+ inner_repr = repr(inner_type)
+
+ inner_repr = re.sub(r"([\(\)])", r"\\\1", inner_repr)
+ sub_type = _repr_type(getattr(type_, attrname), autogen_context)
+ outer_type = re.sub(regexp + inner_repr, r"\1%s" % sub_type, outer_repr)
+
+ if prefix:
+ return "%s%s" % (prefix, outer_type)
+
+ mod = type(type_).__module__
+ if mod.startswith("sqlalchemy.dialects"):
+ match = re.match(r"sqlalchemy\.dialects\.(\w+)", mod)
+ assert match is not None
+ dname = match.group(1)
+ return "%s.%s" % (dname, outer_type)
+ elif mod.startswith("sqlalchemy"):
+ prefix = _sqlalchemy_autogenerate_prefix(autogen_context)
+ return "%s%s" % (prefix, outer_type)
+ else:
+ return None
+
+
+_constraint_renderers = util.Dispatcher()
+
+
+def _render_constraint(
+ constraint: Constraint,
+ autogen_context: AutogenContext,
+ namespace_metadata: Optional[MetaData],
+) -> Optional[str]:
+ try:
+ renderer = _constraint_renderers.dispatch(constraint)
+ except ValueError:
+ util.warn("No renderer is established for object %r" % constraint)
+ return "[Unknown Python object %r]" % constraint
+ else:
+ return renderer(constraint, autogen_context, namespace_metadata)
+
+
+@_constraint_renderers.dispatch_for(sa_schema.PrimaryKeyConstraint)
+def _render_primary_key(
+ constraint: PrimaryKeyConstraint,
+ autogen_context: AutogenContext,
+ namespace_metadata: Optional[MetaData],
+) -> Optional[str]:
+ rendered = _user_defined_render("primary_key", constraint, autogen_context)
+ if rendered is not False:
+ return rendered
+
+ if not constraint.columns:
+ return None
+
+ opts = []
+ if constraint.name:
+ opts.append(
+ ("name", repr(_render_gen_name(autogen_context, constraint.name)))
+ )
+ return "%(prefix)sPrimaryKeyConstraint(%(args)s)" % {
+ "prefix": _sqlalchemy_autogenerate_prefix(autogen_context),
+ "args": ", ".join(
+ [repr(c.name) for c in constraint.columns]
+ + ["%s=%s" % (kwname, val) for kwname, val in opts]
+ ),
+ }
+
+
+def _fk_colspec(
+ fk: ForeignKey,
+ metadata_schema: Optional[str],
+ namespace_metadata: MetaData,
+) -> str:
+ """Implement a 'safe' version of ForeignKey._get_colspec() that
+ won't fail if the remote table can't be resolved.
+
+ """
+ colspec = fk._get_colspec() # type:ignore[attr-defined]
+ tokens = colspec.split(".")
+ tname, colname = tokens[-2:]
+
+ if metadata_schema is not None and len(tokens) == 2:
+ table_fullname = "%s.%s" % (metadata_schema, tname)
+ else:
+ table_fullname = ".".join(tokens[0:-1])
+
+ if (
+ not fk.link_to_name
+ and fk.parent is not None
+ and fk.parent.table is not None
+ ):
+ # try to resolve the remote table in order to adjust for column.key.
+ # the FK constraint needs to be rendered in terms of the column
+ # name.
+
+ if table_fullname in namespace_metadata.tables:
+ col = namespace_metadata.tables[table_fullname].c.get(colname)
+ if col is not None:
+ colname = _ident(col.name) # type: ignore[assignment]
+
+ colspec = "%s.%s" % (table_fullname, colname)
+
+ return colspec
+
+
+def _populate_render_fk_opts(
+ constraint: ForeignKeyConstraint, opts: List[Tuple[str, str]]
+) -> None:
+ if constraint.onupdate:
+ opts.append(("onupdate", repr(constraint.onupdate)))
+ if constraint.ondelete:
+ opts.append(("ondelete", repr(constraint.ondelete)))
+ if constraint.initially:
+ opts.append(("initially", repr(constraint.initially)))
+ if constraint.deferrable:
+ opts.append(("deferrable", repr(constraint.deferrable)))
+ if constraint.use_alter:
+ opts.append(("use_alter", repr(constraint.use_alter)))
+ if constraint.match:
+ opts.append(("match", repr(constraint.match)))
+
+
+@_constraint_renderers.dispatch_for(sa_schema.ForeignKeyConstraint)
+def _render_foreign_key(
+ constraint: ForeignKeyConstraint,
+ autogen_context: AutogenContext,
+ namespace_metadata: MetaData,
+) -> Optional[str]:
+ rendered = _user_defined_render("foreign_key", constraint, autogen_context)
+ if rendered is not False:
+ return rendered
+
+ opts = []
+ if constraint.name:
+ opts.append(
+ ("name", repr(_render_gen_name(autogen_context, constraint.name)))
+ )
+
+ _populate_render_fk_opts(constraint, opts)
+
+ apply_metadata_schema = namespace_metadata.schema
+ return (
+ "%(prefix)sForeignKeyConstraint([%(cols)s], "
+ "[%(refcols)s], %(args)s)"
+ % {
+ "prefix": _sqlalchemy_autogenerate_prefix(autogen_context),
+ "cols": ", ".join(
+ "%r" % _ident(cast("Column", f.parent).name)
+ for f in constraint.elements
+ ),
+ "refcols": ", ".join(
+ repr(_fk_colspec(f, apply_metadata_schema, namespace_metadata))
+ for f in constraint.elements
+ ),
+ "args": ", ".join(
+ ["%s=%s" % (kwname, val) for kwname, val in opts]
+ ),
+ }
+ )
+
+
+@_constraint_renderers.dispatch_for(sa_schema.UniqueConstraint)
+def _render_unique_constraint(
+ constraint: UniqueConstraint,
+ autogen_context: AutogenContext,
+ namespace_metadata: Optional[MetaData],
+) -> str:
+ rendered = _user_defined_render("unique", constraint, autogen_context)
+ if rendered is not False:
+ return rendered
+
+ return _uq_constraint(constraint, autogen_context, False)
+
+
+@_constraint_renderers.dispatch_for(sa_schema.CheckConstraint)
+def _render_check_constraint(
+ constraint: CheckConstraint,
+ autogen_context: AutogenContext,
+ namespace_metadata: Optional[MetaData],
+) -> Optional[str]:
+ rendered = _user_defined_render("check", constraint, autogen_context)
+ if rendered is not False:
+ return rendered
+
+    # detect a constraint that was generated by a parent type
+    # (e.g. the CHECK constraint emitted by Boolean or Enum) and is
+    # therefore probably in the Table already; ideally SQLAlchemy
+    # would give us a more first-class way to detect this.
+ if (
+ constraint._create_rule # type:ignore[attr-defined]
+ and hasattr(
+ constraint._create_rule, "target" # type:ignore[attr-defined]
+ )
+ and isinstance(
+ constraint._create_rule.target, # type:ignore[attr-defined]
+ sqltypes.TypeEngine,
+ )
+ ):
+ return None
+ opts = []
+ if constraint.name:
+ opts.append(
+ ("name", repr(_render_gen_name(autogen_context, constraint.name)))
+ )
+ return "%(prefix)sCheckConstraint(%(sqltext)s%(opts)s)" % {
+ "prefix": _sqlalchemy_autogenerate_prefix(autogen_context),
+ "opts": ", " + (", ".join("%s=%s" % (k, v) for k, v in opts))
+ if opts
+ else "",
+ "sqltext": _render_potential_expr(
+ constraint.sqltext, autogen_context, wrap_in_text=False
+ ),
+ }
+
+
+@renderers.dispatch_for(ops.ExecuteSQLOp)
+def _execute_sql(autogen_context: AutogenContext, op: ops.ExecuteSQLOp) -> str:
+ if not isinstance(op.sqltext, str):
+ raise NotImplementedError(
+ "Autogenerate rendering of SQL Expression language constructs "
+ "not supported here; please use a plain SQL string"
+ )
+ return "op.execute(%r)" % op.sqltext
+
+
+renderers = default_renderers.branch()
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/autogenerate/rewriter.py b/Backend/venv/lib/python3.12/site-packages/alembic/autogenerate/rewriter.py
new file mode 100644
index 00000000..68a93dd0
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/alembic/autogenerate/rewriter.py
@@ -0,0 +1,227 @@
+from __future__ import annotations
+
+from typing import Any
+from typing import Callable
+from typing import Iterator
+from typing import List
+from typing import Optional
+from typing import Type
+from typing import TYPE_CHECKING
+from typing import Union
+
+from .. import util
+from ..operations import ops
+
+if TYPE_CHECKING:
+ from ..operations.ops import AddColumnOp
+ from ..operations.ops import AlterColumnOp
+ from ..operations.ops import CreateTableOp
+ from ..operations.ops import MigrateOperation
+ from ..operations.ops import MigrationScript
+ from ..operations.ops import ModifyTableOps
+ from ..operations.ops import OpContainer
+ from ..runtime.environment import _GetRevArg
+ from ..runtime.migration import MigrationContext
+
+
+class Rewriter:
+ """A helper object that allows easy 'rewriting' of ops streams.
+
+ The :class:`.Rewriter` object is intended to be passed along
+ to the
+ :paramref:`.EnvironmentContext.configure.process_revision_directives`
+    parameter in an ``env.py`` script. Once constructed, any number
+    of "rewrites" functions can be associated with it; each will be
+    given the opportunity to modify an individual directive without
+    needing explicit knowledge of the overall structure.
+
+ The function is passed the :class:`.MigrationContext` object and
+    ``revision`` tuple that are passed to the
+    :paramref:`.EnvironmentContext.configure.process_revision_directives`
+    function normally,
+ and the third argument is an individual directive of the type
+ noted in the decorator. The function has the choice of returning
+ a single op directive, which normally can be the directive that
+ was actually passed, or a new directive to replace it, or a list
+ of zero or more directives to replace it.
+
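+    E.g., a sketch of wiring a rewriter into ``env.py``; the
+    ``connection`` and ``target_metadata`` arguments stand in for
+    whatever the surrounding script already passes to ``configure()``::
+
+        from alembic.autogenerate import rewriter
+        from alembic.operations import ops
+
+        writer = rewriter.Rewriter()
+
+
+        @writer.rewrites(ops.AddColumnOp)
+        def force_nullable(context, revision, op):
+            op.column.nullable = True
+            return op
+
+
+        # within env.py
+        context.configure(
+            connection=connection,
+            target_metadata=target_metadata,
+            process_revision_directives=writer,
+        )
+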
+ .. seealso::
+
+ :ref:`autogen_rewriter` - usage example
+
+ """
+
+ _traverse = util.Dispatcher()
+
+ _chained: Optional[Rewriter] = None
+
+ def __init__(self) -> None:
+ self.dispatch = util.Dispatcher()
+
+ def chain(self, other: Rewriter) -> Rewriter:
+ """Produce a "chain" of this :class:`.Rewriter` to another.
+
+ This allows two rewriters to operate serially on a stream,
+ e.g.::
+
+ writer1 = autogenerate.Rewriter()
+ writer2 = autogenerate.Rewriter()
+
+
+ @writer1.rewrites(ops.AddColumnOp)
+ def add_column_nullable(context, revision, op):
+ op.column.nullable = True
+ return op
+
+
+ @writer2.rewrites(ops.AddColumnOp)
+ def add_column_idx(context, revision, op):
+ idx_op = ops.CreateIndexOp(
+ "ixc", op.table_name, [op.column.name]
+ )
+ return [op, idx_op]
+
+ writer = writer1.chain(writer2)
+
+ :param other: a :class:`.Rewriter` instance
+ :return: a new :class:`.Rewriter` that will run the operations
+ of this writer, then the "other" writer, in succession.
+
+ """
+ wr = self.__class__.__new__(self.__class__)
+ wr.__dict__.update(self.__dict__)
+ wr._chained = other
+ return wr
+
+ def rewrites(
+ self,
+ operator: Union[
+ Type[AddColumnOp],
+ Type[MigrateOperation],
+ Type[AlterColumnOp],
+ Type[CreateTableOp],
+ Type[ModifyTableOps],
+ ],
+ ) -> Callable:
+ """Register a function as rewriter for a given type.
+
+ The function should receive three arguments, which are
+ the :class:`.MigrationContext`, a ``revision`` tuple, and
+ an op directive of the type indicated. E.g.::
+
+ @writer1.rewrites(ops.AddColumnOp)
+ def add_column_nullable(context, revision, op):
+ op.column.nullable = True
+ return op
+
+ """
+ return self.dispatch.dispatch_for(operator)
+
+ def _rewrite(
+ self,
+ context: MigrationContext,
+ revision: _GetRevArg,
+ directive: MigrateOperation,
+ ) -> Iterator[MigrateOperation]:
+ try:
+ _rewriter = self.dispatch.dispatch(directive)
+ except ValueError:
+ _rewriter = None
+ yield directive
+ else:
+ if self in directive._mutations:
+ yield directive
+ else:
+ for r_directive in util.to_list(
+ _rewriter(context, revision, directive), []
+ ):
+ r_directive._mutations = r_directive._mutations.union(
+ [self]
+ )
+ yield r_directive
+
+ def __call__(
+ self,
+ context: MigrationContext,
+ revision: _GetRevArg,
+ directives: List[MigrationScript],
+ ) -> None:
+ self.process_revision_directives(context, revision, directives)
+ if self._chained:
+ self._chained(context, revision, directives)
+
+ @_traverse.dispatch_for(ops.MigrationScript)
+ def _traverse_script(
+ self,
+ context: MigrationContext,
+ revision: _GetRevArg,
+ directive: MigrationScript,
+ ) -> None:
+ upgrade_ops_list = []
+ for upgrade_ops in directive.upgrade_ops_list:
+ ret = self._traverse_for(context, revision, upgrade_ops)
+ if len(ret) != 1:
+ raise ValueError(
+ "Can only return single object for UpgradeOps traverse"
+ )
+ upgrade_ops_list.append(ret[0])
+ directive.upgrade_ops = upgrade_ops_list
+
+ downgrade_ops_list = []
+ for downgrade_ops in directive.downgrade_ops_list:
+ ret = self._traverse_for(context, revision, downgrade_ops)
+ if len(ret) != 1:
+ raise ValueError(
+ "Can only return single object for DowngradeOps traverse"
+ )
+ downgrade_ops_list.append(ret[0])
+ directive.downgrade_ops = downgrade_ops_list
+
+ @_traverse.dispatch_for(ops.OpContainer)
+ def _traverse_op_container(
+ self,
+ context: MigrationContext,
+ revision: _GetRevArg,
+ directive: OpContainer,
+ ) -> None:
+ self._traverse_list(context, revision, directive.ops)
+
+ @_traverse.dispatch_for(ops.MigrateOperation)
+ def _traverse_any_directive(
+ self,
+ context: MigrationContext,
+ revision: _GetRevArg,
+ directive: MigrateOperation,
+ ) -> None:
+ pass
+
+ def _traverse_for(
+ self,
+ context: MigrationContext,
+ revision: _GetRevArg,
+ directive: MigrateOperation,
+ ) -> Any:
+ directives = list(self._rewrite(context, revision, directive))
+ for directive in directives:
+ traverser = self._traverse.dispatch(directive)
+ traverser(self, context, revision, directive)
+ return directives
+
+ def _traverse_list(
+ self,
+ context: MigrationContext,
+ revision: _GetRevArg,
+ directives: Any,
+ ) -> None:
+ dest = []
+ for directive in directives:
+ dest.extend(self._traverse_for(context, revision, directive))
+
+ directives[:] = dest
+
+ def process_revision_directives(
+ self,
+ context: MigrationContext,
+ revision: _GetRevArg,
+ directives: List[MigrationScript],
+ ) -> None:
+ self._traverse_list(context, revision, directives)
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/command.py b/Backend/venv/lib/python3.12/site-packages/alembic/command.py
new file mode 100644
index 00000000..dbaa9cf9
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/alembic/command.py
@@ -0,0 +1,744 @@
+from __future__ import annotations
+
+import os
+from typing import List
+from typing import Optional
+from typing import TYPE_CHECKING
+from typing import Union
+
+from . import autogenerate as autogen
+from . import util
+from .runtime.environment import EnvironmentContext
+from .script import ScriptDirectory
+
+if TYPE_CHECKING:
+ from alembic.config import Config
+ from alembic.script.base import Script
+ from alembic.script.revision import _RevIdType
+ from .runtime.environment import ProcessRevisionDirectiveFn
+
+
+def list_templates(config: Config):
+ """List available templates.
+
+ :param config: a :class:`.Config` object.
+
+ """
+
+ config.print_stdout("Available templates:\n")
+ for tempname in os.listdir(config.get_template_directory()):
+ with open(
+ os.path.join(config.get_template_directory(), tempname, "README")
+ ) as readme:
+ synopsis = next(readme).rstrip()
+ config.print_stdout("%s - %s", tempname, synopsis)
+
+ config.print_stdout("\nTemplates are used via the 'init' command, e.g.:")
+ config.print_stdout("\n alembic init --template generic ./scripts")
+
+
+def init(
+ config: Config,
+ directory: str,
+ template: str = "generic",
+ package: bool = False,
+) -> None:
+ """Initialize a new scripts directory.
+
+ :param config: a :class:`.Config` object.
+
+ :param directory: string path of the target directory
+
+ :param template: string name of the migration environment template to
+ use.
+
+ :param package: when True, write ``__init__.py`` files into the
+ environment location as well as the versions/ location.
+
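+    E.g., a minimal programmatic invocation (paths are illustrative)::
+
+        from alembic import command
+        from alembic.config import Config
+
+        alembic_cfg = Config("alembic.ini")
+        command.init(alembic_cfg, "migrations", template="generic")
+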
+ """
+
+ if os.access(directory, os.F_OK) and os.listdir(directory):
+ raise util.CommandError(
+ "Directory %s already exists and is not empty" % directory
+ )
+
+ template_dir = os.path.join(config.get_template_directory(), template)
+ if not os.access(template_dir, os.F_OK):
+ raise util.CommandError("No such template %r" % template)
+
+ if not os.access(directory, os.F_OK):
+ with util.status(
+ f"Creating directory {os.path.abspath(directory)!r}",
+ **config.messaging_opts,
+ ):
+ os.makedirs(directory)
+
+ versions = os.path.join(directory, "versions")
+ with util.status(
+ f"Creating directory {os.path.abspath(versions)!r}",
+ **config.messaging_opts,
+ ):
+ os.makedirs(versions)
+
+ script = ScriptDirectory(directory)
+
+ config_file: str | None = None
+ for file_ in os.listdir(template_dir):
+ file_path = os.path.join(template_dir, file_)
+ if file_ == "alembic.ini.mako":
+ assert config.config_file_name is not None
+ config_file = os.path.abspath(config.config_file_name)
+ if os.access(config_file, os.F_OK):
+ util.msg(
+ f"File {config_file!r} already exists, skipping",
+ **config.messaging_opts,
+ )
+ else:
+ script._generate_template(
+ file_path, config_file, script_location=directory
+ )
+ elif os.path.isfile(file_path):
+ output_file = os.path.join(directory, file_)
+ script._copy_file(file_path, output_file)
+
+ if package:
+ for path in [
+ os.path.join(os.path.abspath(directory), "__init__.py"),
+ os.path.join(os.path.abspath(versions), "__init__.py"),
+ ]:
+ with util.status(f"Adding {path!r}", **config.messaging_opts):
+ with open(path, "w"):
+ pass
+
+ assert config_file is not None
+ util.msg(
+ "Please edit configuration/connection/logging "
+ f"settings in {config_file!r} before proceeding.",
+ **config.messaging_opts,
+ )
+
+
+def revision(
+ config: Config,
+ message: Optional[str] = None,
+ autogenerate: bool = False,
+ sql: bool = False,
+ head: str = "head",
+ splice: bool = False,
+ branch_label: Optional[_RevIdType] = None,
+ version_path: Optional[str] = None,
+ rev_id: Optional[str] = None,
+ depends_on: Optional[str] = None,
+ process_revision_directives: Optional[ProcessRevisionDirectiveFn] = None,
+) -> Union[Optional[Script], List[Optional[Script]]]:
+ """Create a new revision file.
+
+ :param config: a :class:`.Config` object.
+
+ :param message: string message to apply to the revision; this is the
+ ``-m`` option to ``alembic revision``.
+
+ :param autogenerate: whether or not to autogenerate the script from
+ the database; this is the ``--autogenerate`` option to
+ ``alembic revision``.
+
+ :param sql: whether to dump the script out as a SQL string; when specified,
+ the script is dumped to stdout. This is the ``--sql`` option to
+ ``alembic revision``.
+
+ :param head: head revision to build the new revision upon as a parent;
+ this is the ``--head`` option to ``alembic revision``.
+
+ :param splice: whether or not the new revision should be made into a
+ new head of its own; is required when the given ``head`` is not itself
+ a head. This is the ``--splice`` option to ``alembic revision``.
+
+ :param branch_label: string label to apply to the branch; this is the
+ ``--branch-label`` option to ``alembic revision``.
+
+ :param version_path: string symbol identifying a specific version path
+ from the configuration; this is the ``--version-path`` option to
+ ``alembic revision``.
+
+ :param rev_id: optional revision identifier to use instead of having
+ one generated; this is the ``--rev-id`` option to ``alembic revision``.
+
+ :param depends_on: optional list of "depends on" identifiers; this is the
+ ``--depends-on`` option to ``alembic revision``.
+
+ :param process_revision_directives: this is a callable that takes the
+ same form as the callable described at
+ :paramref:`.EnvironmentContext.configure.process_revision_directives`;
+ will be applied to the structure generated by the revision process
+ where it can be altered programmatically. Note that unlike all
+ the other parameters, this option is only available via programmatic
+ use of :func:`.command.revision`
+
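+    E.g., generating an autogenerate revision programmatically, assuming
+    ``alembic_cfg`` is a :class:`.Config` (the message is illustrative)::
+
+        from alembic import command
+
+        command.revision(
+            alembic_cfg, message="add account table", autogenerate=True
+        )
+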
+ """
+
+ script_directory = ScriptDirectory.from_config(config)
+
+ command_args = dict(
+ message=message,
+ autogenerate=autogenerate,
+ sql=sql,
+ head=head,
+ splice=splice,
+ branch_label=branch_label,
+ version_path=version_path,
+ rev_id=rev_id,
+ depends_on=depends_on,
+ )
+ revision_context = autogen.RevisionContext(
+ config,
+ script_directory,
+ command_args,
+ process_revision_directives=process_revision_directives,
+ )
+
+ environment = util.asbool(config.get_main_option("revision_environment"))
+
+ if autogenerate:
+ environment = True
+
+ if sql:
+ raise util.CommandError(
+ "Using --sql with --autogenerate does not make any sense"
+ )
+
+ def retrieve_migrations(rev, context):
+ revision_context.run_autogenerate(rev, context)
+ return []
+
+ elif environment:
+
+ def retrieve_migrations(rev, context):
+ revision_context.run_no_autogenerate(rev, context)
+ return []
+
+ elif sql:
+ raise util.CommandError(
+ "Using --sql with the revision command when "
+ "revision_environment is not configured does not make any sense"
+ )
+
+ if environment:
+ with EnvironmentContext(
+ config,
+ script_directory,
+ fn=retrieve_migrations,
+ as_sql=sql,
+ template_args=revision_context.template_args,
+ revision_context=revision_context,
+ ):
+ script_directory.run_env()
+
+ # the revision_context now has MigrationScript structure(s) present.
+ # these could theoretically be further processed / rewritten *here*,
+ # in addition to the hooks present within each run_migrations() call,
+ # or at the end of env.py run_migrations_online().
+
+ scripts = [script for script in revision_context.generate_scripts()]
+ if len(scripts) == 1:
+ return scripts[0]
+ else:
+ return scripts
+
+
+def check(config: "Config") -> None:
+ """Check if revision command with autogenerate has pending upgrade ops.
+
+ :param config: a :class:`.Config` object.
+
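+    E.g., a sketch of failing a CI step when the model and database have
+    diverged, assuming ``alembic_cfg`` is a :class:`.Config`::
+
+        from alembic import command
+        from alembic.util import AutogenerateDiffsDetected
+
+        try:
+            command.check(alembic_cfg)
+        except AutogenerateDiffsDetected as err:
+            print("new upgrade operations are pending:", err)
+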
+ .. versionadded:: 1.9.0
+
+ """
+
+ script_directory = ScriptDirectory.from_config(config)
+
+ command_args = dict(
+ message=None,
+ autogenerate=True,
+ sql=False,
+ head="head",
+ splice=False,
+ branch_label=None,
+ version_path=None,
+ rev_id=None,
+ depends_on=None,
+ )
+ revision_context = autogen.RevisionContext(
+ config,
+ script_directory,
+ command_args,
+ )
+
+ def retrieve_migrations(rev, context):
+ revision_context.run_autogenerate(rev, context)
+ return []
+
+ with EnvironmentContext(
+ config,
+ script_directory,
+ fn=retrieve_migrations,
+ as_sql=False,
+ template_args=revision_context.template_args,
+ revision_context=revision_context,
+ ):
+ script_directory.run_env()
+
+ # the revision_context now has MigrationScript structure(s) present.
+
+ migration_script = revision_context.generated_revisions[-1]
+ diffs = migration_script.upgrade_ops.as_diffs()
+ if diffs:
+ raise util.AutogenerateDiffsDetected(
+ f"New upgrade operations detected: {diffs}"
+ )
+ else:
+ config.print_stdout("No new upgrade operations detected.")
+
+
+def merge(
+ config: Config,
+ revisions: _RevIdType,
+ message: Optional[str] = None,
+ branch_label: Optional[_RevIdType] = None,
+ rev_id: Optional[str] = None,
+) -> Optional[Script]:
+ """Merge two revisions together. Creates a new migration file.
+
+    :param config: a :class:`.Config` instance
+
+    :param revisions: the revision identifiers to merge; one or more
+        revision symbols, or ``"heads"`` to merge all current heads.
+
+    :param message: string message to apply to the revision
+
+ :param branch_label: string label name to apply to the new revision
+
+ :param rev_id: hardcoded revision identifier instead of generating a new
+ one.
+
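+    E.g., merging two hypothetical branch heads, assuming ``alembic_cfg``
+    is a :class:`.Config`::
+
+        from alembic import command
+
+        command.merge(
+            alembic_cfg,
+            ("1975ea83b712", "27c6a30d7c24"),
+            message="merge the two branch heads",
+        )
+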
+ .. seealso::
+
+ :ref:`branches`
+
+ """
+
+ script = ScriptDirectory.from_config(config)
+ template_args = {
+ "config": config # Let templates use config for
+ # e.g. multiple databases
+ }
+
+ environment = util.asbool(config.get_main_option("revision_environment"))
+
+ if environment:
+
+ def nothing(rev, context):
+ return []
+
+ with EnvironmentContext(
+ config,
+ script,
+ fn=nothing,
+ as_sql=False,
+ template_args=template_args,
+ ):
+ script.run_env()
+
+ return script.generate_revision(
+ rev_id or util.rev_id(),
+ message,
+ refresh=True,
+ head=revisions,
+ branch_labels=branch_label,
+ **template_args, # type:ignore[arg-type]
+ )
+
+
+def upgrade(
+ config: Config,
+ revision: str,
+ sql: bool = False,
+ tag: Optional[str] = None,
+) -> None:
+ """Upgrade to a later version.
+
+ :param config: a :class:`.Config` instance.
+
+ :param revision: string revision target or range for --sql mode
+
+ :param sql: if True, use ``--sql`` mode
+
+ :param tag: an arbitrary "tag" that can be intercepted by custom
+ ``env.py`` scripts via the :meth:`.EnvironmentContext.get_tag_argument`
+ method.
+
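+    E.g., the programmatic equivalent of running ``alembic upgrade head``
+    from the command line (the .ini path is illustrative)::
+
+        from alembic import command
+        from alembic.config import Config
+
+        alembic_cfg = Config("alembic.ini")
+        command.upgrade(alembic_cfg, "head")
+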
+ """
+
+ script = ScriptDirectory.from_config(config)
+
+ starting_rev = None
+ if ":" in revision:
+ if not sql:
+ raise util.CommandError("Range revision not allowed")
+ starting_rev, revision = revision.split(":", 2)
+
+ def upgrade(rev, context):
+ return script._upgrade_revs(revision, rev)
+
+ with EnvironmentContext(
+ config,
+ script,
+ fn=upgrade,
+ as_sql=sql,
+ starting_rev=starting_rev,
+ destination_rev=revision,
+ tag=tag,
+ ):
+ script.run_env()
+
+
+def downgrade(
+ config: Config,
+ revision: str,
+ sql: bool = False,
+ tag: Optional[str] = None,
+) -> None:
+ """Revert to a previous version.
+
+ :param config: a :class:`.Config` instance.
+
+ :param revision: string revision target or range for --sql mode
+
+ :param sql: if True, use ``--sql`` mode
+
+ :param tag: an arbitrary "tag" that can be intercepted by custom
+ ``env.py`` scripts via the :meth:`.EnvironmentContext.get_tag_argument`
+ method.
+
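+    E.g., stepping back a single revision, or emitting the SQL for a
+    range in offline mode (``ae1027a6acf`` is a hypothetical revision
+    id), assuming ``alembic_cfg`` is a :class:`.Config`::
+
+        from alembic import command
+
+        command.downgrade(alembic_cfg, "-1")
+        command.downgrade(alembic_cfg, "ae1027a6acf:base", sql=True)
+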
+ """
+
+ script = ScriptDirectory.from_config(config)
+ starting_rev = None
+ if ":" in revision:
+ if not sql:
+ raise util.CommandError("Range revision not allowed")
+ starting_rev, revision = revision.split(":", 2)
+ elif sql:
+ raise util.CommandError(
+ "downgrade with --sql requires :"
+ )
+
+ def downgrade(rev, context):
+ return script._downgrade_revs(revision, rev)
+
+ with EnvironmentContext(
+ config,
+ script,
+ fn=downgrade,
+ as_sql=sql,
+ starting_rev=starting_rev,
+ destination_rev=revision,
+ tag=tag,
+ ):
+ script.run_env()
+
+
+def show(config, rev):
+ """Show the revision(s) denoted by the given symbol.
+
+ :param config: a :class:`.Config` instance.
+
+    :param rev: string revision target
+
+ """
+
+ script = ScriptDirectory.from_config(config)
+
+ if rev == "current":
+
+ def show_current(rev, context):
+ for sc in script.get_revisions(rev):
+ config.print_stdout(sc.log_entry)
+ return []
+
+ with EnvironmentContext(config, script, fn=show_current):
+ script.run_env()
+ else:
+ for sc in script.get_revisions(rev):
+ config.print_stdout(sc.log_entry)
+
+
+def history(
+ config: Config,
+ rev_range: Optional[str] = None,
+ verbose: bool = False,
+ indicate_current: bool = False,
+) -> None:
+ """List changeset scripts in chronological order.
+
+ :param config: a :class:`.Config` instance.
+
+ :param rev_range: string revision range
+
+ :param verbose: output in verbose mode.
+
+ :param indicate_current: indicate current revision.
+
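+    E.g., listing everything from the database's current revision up to
+    the latest head, assuming ``alembic_cfg`` is a :class:`.Config`::
+
+        from alembic import command
+
+        command.history(alembic_cfg, rev_range="current:", verbose=True)
+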
+ """
+ base: Optional[str]
+ head: Optional[str]
+ script = ScriptDirectory.from_config(config)
+ if rev_range is not None:
+ if ":" not in rev_range:
+ raise util.CommandError(
+ "History range requires [start]:[end], " "[start]:, or :[end]"
+ )
+ base, head = rev_range.strip().split(":")
+ else:
+ base = head = None
+
+ environment = (
+ util.asbool(config.get_main_option("revision_environment"))
+ or indicate_current
+ )
+
+ def _display_history(config, script, base, head, currents=()):
+ for sc in script.walk_revisions(
+ base=base or "base", head=head or "heads"
+ ):
+ if indicate_current:
+ sc._db_current_indicator = sc.revision in currents
+
+ config.print_stdout(
+ sc.cmd_format(
+ verbose=verbose,
+ include_branches=True,
+ include_doc=True,
+ include_parents=True,
+ )
+ )
+
+ def _display_history_w_current(config, script, base, head):
+ def _display_current_history(rev, context):
+ if head == "current":
+ _display_history(config, script, base, rev, rev)
+ elif base == "current":
+ _display_history(config, script, rev, head, rev)
+ else:
+ _display_history(config, script, base, head, rev)
+ return []
+
+ with EnvironmentContext(config, script, fn=_display_current_history):
+ script.run_env()
+
+ if base == "current" or head == "current" or environment:
+ _display_history_w_current(config, script, base, head)
+ else:
+ _display_history(config, script, base, head)
+
+
+def heads(config, verbose=False, resolve_dependencies=False):
+ """Show current available heads in the script directory.
+
+ :param config: a :class:`.Config` instance.
+
+ :param verbose: output in verbose mode.
+
+    :param resolve_dependencies: treat dependency versions as down revisions.
+
+ """
+
+ script = ScriptDirectory.from_config(config)
+ if resolve_dependencies:
+ heads = script.get_revisions("heads")
+ else:
+ heads = script.get_revisions(script.get_heads())
+
+ for rev in heads:
+ config.print_stdout(
+ rev.cmd_format(
+ verbose, include_branches=True, tree_indicators=False
+ )
+ )
+
+
+def branches(config, verbose=False):
+ """Show current branch points.
+
+ :param config: a :class:`.Config` instance.
+
+ :param verbose: output in verbose mode.
+
+ """
+ script = ScriptDirectory.from_config(config)
+ for sc in script.walk_revisions():
+ if sc.is_branch_point:
+ config.print_stdout(
+ "%s\n%s\n",
+ sc.cmd_format(verbose, include_branches=True),
+ "\n".join(
+ "%s -> %s"
+ % (
+ " " * len(str(sc.revision)),
+ rev_obj.cmd_format(
+ False, include_branches=True, include_doc=verbose
+ ),
+ )
+ for rev_obj in (
+ script.get_revision(rev) for rev in sc.nextrev
+ )
+ ),
+ )
+
+
+def current(config: Config, verbose: bool = False) -> None:
+ """Display the current revision for a database.
+
+ :param config: a :class:`.Config` instance.
+
+ :param verbose: output in verbose mode.
+
+ """
+
+ script = ScriptDirectory.from_config(config)
+
+ def display_version(rev, context):
+ if verbose:
+ config.print_stdout(
+ "Current revision(s) for %s:",
+ util.obfuscate_url_pw(context.connection.engine.url),
+ )
+ for rev in script.get_all_current(rev):
+ config.print_stdout(rev.cmd_format(verbose))
+
+ return []
+
+ with EnvironmentContext(
+ config, script, fn=display_version, dont_mutate=True
+ ):
+ script.run_env()
+
+
+def stamp(
+ config: Config,
+ revision: _RevIdType,
+ sql: bool = False,
+ tag: Optional[str] = None,
+ purge: bool = False,
+) -> None:
+ """'stamp' the revision table with the given revision; don't
+ run any migrations.
+
+ :param config: a :class:`.Config` instance.
+
+ :param revision: target revision or list of revisions. May be a list
+ to indicate stamping of multiple branch heads.
+
+ .. note:: this parameter is called "revisions" in the command line
+ interface.
+
+ :param sql: use ``--sql`` mode
+
+ :param tag: an arbitrary "tag" that can be intercepted by custom
+ ``env.py`` scripts via the :class:`.EnvironmentContext.get_tag_argument`
+ method.
+
+ :param purge: delete all entries in the version table before stamping.
+
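+    E.g., resetting the version table to a single head, erasing any
+    existing entries first, assuming ``alembic_cfg`` is a
+    :class:`.Config`::
+
+        from alembic import command
+
+        command.stamp(alembic_cfg, "head", purge=True)
+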
+ """
+
+ script = ScriptDirectory.from_config(config)
+
+ if sql:
+ destination_revs = []
+ starting_rev = None
+ for _revision in util.to_list(revision):
+ if ":" in _revision:
+ srev, _revision = _revision.split(":", 2)
+
+ if starting_rev != srev:
+ if starting_rev is None:
+ starting_rev = srev
+ else:
+ raise util.CommandError(
+ "Stamp operation with --sql only supports a "
+ "single starting revision at a time"
+ )
+ destination_revs.append(_revision)
+ else:
+ destination_revs = util.to_list(revision)
+
+ def do_stamp(rev, context):
+ return script._stamp_revs(util.to_tuple(destination_revs), rev)
+
+ with EnvironmentContext(
+ config,
+ script,
+ fn=do_stamp,
+ as_sql=sql,
+ starting_rev=starting_rev if sql else None,
+ destination_rev=util.to_tuple(destination_revs),
+ tag=tag,
+ purge=purge,
+ ):
+ script.run_env()
+
+
+def edit(config: Config, rev: str) -> None:
+ """Edit revision script(s) using $EDITOR.
+
+ :param config: a :class:`.Config` instance.
+
+ :param rev: target revision.
+
+ """
+
+ script = ScriptDirectory.from_config(config)
+
+ if rev == "current":
+
+ def edit_current(rev, context):
+ if not rev:
+ raise util.CommandError("No current revisions")
+ for sc in script.get_revisions(rev):
+ util.open_in_editor(sc.path)
+ return []
+
+ with EnvironmentContext(config, script, fn=edit_current):
+ script.run_env()
+ else:
+ revs = script.get_revisions(rev)
+ if not revs:
+ raise util.CommandError(
+ "No revision files indicated by symbol '%s'" % rev
+ )
+ for sc in revs:
+ assert sc
+ util.open_in_editor(sc.path)
+
+
+def ensure_version(config: Config, sql: bool = False) -> None:
+ """Create the alembic version table if it doesn't exist already .
+
+ :param config: a :class:`.Config` instance.
+
+ :param sql: use ``--sql`` mode
+
+ .. versionadded:: 1.7.6
+
+ """
+
+ script = ScriptDirectory.from_config(config)
+
+ def do_ensure_version(rev, context):
+ context._ensure_version_table()
+ return []
+
+ with EnvironmentContext(
+ config,
+ script,
+ fn=do_ensure_version,
+ as_sql=sql,
+ ):
+ script.run_env()
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/config.py b/Backend/venv/lib/python3.12/site-packages/alembic/config.py
new file mode 100644
index 00000000..55b5811a
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/alembic/config.py
@@ -0,0 +1,634 @@
+from __future__ import annotations
+
+from argparse import ArgumentParser
+from argparse import Namespace
+from configparser import ConfigParser
+import inspect
+import os
+import sys
+from typing import Any
+from typing import cast
+from typing import Dict
+from typing import Mapping
+from typing import Optional
+from typing import overload
+from typing import TextIO
+from typing import Union
+
+from typing_extensions import TypedDict
+
+from . import __version__
+from . import command
+from . import util
+from .util import compat
+
+
+class Config:
+ r"""Represent an Alembic configuration.
+
+ Within an ``env.py`` script, this is available
+ via the :attr:`.EnvironmentContext.config` attribute,
+ which in turn is available at ``alembic.context``::
+
+ from alembic import context
+
+ some_param = context.config.get_main_option("my option")
+
+ When invoking Alembic programmatically, a new
+ :class:`.Config` can be created by passing
+ the name of an .ini file to the constructor::
+
+ from alembic.config import Config
+ alembic_cfg = Config("/path/to/yourapp/alembic.ini")
+
+ With a :class:`.Config` object, you can then
+ run Alembic commands programmatically using the directives
+ in :mod:`alembic.command`.
+
+ The :class:`.Config` object can also be constructed without
+ a filename. Values can be set programmatically, and
+ new sections will be created as needed::
+
+ from alembic.config import Config
+ alembic_cfg = Config()
+ alembic_cfg.set_main_option("script_location", "myapp:migrations")
+ alembic_cfg.set_main_option("sqlalchemy.url", "postgresql://foo/bar")
+ alembic_cfg.set_section_option("mysection", "foo", "bar")
+
+ .. warning::
+
+ When using programmatic configuration, make sure the
+        ``env.py`` file in use is compatible with the target configuration;
+        in particular, the call to Python ``logging.fileConfig()`` should
+        be omitted if the programmatic configuration doesn't actually
+        include logging directives.
+
+ For passing non-string values to environments, such as connections and
+ engines, use the :attr:`.Config.attributes` dictionary::
+
+ with engine.begin() as connection:
+ alembic_cfg.attributes['connection'] = connection
+ command.upgrade(alembic_cfg, "head")
+
+ :param file\_: name of the .ini file to open.
+ :param ini_section: name of the main Alembic section within the
+ .ini file
+ :param output_buffer: optional file-like input buffer which
+ will be passed to the :class:`.MigrationContext` - used to redirect
+ the output of "offline generation" when using Alembic programmatically.
+ :param stdout: buffer where the "print" output of commands will be sent.
+ Defaults to ``sys.stdout``.
+
+ :param config_args: A dictionary of keys and values that will be used
+ for substitution in the alembic config file. The dictionary as given
+ is **copied** to a new one, stored locally as the attribute
+ ``.config_args``. When the :attr:`.Config.file_config` attribute is
+ first invoked, the replacement variable ``here`` will be added to this
+ dictionary before the dictionary is passed to ``ConfigParser()``
+ to parse the .ini file.
+
+ :param attributes: optional dictionary of arbitrary Python keys/values,
+ which will be populated into the :attr:`.Config.attributes` dictionary.
+
+ .. seealso::
+
+ :ref:`connection_sharing`
+
+ """
+
+ def __init__(
+ self,
+ file_: Union[str, os.PathLike[str], None] = None,
+ ini_section: str = "alembic",
+ output_buffer: Optional[TextIO] = None,
+ stdout: TextIO = sys.stdout,
+ cmd_opts: Optional[Namespace] = None,
+ config_args: Mapping[str, Any] = util.immutabledict(),
+ attributes: Optional[dict] = None,
+ ) -> None:
+ """Construct a new :class:`.Config`"""
+ self.config_file_name = file_
+ self.config_ini_section = ini_section
+ self.output_buffer = output_buffer
+ self.stdout = stdout
+ self.cmd_opts = cmd_opts
+ self.config_args = dict(config_args)
+ if attributes:
+ self.attributes.update(attributes)
+
+ cmd_opts: Optional[Namespace] = None
+ """The command-line options passed to the ``alembic`` script.
+
+ Within an ``env.py`` script this can be accessed via the
+ :attr:`.EnvironmentContext.config` attribute.
+
+ .. seealso::
+
+ :meth:`.EnvironmentContext.get_x_argument`
+
+ """
+
+ config_file_name: Union[str, os.PathLike[str], None] = None
+ """Filesystem path to the .ini file in use."""
+
+ config_ini_section: str = None # type:ignore[assignment]
+ """Name of the config file section to read basic configuration
+ from. Defaults to ``alembic``, that is the ``[alembic]`` section
+ of the .ini file. This value is modified using the ``-n/--name``
+ option to the Alembic runner.
+
+ """
+
+ @util.memoized_property
+ def attributes(self):
+ """A Python dictionary for storage of additional state.
+
+ This is a utility dictionary which can include not just strings but
+ engines, connections, schema objects, or anything else.
+ Use this to pass objects into an env.py script, such as passing
+ a :class:`sqlalchemy.engine.base.Connection` when calling
+ commands from :mod:`alembic.command` programmatically.
+
+ .. seealso::
+
+ :ref:`connection_sharing`
+
+ :paramref:`.Config.attributes`
+
+ """
+ return {}
+
+ def print_stdout(self, text: str, *arg) -> None:
+ """Render a message to standard out.
+
+        When :meth:`.Config.print_stdout` is called with additional args,
+        those arguments will be formatted against the provided text;
+        otherwise we simply output the provided text verbatim.
+
+        This is a no-op when the ``quiet`` messaging option is enabled.
+
+        e.g.::
+
+            >>> config.print_stdout('Some text %s', 'arg')
+            Some text arg
+
+ """
+
+ if arg:
+ output = str(text) % arg
+ else:
+ output = str(text)
+
+ util.write_outstream(self.stdout, output, "\n", **self.messaging_opts)
+
+ @util.memoized_property
+ def file_config(self):
+ """Return the underlying ``ConfigParser`` object.
+
+ Direct access to the .ini file is available here,
+ though the :meth:`.Config.get_section` and
+ :meth:`.Config.get_main_option`
+ methods provide a possibly simpler interface.
+
+ """
+
+ if self.config_file_name:
+ here = os.path.abspath(os.path.dirname(self.config_file_name))
+ else:
+ here = ""
+ self.config_args["here"] = here
+ file_config = ConfigParser(self.config_args)
+ if self.config_file_name:
+ compat.read_config_parser(file_config, [self.config_file_name])
+ else:
+ file_config.add_section(self.config_ini_section)
+ return file_config
+
+ def get_template_directory(self) -> str:
+ """Return the directory where Alembic setup templates are found.
+
+ This method is used by the alembic ``init`` and ``list_templates``
+ commands.
+
+ """
+ import alembic
+
+ package_dir = os.path.abspath(os.path.dirname(alembic.__file__))
+ return os.path.join(package_dir, "templates")
+
+ @overload
+ def get_section(
+ self, name: str, default: None = ...
+ ) -> Optional[Dict[str, str]]:
+ ...
+
+ # "default" here could also be a TypeVar
+ # _MT = TypeVar("_MT", bound=Mapping[str, str]),
+ # however mypy wasn't handling that correctly (pyright was)
+ @overload
+ def get_section(
+ self, name: str, default: Dict[str, str]
+ ) -> Dict[str, str]:
+ ...
+
+ @overload
+ def get_section(
+ self, name: str, default: Mapping[str, str]
+ ) -> Union[Dict[str, str], Mapping[str, str]]:
+ ...
+
+ def get_section(
+ self, name: str, default: Optional[Mapping[str, str]] = None
+ ) -> Optional[Mapping[str, str]]:
+ """Return all the configuration options from a given .ini file section
+ as a dictionary.
+
+ If the given section does not exist, the value of ``default``
+ is returned, which is expected to be a dictionary or other mapping.
+
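+        E.g., reading an illustrative section name with an empty-dict
+        default, given a :class:`.Config` named ``config``::
+
+            hooks = config.get_section("post_write_hooks", {})
+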
+ """
+ if not self.file_config.has_section(name):
+ return default
+
+ return dict(self.file_config.items(name))
+
+ def set_main_option(self, name: str, value: str) -> None:
+ """Set an option programmatically within the 'main' section.
+
+ This overrides whatever was in the .ini file.
+
+ :param name: name of the value
+
+ :param value: the value. Note that this value is passed to
+ ``ConfigParser.set``, which supports variable interpolation using
+ pyformat (e.g. ``%(some_value)s``). A raw percent sign not part of
+ an interpolation symbol must therefore be escaped, e.g. ``%%``.
+ The given value may refer to another value already in the file
+ using the interpolation format.
+
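+        E.g., a sketch with illustrative credentials; note the doubled
+        percent sign used to escape the literal ``%`` of a URL-encoded
+        password::
+
+            config.set_main_option(
+                "sqlalchemy.url",
+                "postgresql://scott:tiger%%4099@localhost/test",
+            )
+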
+ """
+ self.set_section_option(self.config_ini_section, name, value)
+
+ def remove_main_option(self, name: str) -> None:
+ self.file_config.remove_option(self.config_ini_section, name)
+
+ def set_section_option(self, section: str, name: str, value: str) -> None:
+ """Set an option programmatically within the given section.
+
+ The section is created if it doesn't exist already.
+ The value here will override whatever was in the .ini
+ file.
+
+ :param section: name of the section
+
+ :param name: name of the value
+
+ :param value: the value. Note that this value is passed to
+ ``ConfigParser.set``, which supports variable interpolation using
+ pyformat (e.g. ``%(some_value)s``). A raw percent sign not part of
+ an interpolation symbol must therefore be escaped, e.g. ``%%``.
+ The given value may refer to another value already in the file
+ using the interpolation format.
+
+ """
+
+ if not self.file_config.has_section(section):
+ self.file_config.add_section(section)
+ self.file_config.set(section, name, value)
+
+ def get_section_option(
+ self, section: str, name: str, default: Optional[str] = None
+ ) -> Optional[str]:
+ """Return an option from the given section of the .ini file."""
+ if not self.file_config.has_section(section):
+ raise util.CommandError(
+ "No config file %r found, or file has no "
+ "'[%s]' section" % (self.config_file_name, section)
+ )
+ if self.file_config.has_option(section, name):
+ return self.file_config.get(section, name)
+ else:
+ return default
+
+ @overload
+ def get_main_option(self, name: str, default: str) -> str:
+ ...
+
+ @overload
+ def get_main_option(
+ self, name: str, default: Optional[str] = None
+ ) -> Optional[str]:
+ ...
+
+ def get_main_option(self, name, default=None):
+ """Return an option from the 'main' section of the .ini file.
+
+ This defaults to being a key from the ``[alembic]``
+ section, unless the ``-n/--name`` flag were used to
+ indicate a different section.
+
+ """
+ return self.get_section_option(self.config_ini_section, name, default)
+
+ @util.memoized_property
+ def messaging_opts(self) -> MessagingOptions:
+ """The messaging options."""
+ return cast(
+ MessagingOptions,
+ util.immutabledict(
+ {"quiet": getattr(self.cmd_opts, "quiet", False)}
+ ),
+ )
+
+
+class MessagingOptions(TypedDict, total=False):
+ quiet: bool
+
+
+class CommandLine:
+ def __init__(self, prog: Optional[str] = None) -> None:
+ self._generate_args(prog)
+
+ def _generate_args(self, prog: Optional[str]) -> None:
+ def add_options(fn, parser, positional, kwargs):
+ kwargs_opts = {
+ "template": (
+ "-t",
+ "--template",
+ dict(
+ default="generic",
+ type=str,
+ help="Setup template for use with 'init'",
+ ),
+ ),
+ "message": (
+ "-m",
+ "--message",
+ dict(
+ type=str, help="Message string to use with 'revision'"
+ ),
+ ),
+ "sql": (
+ "--sql",
+ dict(
+ action="store_true",
+ help="Don't emit SQL to database - dump to "
+ "standard output/file instead. See docs on "
+ "offline mode.",
+ ),
+ ),
+ "tag": (
+ "--tag",
+ dict(
+ type=str,
+ help="Arbitrary 'tag' name - can be used by "
+ "custom env.py scripts.",
+ ),
+ ),
+ "head": (
+ "--head",
+ dict(
+ type=str,
+ help="Specify head revision or @head "
+ "to base new revision on.",
+ ),
+ ),
+ "splice": (
+ "--splice",
+ dict(
+ action="store_true",
+ help="Allow a non-head revision as the "
+ "'head' to splice onto",
+ ),
+ ),
+ "depends_on": (
+ "--depends-on",
+ dict(
+ action="append",
+ help="Specify one or more revision identifiers "
+ "which this revision should depend on.",
+ ),
+ ),
+ "rev_id": (
+ "--rev-id",
+ dict(
+ type=str,
+ help="Specify a hardcoded revision id instead of "
+ "generating one",
+ ),
+ ),
+ "version_path": (
+ "--version-path",
+ dict(
+ type=str,
+ help="Specify specific path from config for "
+ "version file",
+ ),
+ ),
+ "branch_label": (
+ "--branch-label",
+ dict(
+ type=str,
+ help="Specify a branch label to apply to the "
+ "new revision",
+ ),
+ ),
+ "verbose": (
+ "-v",
+ "--verbose",
+ dict(action="store_true", help="Use more verbose output"),
+ ),
+ "resolve_dependencies": (
+ "--resolve-dependencies",
+ dict(
+ action="store_true",
+ help="Treat dependency versions as down revisions",
+ ),
+ ),
+ "autogenerate": (
+ "--autogenerate",
+ dict(
+ action="store_true",
+ help="Populate revision script with candidate "
+ "migration operations, based on comparison "
+ "of database to model.",
+ ),
+ ),
+ "rev_range": (
+ "-r",
+ "--rev-range",
+ dict(
+ action="store",
+ help="Specify a revision range; "
+ "format is [start]:[end]",
+ ),
+ ),
+ "indicate_current": (
+ "-i",
+ "--indicate-current",
+ dict(
+ action="store_true",
+ help="Indicate the current revision",
+ ),
+ ),
+ "purge": (
+ "--purge",
+ dict(
+ action="store_true",
+ help="Unconditionally erase the version table "
+ "before stamping",
+ ),
+ ),
+ "package": (
+ "--package",
+ dict(
+ action="store_true",
+ help="Write empty __init__.py files to the "
+ "environment and version locations",
+ ),
+ ),
+ }
+ positional_help = {
+ "directory": "location of scripts directory",
+ "revision": "revision identifier",
+ "revisions": "one or more revisions, or 'heads' for all heads",
+ }
+ for arg in kwargs:
+ if arg in kwargs_opts:
+ args = kwargs_opts[arg]
+ args, kw = args[0:-1], args[-1]
+ parser.add_argument(*args, **kw)
+
+ for arg in positional:
+ if (
+ arg == "revisions"
+ or fn in positional_translations
+ and positional_translations[fn][arg] == "revisions"
+ ):
+                    parser.add_argument(
+                        "revisions",
+                        nargs="+",
+                        help=positional_help.get("revisions"),
+                    )
+                else:
+                    parser.add_argument(arg, help=positional_help.get(arg))
+
+ parser = ArgumentParser(prog=prog)
+
+ parser.add_argument(
+ "--version", action="version", version="%%(prog)s %s" % __version__
+ )
+ parser.add_argument(
+ "-c",
+ "--config",
+ type=str,
+ default=os.environ.get("ALEMBIC_CONFIG", "alembic.ini"),
+ help="Alternate config file; defaults to value of "
+ 'ALEMBIC_CONFIG environment variable, or "alembic.ini"',
+ )
+ parser.add_argument(
+ "-n",
+ "--name",
+ type=str,
+ default="alembic",
+ help="Name of section in .ini file to " "use for Alembic config",
+ )
+ parser.add_argument(
+ "-x",
+ action="append",
+ help="Additional arguments consumed by "
+ "custom env.py scripts, e.g. -x "
+ "setting1=somesetting -x setting2=somesetting",
+ )
+ parser.add_argument(
+ "--raiseerr",
+ action="store_true",
+ help="Raise a full stack trace on error",
+ )
+ parser.add_argument(
+ "-q",
+ "--quiet",
+ action="store_true",
+ help="Do not log to std output.",
+ )
+ subparsers = parser.add_subparsers()
+
+ positional_translations = {command.stamp: {"revision": "revisions"}}
+
+ for fn in [getattr(command, n) for n in dir(command)]:
+ if (
+ inspect.isfunction(fn)
+ and fn.__name__[0] != "_"
+ and fn.__module__ == "alembic.command"
+ ):
+ spec = compat.inspect_getfullargspec(fn)
+ if spec[3] is not None:
+ positional = spec[0][1 : -len(spec[3])]
+ kwarg = spec[0][-len(spec[3]) :]
+ else:
+ positional = spec[0][1:]
+ kwarg = []
+
+ if fn in positional_translations:
+ positional = [
+ positional_translations[fn].get(name, name)
+ for name in positional
+ ]
+
+ # parse first line(s) of helptext without a line break
+ help_ = fn.__doc__
+ if help_:
+ help_text = []
+ for line in help_.split("\n"):
+ if not line.strip():
+ break
+ else:
+ help_text.append(line.strip())
+ else:
+ help_text = []
+ subparser = subparsers.add_parser(
+ fn.__name__, help=" ".join(help_text)
+ )
+ add_options(fn, subparser, positional, kwarg)
+ subparser.set_defaults(cmd=(fn, positional, kwarg))
+ self.parser = parser
+
+ def run_cmd(self, config: Config, options: Namespace) -> None:
+ fn, positional, kwarg = options.cmd
+
+ try:
+ fn(
+ config,
+ *[getattr(options, k, None) for k in positional],
+ **{k: getattr(options, k, None) for k in kwarg},
+ )
+ except util.CommandError as e:
+ if options.raiseerr:
+ raise
+ else:
+ util.err(str(e), **config.messaging_opts)
+
+ def main(self, argv=None):
+ options = self.parser.parse_args(argv)
+ if not hasattr(options, "cmd"):
+ # see http://bugs.python.org/issue9253, argparse
+ # behavior changed incompatibly in py3.3
+ self.parser.error("too few arguments")
+ else:
+ cfg = Config(
+ file_=options.config,
+ ini_section=options.name,
+ cmd_opts=options,
+ )
+ self.run_cmd(cfg, options)
+
+
+def main(argv=None, prog=None, **kwargs):
+ """The console runner function for Alembic."""
+
+ CommandLine(prog=prog).main(argv=argv)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/context.py b/Backend/venv/lib/python3.12/site-packages/alembic/context.py
new file mode 100644
index 00000000..758fca87
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/alembic/context.py
@@ -0,0 +1,5 @@
+from .runtime.environment import EnvironmentContext
+
+# create proxy functions for
+# each method on the EnvironmentContext class.
+EnvironmentContext.create_module_class_proxy(globals(), locals())
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/context.pyi b/Backend/venv/lib/python3.12/site-packages/alembic/context.pyi
new file mode 100644
index 00000000..f37f2461
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/alembic/context.pyi
@@ -0,0 +1,847 @@
+# ### this file stubs are generated by tools/write_pyi.py - do not edit ###
+# ### imports are manually managed
+from __future__ import annotations
+
+from typing import Any
+from typing import Callable
+from typing import Collection
+from typing import ContextManager
+from typing import Dict
+from typing import Iterable
+from typing import List
+from typing import Literal
+from typing import Mapping
+from typing import MutableMapping
+from typing import Optional
+from typing import overload
+from typing import TextIO
+from typing import Tuple
+from typing import TYPE_CHECKING
+from typing import Union
+
+if TYPE_CHECKING:
+ from sqlalchemy.engine.base import Connection
+ from sqlalchemy.engine.url import URL
+ from sqlalchemy.sql import Executable
+ from sqlalchemy.sql.schema import Column
+ from sqlalchemy.sql.schema import FetchedValue
+ from sqlalchemy.sql.schema import MetaData
+ from sqlalchemy.sql.schema import SchemaItem
+ from sqlalchemy.sql.type_api import TypeEngine
+
+ from .autogenerate.api import AutogenContext
+ from .config import Config
+ from .operations.ops import MigrationScript
+ from .runtime.migration import _ProxyTransaction
+ from .runtime.migration import MigrationContext
+ from .runtime.migration import MigrationInfo
+ from .script import ScriptDirectory
+
+### end imports ###
+
+def begin_transaction() -> Union[_ProxyTransaction, ContextManager[None]]:
+ """Return a context manager that will
+ enclose an operation within a "transaction",
+ as defined by the environment's offline
+ and transactional DDL settings.
+
+ e.g.::
+
+ with context.begin_transaction():
+ context.run_migrations()
+
+ :meth:`.begin_transaction` is intended to
+ "do the right thing" regardless of
+ calling context:
+
+ * If :meth:`.is_transactional_ddl` is ``False``,
+ returns a "do nothing" context manager
+ which otherwise produces no transactional
+ state or directives.
+ * If :meth:`.is_offline_mode` is ``True``,
+ returns a context manager that will
+ invoke the :meth:`.DefaultImpl.emit_begin`
+ and :meth:`.DefaultImpl.emit_commit`
+ methods, which will produce the string
+ directives ``BEGIN`` and ``COMMIT`` on
+ the output stream, as rendered by the
+ target backend (e.g. SQL Server would
+ emit ``BEGIN TRANSACTION``).
+ * Otherwise, calls :meth:`sqlalchemy.engine.Connection.begin`
+ on the current online connection, which
+ returns a :class:`sqlalchemy.engine.Transaction`
+ object. This object demarcates a real
+ transaction and is itself a context manager,
+ which will roll back if an exception
+ is raised.
+
+ Note that a custom ``env.py`` script which
+ has more specific transactional needs can of course
+ manipulate the :class:`~sqlalchemy.engine.Connection`
+ directly to produce transactional state in "online"
+ mode.
+
+ """
+
+config: Config
+
+def configure(
+ connection: Optional[Connection] = None,
+ url: Union[str, URL, None] = None,
+ dialect_name: Optional[str] = None,
+ dialect_opts: Optional[Dict[str, Any]] = None,
+ transactional_ddl: Optional[bool] = None,
+ transaction_per_migration: bool = False,
+ output_buffer: Optional[TextIO] = None,
+ starting_rev: Optional[str] = None,
+ tag: Optional[str] = None,
+ template_args: Optional[Dict[str, Any]] = None,
+ render_as_batch: bool = False,
+ target_metadata: Optional[MetaData] = None,
+ include_name: Optional[
+ Callable[
+ [
+ Optional[str],
+ Literal[
+ "schema",
+ "table",
+ "column",
+ "index",
+ "unique_constraint",
+ "foreign_key_constraint",
+ ],
+ MutableMapping[
+ Literal[
+ "schema_name",
+ "table_name",
+ "schema_qualified_table_name",
+ ],
+ Optional[str],
+ ],
+ ],
+ bool,
+ ]
+ ] = None,
+ include_object: Optional[
+ Callable[
+ [
+ SchemaItem,
+ Optional[str],
+ Literal[
+ "schema",
+ "table",
+ "column",
+ "index",
+ "unique_constraint",
+ "foreign_key_constraint",
+ ],
+ bool,
+ Optional[SchemaItem],
+ ],
+ bool,
+ ]
+ ] = None,
+ include_schemas: bool = False,
+ process_revision_directives: Optional[
+ Callable[
+ [
+ MigrationContext,
+ Union[str, Iterable[Optional[str]], Iterable[str]],
+ List[MigrationScript],
+ ],
+ None,
+ ]
+ ] = None,
+ compare_type: Union[
+ bool,
+ Callable[
+ [
+ MigrationContext,
+ Column[Any],
+ Column[Any],
+ TypeEngine,
+ TypeEngine,
+ ],
+ Optional[bool],
+ ],
+ ] = True,
+ compare_server_default: Union[
+ bool,
+ Callable[
+ [
+ MigrationContext,
+ Column[Any],
+ Column[Any],
+ Optional[str],
+ Optional[FetchedValue],
+ Optional[str],
+ ],
+ Optional[bool],
+ ],
+ ] = False,
+ render_item: Optional[
+ Callable[[str, Any, AutogenContext], Union[str, Literal[False]]]
+ ] = None,
+ literal_binds: bool = False,
+ upgrade_token: str = "upgrades",
+ downgrade_token: str = "downgrades",
+ alembic_module_prefix: str = "op.",
+ sqlalchemy_module_prefix: str = "sa.",
+ user_module_prefix: Optional[str] = None,
+ on_version_apply: Optional[
+ Callable[
+ [
+ MigrationContext,
+ MigrationInfo,
+ Collection[Any],
+ Mapping[str, Any],
+ ],
+ None,
+ ]
+ ] = None,
+ **kw: Any,
+) -> None:
+ """Configure a :class:`.MigrationContext` within this
+ :class:`.EnvironmentContext` which will provide database
+ connectivity and other configuration to a series of
+ migration scripts.
+
+ Many methods on :class:`.EnvironmentContext` require that
+ this method has been called in order to function, as they
+ ultimately need to have database access or at least access
+ to the dialect in use. Those which do are documented as such.
+
+ The important thing needed by :meth:`.configure` is a
+ means to determine what kind of database dialect is in use.
+ An actual connection to that database is needed only if
+ the :class:`.MigrationContext` is to be used in
+ "online" mode.
+
+ If the :meth:`.is_offline_mode` function returns ``True``,
+ then no connection is needed here. Otherwise, the
+ ``connection`` parameter should be present as an
+ instance of :class:`sqlalchemy.engine.Connection`.
+
+ This function is typically called from the ``env.py``
+    script within a migration environment. It can be called
+    multiple times within a single invocation. The most recent
+ :class:`~sqlalchemy.engine.Connection`
+ for which it was called is the one that will be operated upon
+ by the next call to :meth:`.run_migrations`.
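+
+    For example, a typical "online" mode ``env.py`` block looks like the
+    following (a minimal sketch; ``connectable`` and ``target_metadata``
+    are placeholder names from the standard template)::
+
+        with connectable.connect() as connection:
+            context.configure(
+                connection=connection,
+                target_metadata=target_metadata,
+            )
+
+            with context.begin_transaction():
+                context.run_migrations()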
+
+ General parameters:
+
+    :param connection: a :class:`~sqlalchemy.engine.Connection`
+      to use for SQL execution in "online" mode. When present, it is
+      also used to determine the type of dialect in use.
+ :param url: a string database url, or a
+ :class:`sqlalchemy.engine.url.URL` object.
+ The type of dialect to be used will be derived from this if
+ ``connection`` is not passed.
+ :param dialect_name: string name of a dialect, such as
+ "postgresql", "mssql", etc.
+ The type of dialect to be used will be derived from this if
+ ``connection`` and ``url`` are not passed.
+    :param dialect_opts: dictionary of options to be passed to the
+      dialect constructor.
+ :param transactional_ddl: Force the usage of "transactional"
+ DDL on or off;
+ this otherwise defaults to whether or not the dialect in
+ use supports it.
+    :param transaction_per_migration: if True, nest each migration script
+      in its own transaction, rather than running the full series of
+      migrations within a single transaction.
+    :param output_buffer: a file-like object that will be used for
+      textual output when the ``--sql`` option is used to generate SQL
+      scripts. Defaults to ``sys.stdout`` if not passed here and also
+      not present on the :class:`.Config` object. The value here
+      overrides that of the :class:`.Config` object.
+ :param output_encoding: when using ``--sql`` to generate SQL
+ scripts, apply this encoding to the string output.
+ :param literal_binds: when using ``--sql`` to generate SQL
+ scripts, pass through the ``literal_binds`` flag to the compiler
+ so that any literal values that would ordinarily be bound
+ parameters are converted to plain strings.
+
+ .. warning:: Dialects can typically only handle simple datatypes
+ like strings and numbers for auto-literal generation. Datatypes
+ like dates, intervals, and others may still require manual
+ formatting, typically using :meth:`.Operations.inline_literal`.
+
+ .. note:: the ``literal_binds`` flag is ignored on SQLAlchemy
+ versions prior to 0.8 where this feature is not supported.
+
+ .. seealso::
+
+ :meth:`.Operations.inline_literal`
+
+ :param starting_rev: Override the "starting revision" argument
+ when using ``--sql`` mode.
+ :param tag: a string tag for usage by custom ``env.py`` scripts.
+ Set via the ``--tag`` option, can be overridden here.
+ :param template_args: dictionary of template arguments which
+ will be added to the template argument environment when
+ running the "revision" command. Note that the script environment
+      is only run within the "revision" command if the ``--autogenerate``
+      option is used, or if the option ``revision_environment=true``
+      is present in the alembic.ini file.
+
+ :param version_table: The name of the Alembic version table.
+ The default is ``'alembic_version'``.
+ :param version_table_schema: Optional schema to place version
+ table within.
+ :param version_table_pk: boolean, whether the Alembic version table
+ should use a primary key constraint for the "value" column; this
+ only takes effect when the table is first created.
+ Defaults to True; setting to False should not be necessary and is
+ here for backwards compatibility reasons.
+ :param on_version_apply: a callable or collection of callables to be
+ run for each migration step.
+ The callables will be run in the order they are given, once for
+ each migration step, after the respective operation has been
+ applied but before its transaction is finalized.
+ Each callable accepts no positional arguments and the following
+ keyword arguments:
+
+ * ``ctx``: the :class:`.MigrationContext` running the migration,
+ * ``step``: a :class:`.MigrationInfo` representing the
+ step currently being applied,
+ * ``heads``: a collection of version strings representing the
+ current heads,
+ * ``run_args``: the ``**kwargs`` passed to :meth:`.run_migrations`.
+
+ Parameters specific to the autogenerate feature, when
+ ``alembic revision`` is run with the ``--autogenerate`` feature:
+
+ :param target_metadata: a :class:`sqlalchemy.schema.MetaData`
+ object, or a sequence of :class:`~sqlalchemy.schema.MetaData`
+ objects, that will be consulted during autogeneration.
+ The tables present in each :class:`~sqlalchemy.schema.MetaData`
+ will be compared against
+ what is locally available on the target
+ :class:`~sqlalchemy.engine.Connection`
+ to produce candidate upgrade/downgrade operations.
+    :param compare_type: Indicates type comparison behavior during
+      an autogenerate operation. Defaults to ``True``, turning on type
+      comparison, which has good accuracy on most backends. See
+      :ref:`compare_types` for an example as well as information on
+      other type comparison options. Set to ``False`` to disable type
+      comparison. A callable can also be passed to provide custom type
+      comparison; see :ref:`compare_types` for additional details.
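+
+      A minimal sketch of such a callable, following the argument order
+      of the ``compare_type`` annotation above (names are illustrative)::
+
+          def my_compare_type(
+              context,
+              inspected_column,
+              metadata_column,
+              inspected_type,
+              metadata_type,
+          ):
+              # return True if the types are different, False if not,
+              # or None to fall back to the default comparison
+              return None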
+
+ .. versionchanged:: 1.12.0 The default value of
+ :paramref:`.EnvironmentContext.configure.compare_type` has been
+ changed to ``True``.
+
+ .. seealso::
+
+ :ref:`compare_types`
+
+ :paramref:`.EnvironmentContext.configure.compare_server_default`
+
+    :param compare_server_default: Indicates server default comparison
+      behavior during an autogenerate operation. Defaults to ``False``,
+      which disables server default comparison. Set to ``True`` to turn
+      on server default comparison, which has varied accuracy depending
+      on backend.
+
+    To customize server default comparison behavior, a callable may
+    be specified which can filter server default comparisons during
+    an autogenerate operation. The format of this callable is::
+
+ def my_compare_server_default(context, inspected_column,
+ metadata_column, inspected_default, metadata_default,
+ rendered_metadata_default):
+ # return True if the defaults are different,
+ # False if not, or None to allow the default implementation
+ # to compare these defaults
+ return None
+
+ context.configure(
+ # ...
+ compare_server_default = my_compare_server_default
+ )
+
+ ``inspected_column`` is a dictionary structure as returned by
+ :meth:`sqlalchemy.engine.reflection.Inspector.get_columns`, whereas
+ ``metadata_column`` is a :class:`sqlalchemy.schema.Column` from
+ the local model environment.
+
+    A return value of ``None`` indicates that default server default
+    comparison should proceed. Note that some backends such as
+    PostgreSQL actually execute the two defaults on the database side
+    to compare for equivalence.
+
+ .. seealso::
+
+ :paramref:`.EnvironmentContext.configure.compare_type`
+
+ :param include_name: A callable function which is given
+ the chance to return ``True`` or ``False`` for any database reflected
+ object based on its name, including database schema names when
+ the :paramref:`.EnvironmentContext.configure.include_schemas` flag
+ is set to ``True``.
+
+ The function accepts the following positional arguments:
+
+ * ``name``: the name of the object, such as schema name or table name.
+ Will be ``None`` when indicating the default schema name of the
+ database connection.
+ * ``type``: a string describing the type of object; currently
+ ``"schema"``, ``"table"``, ``"column"``, ``"index"``,
+ ``"unique_constraint"``, or ``"foreign_key_constraint"``
+ * ``parent_names``: a dictionary of "parent" object names, that are
+ relative to the name being given. Keys in this dictionary may
+ include: ``"schema_name"``, ``"table_name"`` or
+ ``"schema_qualified_table_name"``.
+
+ E.g.::
+
+ def include_name(name, type_, parent_names):
+ if type_ == "schema":
+ return name in ["schema_one", "schema_two"]
+ else:
+ return True
+
+ context.configure(
+ # ...
+ include_schemas = True,
+ include_name = include_name
+ )
+
+ .. seealso::
+
+ :ref:`autogenerate_include_hooks`
+
+ :paramref:`.EnvironmentContext.configure.include_object`
+
+ :paramref:`.EnvironmentContext.configure.include_schemas`
+
+
+ :param include_object: A callable function which is given
+ the chance to return ``True`` or ``False`` for any object,
+ indicating if the given object should be considered in the
+ autogenerate sweep.
+
+ The function accepts the following positional arguments:
+
+ * ``object``: a :class:`~sqlalchemy.schema.SchemaItem` object such
+ as a :class:`~sqlalchemy.schema.Table`,
+ :class:`~sqlalchemy.schema.Column`,
+ :class:`~sqlalchemy.schema.Index`
+ :class:`~sqlalchemy.schema.UniqueConstraint`,
+ or :class:`~sqlalchemy.schema.ForeignKeyConstraint` object
+ * ``name``: the name of the object. This is typically available
+ via ``object.name``.
+ * ``type``: a string describing the type of object; currently
+ ``"table"``, ``"column"``, ``"index"``, ``"unique_constraint"``,
+ or ``"foreign_key_constraint"``
+ * ``reflected``: ``True`` if the given object was produced based on
+ table reflection, ``False`` if it's from a local :class:`.MetaData`
+ object.
+ * ``compare_to``: the object being compared against, if available,
+ else ``None``.
+
+ E.g.::
+
+ def include_object(object, name, type_, reflected, compare_to):
+ if (type_ == "column" and
+ not reflected and
+ object.info.get("skip_autogenerate", False)):
+ return False
+ else:
+ return True
+
+ context.configure(
+ # ...
+ include_object = include_object
+ )
+
+ For the use case of omitting specific schemas from a target database
+ when :paramref:`.EnvironmentContext.configure.include_schemas` is
+ set to ``True``, the :attr:`~sqlalchemy.schema.Table.schema`
+ attribute can be checked for each :class:`~sqlalchemy.schema.Table`
+ object passed to the hook, however it is much more efficient
+ to filter on schemas before reflection of objects takes place
+ using the :paramref:`.EnvironmentContext.configure.include_name`
+ hook.
+
+ .. seealso::
+
+ :ref:`autogenerate_include_hooks`
+
+ :paramref:`.EnvironmentContext.configure.include_name`
+
+ :paramref:`.EnvironmentContext.configure.include_schemas`
+
+ :param render_as_batch: if True, commands which alter elements
+ within a table will be placed under a ``with batch_alter_table():``
+ directive, so that batch migrations will take place.
+
+ .. seealso::
+
+ :ref:`batch_migrations`
+
+ :param include_schemas: If True, autogenerate will scan across
+ all schemas located by the SQLAlchemy
+ :meth:`~sqlalchemy.engine.reflection.Inspector.get_schema_names`
+ method, and include all differences in tables found across all
+ those schemas. When using this option, you may want to also
+ use the :paramref:`.EnvironmentContext.configure.include_name`
+ parameter to specify a callable which
+ can filter the tables/schemas that get included.
+
+ .. seealso::
+
+ :ref:`autogenerate_include_hooks`
+
+ :paramref:`.EnvironmentContext.configure.include_name`
+
+ :paramref:`.EnvironmentContext.configure.include_object`
+
+ :param render_item: Callable that can be used to override how
+ any schema item, i.e. column, constraint, type,
+ etc., is rendered for autogenerate. The callable receives a
+ string describing the type of object, the object, and
+      the autogen context. If it returns False, the
+      default rendering method will be used. If it returns None,
+      the item will not be rendered in the context of a Table
+      construct, that is, it can be used to skip columns or constraints
+      within ``op.create_table()``::
+
+ def my_render_column(type_, col, autogen_context):
+ if type_ == "column" and isinstance(col, MySpecialCol):
+ return repr(col)
+ else:
+ return False
+
+ context.configure(
+ # ...
+ render_item = my_render_column
+ )
+
+ Available values for the type string include: ``"column"``,
+ ``"primary_key"``, ``"foreign_key"``, ``"unique"``, ``"check"``,
+ ``"type"``, ``"server_default"``.
+
+ .. seealso::
+
+ :ref:`autogen_render_types`
+
+ :param upgrade_token: When autogenerate completes, the text of the
+ candidate upgrade operations will be present in this template
+ variable when ``script.py.mako`` is rendered. Defaults to
+ ``upgrades``.
+ :param downgrade_token: When autogenerate completes, the text of the
+ candidate downgrade operations will be present in this
+ template variable when ``script.py.mako`` is rendered. Defaults to
+ ``downgrades``.
+
+    :param alembic_module_prefix: When autogenerate refers to Alembic
+      :mod:`alembic.operations` constructs, this prefix will be used
+      (i.e. ``op.create_table``). Defaults to "``op.``".
+      Can be ``None`` to indicate no prefix.
+
+    :param sqlalchemy_module_prefix: When autogenerate refers to
+      SQLAlchemy
+      :class:`~sqlalchemy.schema.Column` or type classes, this prefix
+      will be used (i.e. ``sa.Column("somename", sa.Integer)``).
+      Defaults to "``sa.``". Can be ``None`` to indicate no prefix.
+      Note that when dialect-specific types are rendered, autogenerate
+      will render them using the dialect module name, i.e. ``mssql.BIT()``,
+      ``postgresql.UUID()``.
+
+ :param user_module_prefix: When autogenerate refers to a SQLAlchemy
+ type (e.g. :class:`.TypeEngine`) where the module name is not
+ under the ``sqlalchemy`` namespace, this prefix will be used
+ within autogenerate. If left at its default of
+ ``None``, the ``__module__`` attribute of the type is used to
+ render the import module. It's a good practice to set this
+ and to have all custom types be available from a fixed module space,
+ in order to future-proof migration files against reorganizations
+ in modules.
+
+ .. seealso::
+
+ :ref:`autogen_module_prefix`
+
+ :param process_revision_directives: a callable function that will
+ be passed a structure representing the end result of an autogenerate
+ or plain "revision" operation, which can be manipulated to affect
+ how the ``alembic revision`` command ultimately outputs new
+ revision scripts. The structure of the callable is::
+
+ def process_revision_directives(context, revision, directives):
+ pass
+
+ The ``directives`` parameter is a Python list containing
+ a single :class:`.MigrationScript` directive, which represents
+ the revision file to be generated. This list as well as its
+ contents may be freely modified to produce any set of commands.
+ The section :ref:`customizing_revision` shows an example of
+ doing this. The ``context`` parameter is the
+ :class:`.MigrationContext` in use,
+ and ``revision`` is a tuple of revision identifiers representing the
+ current revision of the database.
+
+ The callable is invoked at all times when the ``--autogenerate``
+ option is passed to ``alembic revision``. If ``--autogenerate``
+ is not passed, the callable is invoked only if the
+ ``revision_environment`` variable is set to True in the Alembic
+ configuration, in which case the given ``directives`` collection
+ will contain empty :class:`.UpgradeOps` and :class:`.DowngradeOps`
+ collections for ``.upgrade_ops`` and ``.downgrade_ops``. The
+ ``--autogenerate`` option itself can be inferred by inspecting
+ ``context.config.cmd_opts.autogenerate``.
+
+ The callable function may optionally be an instance of
+ a :class:`.Rewriter` object. This is a helper object that
+ assists in the production of autogenerate-stream rewriter functions.
+
+ .. seealso::
+
+ :ref:`customizing_revision`
+
+ :ref:`autogen_rewriter`
+
+ :paramref:`.command.revision.process_revision_directives`
+
+ Parameters specific to individual backends:
+
+ :param mssql_batch_separator: The "batch separator" which will
+ be placed between each statement when generating offline SQL Server
+ migrations. Defaults to ``GO``. Note this is in addition to the
+ customary semicolon ``;`` at the end of each statement; SQL Server
+ considers the "batch separator" to denote the end of an
+ individual statement execution, and cannot group certain
+ dependent operations in one step.
+    :param oracle_batch_separator: The "batch separator" which will
+      be placed between each statement when generating offline
+      Oracle migrations. Defaults to ``/``. Unlike most other backends,
+      no semicolon is added between Oracle statements.
+
+ """
+
+def execute(
+ sql: Union[Executable, str], execution_options: Optional[dict] = None
+) -> None:
+ """Execute the given SQL using the current change context.
+
+ The behavior of :meth:`.execute` is the same
+ as that of :meth:`.Operations.execute`. Please see that
+ function's documentation for full detail including
+ caveats and limitations.
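+
+    E.g., emitting a literal statement (the table and column names here
+    are illustrative)::
+
+        context.execute("UPDATE some_table SET flag=1")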
+
+ This function requires that a :class:`.MigrationContext` has
+ first been made available via :meth:`.configure`.
+
+ """
+
+def get_bind() -> Connection:
+ """Return the current 'bind'.
+
+ In "online" mode, this is the
+ :class:`sqlalchemy.engine.Connection` currently being used
+ to emit SQL to the database.
+
+ This function requires that a :class:`.MigrationContext`
+ has first been made available via :meth:`.configure`.
+
+ """
+
+def get_context() -> MigrationContext:
+ """Return the current :class:`.MigrationContext` object.
+
+ If :meth:`.EnvironmentContext.configure` has not been
+ called yet, raises an exception.
+
+ """
+
+def get_head_revision() -> Union[str, Tuple[str, ...], None]:
+ """Return the hex identifier of the 'head' script revision.
+
+ If the script directory has multiple heads, this
+ method raises a :class:`.CommandError`;
+ :meth:`.EnvironmentContext.get_head_revisions` should be preferred.
+
+ This function does not require that the :class:`.MigrationContext`
+ has been configured.
+
+ .. seealso:: :meth:`.EnvironmentContext.get_head_revisions`
+
+ """
+
+def get_head_revisions() -> Union[str, Tuple[str, ...], None]:
+ """Return the hex identifier of the 'heads' script revision(s).
+
+    This returns a tuple containing the version numbers of all
+    heads in the script directory.
+
+ This function does not require that the :class:`.MigrationContext`
+ has been configured.
+
+ """
+
+def get_revision_argument() -> Union[str, Tuple[str, ...], None]:
+ """Get the 'destination' revision argument.
+
+ This is typically the argument passed to the
+ ``upgrade`` or ``downgrade`` command.
+
+ If it was specified as ``head``, the actual
+ version number is returned; if specified
+ as ``base``, ``None`` is returned.
+
+ This function does not require that the :class:`.MigrationContext`
+ has been configured.
+
+ """
+
+def get_starting_revision_argument() -> Union[str, Tuple[str, ...], None]:
+ """Return the 'starting revision' argument,
+ if the revision was passed using ``start:end``.
+
+ This is only meaningful in "offline" mode.
+ Returns ``None`` if no value is available
+ or was configured.
+
+ This function does not require that the :class:`.MigrationContext`
+ has been configured.
+
+ """
+
+def get_tag_argument() -> Optional[str]:
+ """Return the value passed for the ``--tag`` argument, if any.
+
+ The ``--tag`` argument is not used directly by Alembic,
+ but is available for custom ``env.py`` configurations that
+ wish to use it; particularly for offline generation scripts
+ that wish to generate tagged filenames.
+
+ This function does not require that the :class:`.MigrationContext`
+ has been configured.
+
+ .. seealso::
+
+ :meth:`.EnvironmentContext.get_x_argument` - a newer and more
+ open ended system of extending ``env.py`` scripts via the command
+ line.
+
+ """
+
+@overload
+def get_x_argument(as_dictionary: Literal[False]) -> List[str]: ...
+@overload
+def get_x_argument(as_dictionary: Literal[True]) -> Dict[str, str]: ...
+@overload
+def get_x_argument(
+ as_dictionary: bool = ...,
+) -> Union[List[str], Dict[str, str]]:
+ """Return the value(s) passed for the ``-x`` argument, if any.
+
+ The ``-x`` argument is an open ended flag that allows any user-defined
+ value or values to be passed on the command line, then available
+ here for consumption by a custom ``env.py`` script.
+
+ The return value is a list, returned directly from the ``argparse``
+ structure. If ``as_dictionary=True`` is passed, the ``x`` arguments
+ are parsed using ``key=value`` format into a dictionary that is
+ then returned.
+
+ For example, to support passing a database URL on the command line,
+ the standard ``env.py`` script can be modified like this::
+
+ cmd_line_url = context.get_x_argument(
+ as_dictionary=True).get('dbname')
+ if cmd_line_url:
+ engine = create_engine(cmd_line_url)
+ else:
+ engine = engine_from_config(
+ config.get_section(config.config_ini_section),
+ prefix='sqlalchemy.',
+ poolclass=pool.NullPool)
+
+ This then takes effect by running the ``alembic`` script as::
+
+ alembic -x dbname=postgresql://user:pass@host/dbname upgrade head
+
+ This function does not require that the :class:`.MigrationContext`
+ has been configured.
+
+ .. seealso::
+
+ :meth:`.EnvironmentContext.get_tag_argument`
+
+ :attr:`.Config.cmd_opts`
+
+ """
+
+def is_offline_mode() -> bool:
+ """Return True if the current migrations environment
+ is running in "offline mode".
+
+ This is ``True`` or ``False`` depending
+ on the ``--sql`` flag passed.
+
+ This function does not require that the :class:`.MigrationContext`
+ has been configured.
+
+ """
+
+def is_transactional_ddl() -> bool:
+ """Return True if the context is configured to expect a
+ transactional DDL capable backend.
+
+    This defaults to the type of database in use, and
+    can be overridden by the ``transactional_ddl`` argument
+    to :meth:`.configure`.
+
+ This function requires that a :class:`.MigrationContext`
+ has first been made available via :meth:`.configure`.
+
+ """
+
+def run_migrations(**kw: Any) -> None:
+ """Run migrations as determined by the current command line
+ configuration
+ as well as versioning information present (or not) in the current
+ database connection (if one is present).
+
+ The function accepts optional ``**kw`` arguments. If these are
+ passed, they are sent directly to the ``upgrade()`` and
+ ``downgrade()``
+ functions within each target revision file. By modifying the
+ ``script.py.mako`` file so that the ``upgrade()`` and ``downgrade()``
+ functions accept arguments, parameters can be passed here so that
+ contextual information, usually information to identify a particular
+ database in use, can be passed from a custom ``env.py`` script
+ to the migration functions.
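+
+    For example, if ``upgrade()`` and ``downgrade()`` were modified to
+    accept an ``engine_name`` argument (an illustrative name), the
+    ``env.py`` script could pass it through as::
+
+        context.run_migrations(engine_name="db1")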
+
+ This function requires that a :class:`.MigrationContext` has
+ first been made available via :meth:`.configure`.
+
+ """
+
+script: ScriptDirectory
+
+def static_output(text: str) -> None:
+ """Emit text directly to the "offline" SQL stream.
+
+    Typically this is for emitting comments that
+    start with ``--``. The statement is not treated
+    as a SQL execution; no ``;`` or batch separator
+    is added.
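+
+    E.g., to emit a comment into the generated SQL script::
+
+        context.static_output("-- custom comment")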
+
+ """
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/ddl/__init__.py b/Backend/venv/lib/python3.12/site-packages/alembic/ddl/__init__.py
new file mode 100644
index 00000000..cfcc47e0
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/alembic/ddl/__init__.py
@@ -0,0 +1,6 @@
+from . import mssql
+from . import mysql
+from . import oracle
+from . import postgresql
+from . import sqlite
+from .impl import DefaultImpl
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/ddl/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/alembic/ddl/__pycache__/__init__.cpython-312.pyc
new file mode 100644
index 00000000..b91f4aa2
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/alembic/ddl/__pycache__/__init__.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/ddl/__pycache__/base.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/alembic/ddl/__pycache__/base.cpython-312.pyc
new file mode 100644
index 00000000..24741380
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/alembic/ddl/__pycache__/base.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/ddl/__pycache__/impl.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/alembic/ddl/__pycache__/impl.cpython-312.pyc
new file mode 100644
index 00000000..12d23aab
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/alembic/ddl/__pycache__/impl.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/ddl/__pycache__/mssql.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/alembic/ddl/__pycache__/mssql.cpython-312.pyc
new file mode 100644
index 00000000..44cdfb28
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/alembic/ddl/__pycache__/mssql.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/ddl/__pycache__/mysql.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/alembic/ddl/__pycache__/mysql.cpython-312.pyc
new file mode 100644
index 00000000..3514f61f
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/alembic/ddl/__pycache__/mysql.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/ddl/__pycache__/oracle.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/alembic/ddl/__pycache__/oracle.cpython-312.pyc
new file mode 100644
index 00000000..7d5d6650
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/alembic/ddl/__pycache__/oracle.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/ddl/__pycache__/postgresql.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/alembic/ddl/__pycache__/postgresql.cpython-312.pyc
new file mode 100644
index 00000000..ddb4c29e
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/alembic/ddl/__pycache__/postgresql.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/ddl/__pycache__/sqlite.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/alembic/ddl/__pycache__/sqlite.cpython-312.pyc
new file mode 100644
index 00000000..766b09db
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/alembic/ddl/__pycache__/sqlite.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/ddl/base.py b/Backend/venv/lib/python3.12/site-packages/alembic/ddl/base.py
new file mode 100644
index 00000000..339db0c4
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/alembic/ddl/base.py
@@ -0,0 +1,332 @@
+from __future__ import annotations
+
+import functools
+from typing import Optional
+from typing import TYPE_CHECKING
+from typing import Union
+
+from sqlalchemy import exc
+from sqlalchemy import Integer
+from sqlalchemy import types as sqltypes
+from sqlalchemy.ext.compiler import compiles
+from sqlalchemy.schema import Column
+from sqlalchemy.schema import DDLElement
+from sqlalchemy.sql.elements import quoted_name
+
+from ..util.sqla_compat import _columns_for_constraint # noqa
+from ..util.sqla_compat import _find_columns # noqa
+from ..util.sqla_compat import _fk_spec # noqa
+from ..util.sqla_compat import _is_type_bound # noqa
+from ..util.sqla_compat import _table_for_constraint # noqa
+
+if TYPE_CHECKING:
+ from typing import Any
+
+ from sqlalchemy.sql.compiler import Compiled
+ from sqlalchemy.sql.compiler import DDLCompiler
+ from sqlalchemy.sql.elements import TextClause
+ from sqlalchemy.sql.functions import Function
+ from sqlalchemy.sql.schema import FetchedValue
+ from sqlalchemy.sql.type_api import TypeEngine
+
+ from .impl import DefaultImpl
+ from ..util.sqla_compat import Computed
+ from ..util.sqla_compat import Identity
+
+_ServerDefault = Union["TextClause", "FetchedValue", "Function[Any]", str]
+
+
+class AlterTable(DDLElement):
+
+ """Represent an ALTER TABLE statement.
+
+    Only the string name and optional schema name of the table
+    are required, not a full Table object.
+
+ """
+
+ def __init__(
+ self,
+ table_name: str,
+ schema: Optional[Union[quoted_name, str]] = None,
+ ) -> None:
+ self.table_name = table_name
+ self.schema = schema
+
+
+class RenameTable(AlterTable):
+ def __init__(
+ self,
+ old_table_name: str,
+ new_table_name: Union[quoted_name, str],
+ schema: Optional[Union[quoted_name, str]] = None,
+ ) -> None:
+ super().__init__(old_table_name, schema=schema)
+ self.new_table_name = new_table_name
+
+
+class AlterColumn(AlterTable):
+ def __init__(
+ self,
+ name: str,
+ column_name: str,
+ schema: Optional[str] = None,
+ existing_type: Optional[TypeEngine] = None,
+ existing_nullable: Optional[bool] = None,
+ existing_server_default: Optional[_ServerDefault] = None,
+ existing_comment: Optional[str] = None,
+ ) -> None:
+ super().__init__(name, schema=schema)
+ self.column_name = column_name
+ self.existing_type = (
+ sqltypes.to_instance(existing_type)
+ if existing_type is not None
+ else None
+ )
+ self.existing_nullable = existing_nullable
+ self.existing_server_default = existing_server_default
+ self.existing_comment = existing_comment
+
+
+class ColumnNullable(AlterColumn):
+ def __init__(
+ self, name: str, column_name: str, nullable: bool, **kw
+ ) -> None:
+ super().__init__(name, column_name, **kw)
+ self.nullable = nullable
+
+
+class ColumnType(AlterColumn):
+ def __init__(
+ self, name: str, column_name: str, type_: TypeEngine, **kw
+ ) -> None:
+ super().__init__(name, column_name, **kw)
+ self.type_ = sqltypes.to_instance(type_)
+
+
+class ColumnName(AlterColumn):
+ def __init__(
+ self, name: str, column_name: str, newname: str, **kw
+ ) -> None:
+ super().__init__(name, column_name, **kw)
+ self.newname = newname
+
+
+class ColumnDefault(AlterColumn):
+ def __init__(
+ self,
+ name: str,
+ column_name: str,
+ default: Optional[_ServerDefault],
+ **kw,
+ ) -> None:
+ super().__init__(name, column_name, **kw)
+ self.default = default
+
+
+class ComputedColumnDefault(AlterColumn):
+ def __init__(
+ self, name: str, column_name: str, default: Optional[Computed], **kw
+ ) -> None:
+ super().__init__(name, column_name, **kw)
+ self.default = default
+
+
+class IdentityColumnDefault(AlterColumn):
+ def __init__(
+ self,
+ name: str,
+ column_name: str,
+ default: Optional[Identity],
+ impl: DefaultImpl,
+ **kw,
+ ) -> None:
+ super().__init__(name, column_name, **kw)
+ self.default = default
+ self.impl = impl
+
+
+class AddColumn(AlterTable):
+ def __init__(
+ self,
+ name: str,
+ column: Column[Any],
+ schema: Optional[Union[quoted_name, str]] = None,
+ ) -> None:
+ super().__init__(name, schema=schema)
+ self.column = column
+
+
+class DropColumn(AlterTable):
+ def __init__(
+ self, name: str, column: Column[Any], schema: Optional[str] = None
+ ) -> None:
+ super().__init__(name, schema=schema)
+ self.column = column
+
+
+class ColumnComment(AlterColumn):
+ def __init__(
+ self, name: str, column_name: str, comment: Optional[str], **kw
+ ) -> None:
+ super().__init__(name, column_name, **kw)
+ self.comment = comment
+
+
+@compiles(RenameTable)
+def visit_rename_table(
+ element: RenameTable, compiler: DDLCompiler, **kw
+) -> str:
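+    # renders e.g. "ALTER TABLE old_name RENAME TO new_name";
+    # identifiers are quoted by the dialect preparer as needed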
+ return "%s RENAME TO %s" % (
+ alter_table(compiler, element.table_name, element.schema),
+ format_table_name(compiler, element.new_table_name, element.schema),
+ )
+
+
+@compiles(AddColumn)
+def visit_add_column(element: AddColumn, compiler: DDLCompiler, **kw) -> str:
+ return "%s %s" % (
+ alter_table(compiler, element.table_name, element.schema),
+ add_column(compiler, element.column, **kw),
+ )
+
+
+@compiles(DropColumn)
+def visit_drop_column(element: DropColumn, compiler: DDLCompiler, **kw) -> str:
+ return "%s %s" % (
+ alter_table(compiler, element.table_name, element.schema),
+ drop_column(compiler, element.column.name, **kw),
+ )
+
+
+@compiles(ColumnNullable)
+def visit_column_nullable(
+ element: ColumnNullable, compiler: DDLCompiler, **kw
+) -> str:
+ return "%s %s %s" % (
+ alter_table(compiler, element.table_name, element.schema),
+ alter_column(compiler, element.column_name),
+ "DROP NOT NULL" if element.nullable else "SET NOT NULL",
+ )
+
+
+@compiles(ColumnType)
+def visit_column_type(element: ColumnType, compiler: DDLCompiler, **kw) -> str:
+ return "%s %s %s" % (
+ alter_table(compiler, element.table_name, element.schema),
+ alter_column(compiler, element.column_name),
+ "TYPE %s" % format_type(compiler, element.type_),
+ )
+
+
+@compiles(ColumnName)
+def visit_column_name(element: ColumnName, compiler: DDLCompiler, **kw) -> str:
+ return "%s RENAME %s TO %s" % (
+ alter_table(compiler, element.table_name, element.schema),
+ format_column_name(compiler, element.column_name),
+ format_column_name(compiler, element.newname),
+ )
+
+
+@compiles(ColumnDefault)
+def visit_column_default(
+ element: ColumnDefault, compiler: DDLCompiler, **kw
+) -> str:
+ return "%s %s %s" % (
+ alter_table(compiler, element.table_name, element.schema),
+ alter_column(compiler, element.column_name),
+ "SET DEFAULT %s" % format_server_default(compiler, element.default)
+ if element.default is not None
+ else "DROP DEFAULT",
+ )
+
+
+@compiles(ComputedColumnDefault)
+def visit_computed_column(
+ element: ComputedColumnDefault, compiler: DDLCompiler, **kw
+):
+ raise exc.CompileError(
+ 'Adding or removing a "computed" construct, e.g. GENERATED '
+ "ALWAYS AS, to or from an existing column is not supported."
+ )
+
+
+@compiles(IdentityColumnDefault)
+def visit_identity_column(
+ element: IdentityColumnDefault, compiler: DDLCompiler, **kw
+):
+ raise exc.CompileError(
+ 'Adding, removing or modifying an "identity" construct, '
+ "e.g. GENERATED AS IDENTITY, to or from an existing "
+ "column is not supported in this dialect."
+ )
+
+
+def quote_dotted(
+ name: Union[quoted_name, str], quote: functools.partial
+) -> Union[quoted_name, str]:
+ """quote the elements of a dotted name"""
+
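+    # e.g. for "myschema.mytable", each dot-separated element is run
+    # through the dialect preparer's quote() individually, rather than
+    # quoting the whole dotted string as one identifier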
+ if isinstance(name, quoted_name):
+ return quote(name)
+ result = ".".join([quote(x) for x in name.split(".")])
+ return result
+
+
+def format_table_name(
+ compiler: Compiled,
+ name: Union[quoted_name, str],
+ schema: Optional[Union[quoted_name, str]],
+) -> Union[quoted_name, str]:
+ quote = functools.partial(compiler.preparer.quote)
+ if schema:
+ return quote_dotted(schema, quote) + "." + quote(name)
+ else:
+ return quote(name)
+
+
+def format_column_name(
+ compiler: DDLCompiler, name: Optional[Union[quoted_name, str]]
+) -> Union[quoted_name, str]:
+ return compiler.preparer.quote(name) # type: ignore[arg-type]
+
+
+def format_server_default(
+ compiler: DDLCompiler,
+ default: Optional[_ServerDefault],
+) -> str:
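+    # compile the default by attaching it to a throwaway Integer column
+    # named "x" and asking the DDL compiler for its default string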
+ return compiler.get_column_default_string(
+ Column("x", Integer, server_default=default)
+ )
+
+
+def format_type(compiler: DDLCompiler, type_: TypeEngine) -> str:
+ return compiler.dialect.type_compiler.process(type_)
+
+
+def alter_table(
+ compiler: DDLCompiler,
+ name: str,
+ schema: Optional[str],
+) -> str:
+ return "ALTER TABLE %s" % format_table_name(compiler, name, schema)
+
+
+def drop_column(compiler: DDLCompiler, name: str, **kw) -> str:
+ return "DROP COLUMN %s" % format_column_name(compiler, name)
+
+
+def alter_column(compiler: DDLCompiler, name: str) -> str:
+ return "ALTER COLUMN %s" % format_column_name(compiler, name)
+
+
+def add_column(compiler: DDLCompiler, column: Column[Any], **kw) -> str:
+ text = "ADD COLUMN %s" % compiler.get_column_specification(column, **kw)
+
+ const = " ".join(
+ compiler.process(constraint) for constraint in column.constraints
+ )
+ if const:
+ text += " " + const
+
+ return text
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/ddl/impl.py b/Backend/venv/lib/python3.12/site-packages/alembic/ddl/impl.py
new file mode 100644
index 00000000..8a7c75d4
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/alembic/ddl/impl.py
@@ -0,0 +1,747 @@
+from __future__ import annotations
+
+from collections import namedtuple
+import re
+from typing import Any
+from typing import Callable
+from typing import Dict
+from typing import Iterable
+from typing import List
+from typing import Mapping
+from typing import Optional
+from typing import Sequence
+from typing import Set
+from typing import Tuple
+from typing import Type
+from typing import TYPE_CHECKING
+from typing import Union
+
+from sqlalchemy import cast
+from sqlalchemy import schema
+from sqlalchemy import text
+
+from . import base
+from .. import util
+from ..util import sqla_compat
+
+if TYPE_CHECKING:
+ from typing import Literal
+ from typing import TextIO
+
+ from sqlalchemy.engine import Connection
+ from sqlalchemy.engine import Dialect
+ from sqlalchemy.engine.cursor import CursorResult
+ from sqlalchemy.engine.reflection import Inspector
+ from sqlalchemy.sql import ClauseElement
+ from sqlalchemy.sql import Executable
+ from sqlalchemy.sql.elements import ColumnElement
+ from sqlalchemy.sql.elements import quoted_name
+ from sqlalchemy.sql.schema import Column
+ from sqlalchemy.sql.schema import Constraint
+ from sqlalchemy.sql.schema import ForeignKeyConstraint
+ from sqlalchemy.sql.schema import Index
+ from sqlalchemy.sql.schema import Table
+ from sqlalchemy.sql.schema import UniqueConstraint
+ from sqlalchemy.sql.selectable import TableClause
+ from sqlalchemy.sql.type_api import TypeEngine
+
+ from .base import _ServerDefault
+ from ..autogenerate.api import AutogenContext
+ from ..operations.batch import ApplyBatchImpl
+ from ..operations.batch import BatchOperationsImpl
+
+
+class ImplMeta(type):
+ def __init__(
+ cls,
+ classname: str,
+ bases: Tuple[Type[DefaultImpl]],
+ dict_: Dict[str, Any],
+ ):
+ newtype = type.__init__(cls, classname, bases, dict_)
+ if "__dialect__" in dict_:
+ _impls[dict_["__dialect__"]] = cls # type: ignore[assignment]
+ return newtype
+
+
+_impls: Dict[str, Type[DefaultImpl]] = {}
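+# e.g. a subclass declaring __dialect__ = "mssql" is registered here
+# automatically by ImplMeta, so DefaultImpl.get_by_dialect() can look
+# it up by dialect.name at runtime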
+
+Params = namedtuple("Params", ["token0", "tokens", "args", "kwargs"])
+
+
+class DefaultImpl(metaclass=ImplMeta):
+
+ """Provide the entrypoint for major migration operations,
+ including database-specific behavioral variances.
+
+ While individual SQL/DDL constructs already provide
+ for database-specific implementations, variances here
+ allow for entirely different sequences of operations
+ to take place for a particular migration, such as
+ SQL Server's special 'IDENTITY INSERT' step for
+ bulk inserts.
+
+ """
+
+ __dialect__ = "default"
+
+ transactional_ddl = False
+ command_terminator = ";"
+ type_synonyms: Tuple[Set[str], ...] = ({"NUMERIC", "DECIMAL"},)
+ type_arg_extract: Sequence[str] = ()
+    # These attributes are deprecated in SQLAlchemy via #10247. They need to
+    # be ignored to support older versions that did not use dialect kwargs.
+    # They only apply to Oracle and are replaced by oracle_order and
+    # oracle_on_null.
+ identity_attrs_ignore: Tuple[str, ...] = ("order", "on_null")
+
+ def __init__(
+ self,
+ dialect: Dialect,
+ connection: Optional[Connection],
+ as_sql: bool,
+ transactional_ddl: Optional[bool],
+ output_buffer: Optional[TextIO],
+ context_opts: Dict[str, Any],
+ ) -> None:
+ self.dialect = dialect
+ self.connection = connection
+ self.as_sql = as_sql
+ self.literal_binds = context_opts.get("literal_binds", False)
+
+ self.output_buffer = output_buffer
+ self.memo: dict = {}
+ self.context_opts = context_opts
+ if transactional_ddl is not None:
+ self.transactional_ddl = transactional_ddl
+
+ if self.literal_binds:
+ if not self.as_sql:
+ raise util.CommandError(
+ "Can't use literal_binds setting without as_sql mode"
+ )
+
+ @classmethod
+ def get_by_dialect(cls, dialect: Dialect) -> Type[DefaultImpl]:
+ return _impls[dialect.name]
+
+ def static_output(self, text: str) -> None:
+ assert self.output_buffer is not None
+ self.output_buffer.write(text + "\n\n")
+ self.output_buffer.flush()
+
+ def requires_recreate_in_batch(
+ self, batch_op: BatchOperationsImpl
+ ) -> bool:
+ """Return True if the given :class:`.BatchOperationsImpl`
+ would need the table to be recreated and copied in order to
+ proceed.
+
+ Normally, only returns True on SQLite when operations other
+ than add_column are present.
+
+ """
+ return False
+
+ def prep_table_for_batch(
+ self, batch_impl: ApplyBatchImpl, table: Table
+ ) -> None:
+ """perform any operations needed on a table before a new
+ one is created to replace it in batch mode.
+
+ the PG dialect uses this to drop constraints on the table
+ before the new one uses those same names.
+
+ """
+
+ @property
+ def bind(self) -> Optional[Connection]:
+ return self.connection
+
+ def _exec(
+ self,
+ construct: Union[Executable, str],
+ execution_options: Optional[dict[str, Any]] = None,
+ multiparams: Sequence[dict] = (),
+ params: Dict[str, Any] = util.immutabledict(),
+ ) -> Optional[CursorResult]:
+ if isinstance(construct, str):
+ construct = text(construct)
+ if self.as_sql:
+ if multiparams or params:
+ # TODO: coverage
+ raise Exception("Execution arguments not allowed with as_sql")
+
+ compile_kw: dict[str, Any]
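+            # literal_binds applies only to DML/queries; DDL elements
+            # are compiled without it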
+ if self.literal_binds and not isinstance(
+ construct, schema.DDLElement
+ ):
+ compile_kw = dict(compile_kwargs={"literal_binds": True})
+ else:
+ compile_kw = {}
+
+ if TYPE_CHECKING:
+ assert isinstance(construct, ClauseElement)
+ compiled = construct.compile(dialect=self.dialect, **compile_kw)
+ self.static_output(
+ str(compiled).replace("\t", " ").strip()
+ + self.command_terminator
+ )
+ return None
+ else:
+ conn = self.connection
+ assert conn is not None
+ if execution_options:
+ conn = conn.execution_options(**execution_options)
+ if params:
+ assert isinstance(multiparams, tuple)
+ multiparams += (params,)
+
+ return conn.execute(construct, multiparams)
+
+ def execute(
+ self,
+ sql: Union[Executable, str],
+ execution_options: Optional[dict[str, Any]] = None,
+ ) -> None:
+ self._exec(sql, execution_options)
+
+ def alter_column(
+ self,
+ table_name: str,
+ column_name: str,
+ nullable: Optional[bool] = None,
+ server_default: Union[_ServerDefault, Literal[False]] = False,
+ name: Optional[str] = None,
+ type_: Optional[TypeEngine] = None,
+ schema: Optional[str] = None,
+ autoincrement: Optional[bool] = None,
+ comment: Optional[Union[str, Literal[False]]] = False,
+ existing_comment: Optional[str] = None,
+ existing_type: Optional[TypeEngine] = None,
+ existing_server_default: Optional[_ServerDefault] = None,
+ existing_nullable: Optional[bool] = None,
+ existing_autoincrement: Optional[bool] = None,
+ **kw: Any,
+ ) -> None:
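+        # each changed aspect (nullability, server default, type,
+        # comment, name) is emitted as its own ALTER statement below;
+        # the rename runs last so earlier statements can refer to the
+        # original column name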
+ if autoincrement is not None or existing_autoincrement is not None:
+ util.warn(
+ "autoincrement and existing_autoincrement "
+ "only make sense for MySQL",
+ stacklevel=3,
+ )
+ if nullable is not None:
+ self._exec(
+ base.ColumnNullable(
+ table_name,
+ column_name,
+ nullable,
+ schema=schema,
+ existing_type=existing_type,
+ existing_server_default=existing_server_default,
+ existing_nullable=existing_nullable,
+ existing_comment=existing_comment,
+ )
+ )
+ if server_default is not False:
+ kw = {}
+ cls_: Type[
+ Union[
+ base.ComputedColumnDefault,
+ base.IdentityColumnDefault,
+ base.ColumnDefault,
+ ]
+ ]
+ if sqla_compat._server_default_is_computed(
+ server_default, existing_server_default
+ ):
+ cls_ = base.ComputedColumnDefault
+ elif sqla_compat._server_default_is_identity(
+ server_default, existing_server_default
+ ):
+ cls_ = base.IdentityColumnDefault
+ kw["impl"] = self
+ else:
+ cls_ = base.ColumnDefault
+ self._exec(
+ cls_(
+ table_name,
+ column_name,
+ server_default, # type:ignore[arg-type]
+ schema=schema,
+ existing_type=existing_type,
+ existing_server_default=existing_server_default,
+ existing_nullable=existing_nullable,
+ existing_comment=existing_comment,
+ **kw,
+ )
+ )
+ if type_ is not None:
+ self._exec(
+ base.ColumnType(
+ table_name,
+ column_name,
+ type_,
+ schema=schema,
+ existing_type=existing_type,
+ existing_server_default=existing_server_default,
+ existing_nullable=existing_nullable,
+ existing_comment=existing_comment,
+ )
+ )
+
+ if comment is not False:
+ self._exec(
+ base.ColumnComment(
+ table_name,
+ column_name,
+ comment,
+ schema=schema,
+ existing_type=existing_type,
+ existing_server_default=existing_server_default,
+ existing_nullable=existing_nullable,
+ existing_comment=existing_comment,
+ )
+ )
+
+ # do the new name last ;)
+ if name is not None:
+ self._exec(
+ base.ColumnName(
+ table_name,
+ column_name,
+ name,
+ schema=schema,
+ existing_type=existing_type,
+ existing_server_default=existing_server_default,
+ existing_nullable=existing_nullable,
+ )
+ )
+
+ def add_column(
+ self,
+ table_name: str,
+ column: Column[Any],
+ schema: Optional[Union[str, quoted_name]] = None,
+ ) -> None:
+ self._exec(base.AddColumn(table_name, column, schema=schema))
+
+ def drop_column(
+ self,
+ table_name: str,
+ column: Column[Any],
+ schema: Optional[str] = None,
+ **kw,
+ ) -> None:
+ self._exec(base.DropColumn(table_name, column, schema=schema))
+
+ def add_constraint(self, const: Any) -> None:
+ if const._create_rule is None or const._create_rule(self):
+ self._exec(schema.AddConstraint(const))
+
+ def drop_constraint(self, const: Constraint) -> None:
+ self._exec(schema.DropConstraint(const))
+
+ def rename_table(
+ self,
+ old_table_name: str,
+ new_table_name: Union[str, quoted_name],
+ schema: Optional[Union[str, quoted_name]] = None,
+ ) -> None:
+ self._exec(
+ base.RenameTable(old_table_name, new_table_name, schema=schema)
+ )
+
+ def create_table(self, table: Table) -> None:
+ table.dispatch.before_create(
+ table, self.connection, checkfirst=False, _ddl_runner=self
+ )
+ self._exec(schema.CreateTable(table))
+ table.dispatch.after_create(
+ table, self.connection, checkfirst=False, _ddl_runner=self
+ )
+ for index in table.indexes:
+ self._exec(schema.CreateIndex(index))
+
+ with_comment = (
+ self.dialect.supports_comments and not self.dialect.inline_comments
+ )
+ comment = table.comment
+ if comment and with_comment:
+ self.create_table_comment(table)
+
+ for column in table.columns:
+ comment = column.comment
+ if comment and with_comment:
+ self.create_column_comment(column)
+
+ def drop_table(self, table: Table) -> None:
+ table.dispatch.before_drop(
+ table, self.connection, checkfirst=False, _ddl_runner=self
+ )
+ self._exec(schema.DropTable(table))
+ table.dispatch.after_drop(
+ table, self.connection, checkfirst=False, _ddl_runner=self
+ )
+
+ def create_index(self, index: Index, **kw: Any) -> None:
+ self._exec(schema.CreateIndex(index, **kw))
+
+ def create_table_comment(self, table: Table) -> None:
+ self._exec(schema.SetTableComment(table))
+
+ def drop_table_comment(self, table: Table) -> None:
+ self._exec(schema.DropTableComment(table))
+
+ def create_column_comment(self, column: ColumnElement[Any]) -> None:
+ self._exec(schema.SetColumnComment(column))
+
+ def drop_index(self, index: Index, **kw: Any) -> None:
+ self._exec(schema.DropIndex(index, **kw))
+
+ def bulk_insert(
+ self,
+ table: Union[TableClause, Table],
+ rows: List[dict],
+ multiinsert: bool = True,
+ ) -> None:
+ if not isinstance(rows, list):
+ raise TypeError("List expected")
+ elif rows and not isinstance(rows[0], dict):
+ raise TypeError("List of dictionaries expected")
+ if self.as_sql:
+ for row in rows:
+ self._exec(
+ sqla_compat._insert_inline(table).values(
+ **{
+ k: sqla_compat._literal_bindparam(
+ k, v, type_=table.c[k].type
+ )
+ if not isinstance(
+ v, sqla_compat._literal_bindparam
+ )
+ else v
+ for k, v in row.items()
+ }
+ )
+ )
+ else:
+ if rows:
+ if multiinsert:
+ self._exec(
+ sqla_compat._insert_inline(table), multiparams=rows
+ )
+ else:
+ for row in rows:
+ self._exec(
+ sqla_compat._insert_inline(table).values(**row)
+ )
+
+ def _tokenize_column_type(self, column: Column) -> Params:
+ definition = self.dialect.type_compiler.process(column.type).lower()
+
+ # tokenize the SQLAlchemy-generated version of a type, so that
+ # the two can be compared.
+ #
+ # examples:
+ # NUMERIC(10, 5)
+ # TIMESTAMP WITH TIMEZONE
+ # INTEGER UNSIGNED
+ # INTEGER (10) UNSIGNED
+ # INTEGER(10) UNSIGNED
+ # varchar character set utf8
+ #
+
+ tokens = re.findall(r"[\w\-_]+|\(.+?\)", definition)
+
+ term_tokens = []
+ paren_term = None
+
+ for token in tokens:
+ if re.match(r"^\(.*\)$", token):
+ paren_term = token
+ else:
+ term_tokens.append(token)
+
+ params = Params(term_tokens[0], term_tokens[1:], [], {})
+
+ if paren_term:
+ for term in re.findall("[^(),]+", paren_term):
+ if "=" in term:
+ key, val = term.split("=")
+ params.kwargs[key.strip()] = val.strip()
+ else:
+ params.args.append(term.strip())
+
+ return params
+
+ def _column_types_match(
+ self, inspector_params: Params, metadata_params: Params
+ ) -> bool:
+ if inspector_params.token0 == metadata_params.token0:
+ return True
+
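+        # fall back to the dialect's type_synonyms sets: two types match
+        # if both full definitions, or both leading tokens, fall within
+        # the same synonym set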
+ synonyms = [{t.lower() for t in batch} for batch in self.type_synonyms]
+ inspector_all_terms = " ".join(
+ [inspector_params.token0] + inspector_params.tokens
+ )
+ metadata_all_terms = " ".join(
+ [metadata_params.token0] + metadata_params.tokens
+ )
+
+ for batch in synonyms:
+ if {inspector_all_terms, metadata_all_terms}.issubset(batch) or {
+ inspector_params.token0,
+ metadata_params.token0,
+ }.issubset(batch):
+ return True
+ return False
+
+ def _column_args_match(
+ self, inspected_params: Params, meta_params: Params
+ ) -> bool:
+ """We want to compare column parameters. However, we only want
+ to compare parameters that are set. If they both have `collation`,
+ we want to make sure they are the same. However, if only one
+ specifies it, dont flag it for being less specific
+ """
+
+ if (
+ len(meta_params.tokens) == len(inspected_params.tokens)
+ and meta_params.tokens != inspected_params.tokens
+ ):
+ return False
+
+ if (
+ len(meta_params.args) == len(inspected_params.args)
+ and meta_params.args != inspected_params.args
+ ):
+ return False
+
+ insp = " ".join(inspected_params.tokens).lower()
+ meta = " ".join(meta_params.tokens).lower()
+
+ for reg in self.type_arg_extract:
+ mi = re.search(reg, insp)
+ mm = re.search(reg, meta)
+
+ if mi and mm and mi.group(1) != mm.group(1):
+ return False
+
+ return True
+
+ def compare_type(
+ self, inspector_column: Column[Any], metadata_column: Column
+ ) -> bool:
+ """Returns True if there ARE differences between the types of the two
+ columns. Takes impl.type_synonyms into account between retrospected
+ and metadata types
+ """
+ inspector_params = self._tokenize_column_type(inspector_column)
+ metadata_params = self._tokenize_column_type(metadata_column)
+
+ if not self._column_types_match(inspector_params, metadata_params):
+ return True
+ if not self._column_args_match(inspector_params, metadata_params):
+ return True
+ return False
+
+ def compare_server_default(
+ self,
+ inspector_column,
+ metadata_column,
+ rendered_metadata_default,
+ rendered_inspector_default,
+ ):
+ return rendered_inspector_default != rendered_metadata_default
+
+ def correct_for_autogen_constraints(
+ self,
+ conn_uniques: Set[UniqueConstraint],
+ conn_indexes: Set[Index],
+ metadata_unique_constraints: Set[UniqueConstraint],
+ metadata_indexes: Set[Index],
+ ) -> None:
+ pass
+
+ def cast_for_batch_migrate(self, existing, existing_transfer, new_type):
+ if existing.type._type_affinity is not new_type._type_affinity:
+ existing_transfer["expr"] = cast(
+ existing_transfer["expr"], new_type
+ )
+
+ def render_ddl_sql_expr(
+ self, expr: ClauseElement, is_server_default: bool = False, **kw: Any
+ ) -> str:
+ """Render a SQL expression that is typically a server default,
+ index expression, etc.
+
+ """
+
+ compile_kw = {"literal_binds": True, "include_table": False}
+
+ return str(
+ expr.compile(dialect=self.dialect, compile_kwargs=compile_kw)
+ )
+
+ def _compat_autogen_column_reflect(self, inspector: Inspector) -> Callable:
+ return self.autogen_column_reflect
+
+ def correct_for_autogen_foreignkeys(
+ self,
+ conn_fks: Set[ForeignKeyConstraint],
+ metadata_fks: Set[ForeignKeyConstraint],
+ ) -> None:
+ pass
+
+ def autogen_column_reflect(self, inspector, table, column_info):
+ """A hook that is attached to the 'column_reflect' event for when
+ a Table is reflected from the database during the autogenerate
+ process.
+
+ Dialects can elect to modify the information gathered here.
+
+ """
+
+ def start_migrations(self) -> None:
+ """A hook called when :meth:`.EnvironmentContext.run_migrations`
+ is called.
+
+ Implementations can set up per-migration-run state here.
+
+ """
+
+ def emit_begin(self) -> None:
+ """Emit the string ``BEGIN``, or the backend-specific
+ equivalent, on the current connection context.
+
+ This is used in offline mode and typically
+ via :meth:`.EnvironmentContext.begin_transaction`.
+
+ """
+ self.static_output("BEGIN" + self.command_terminator)
+
+ def emit_commit(self) -> None:
+ """Emit the string ``COMMIT``, or the backend-specific
+ equivalent, on the current connection context.
+
+ This is used in offline mode and typically
+ via :meth:`.EnvironmentContext.begin_transaction`.
+
+ """
+ self.static_output("COMMIT" + self.command_terminator)
+
+ def render_type(
+ self, type_obj: TypeEngine, autogen_context: AutogenContext
+ ) -> Union[str, Literal[False]]:
+ return False
+
+ def _compare_identity_default(self, metadata_identity, inspector_identity):
+        # ignored contains the attributes that were not considered
+        # because they are assumed to be at their default values in the db.
+ diff, ignored = _compare_identity_options(
+ metadata_identity,
+ inspector_identity,
+ sqla_compat.Identity(),
+ skip={"always"},
+ )
+
+ meta_always = getattr(metadata_identity, "always", None)
+ inspector_always = getattr(inspector_identity, "always", None)
+ # None and False are the same in this comparison
+ if bool(meta_always) != bool(inspector_always):
+ diff.add("always")
+
+ diff.difference_update(self.identity_attrs_ignore)
+
+ # returns 3 values:
+ return (
+ # different identity attributes
+ diff,
+ # ignored identity attributes
+ ignored,
+ # if the two identity should be considered different
+ bool(diff) or bool(metadata_identity) != bool(inspector_identity),
+ )
+
+ def create_index_sig(self, index: Index) -> Tuple[Any, ...]:
+        # order of columns matters in an index
+ return tuple(col.name for col in index.columns)
+
+ def create_unique_constraint_sig(
+ self, const: UniqueConstraint
+ ) -> Tuple[Any, ...]:
+        # order of columns does not matter in a unique constraint
+ return tuple(sorted([col.name for col in const.columns]))
+
+ def _skip_functional_indexes(self, metadata_indexes, conn_indexes):
+ conn_indexes_by_name = {c.name: c for c in conn_indexes}
+
+ for idx in list(metadata_indexes):
+ if idx.name in conn_indexes_by_name:
+ continue
+ iex = sqla_compat.is_expression_index(idx)
+ if iex:
+ util.warn(
+ "autogenerate skipping metadata-specified "
+ "expression-based index "
+ f"{idx.name!r}; dialect {self.__dialect__!r} under "
+ f"SQLAlchemy {sqla_compat.sqlalchemy_version} can't "
+ "reflect these indexes so they can't be compared"
+ )
+ metadata_indexes.discard(idx)
+
+ def adjust_reflected_dialect_options(
+ self, reflected_object: Dict[str, Any], kind: str
+ ) -> Dict[str, Any]:
+ return reflected_object.get("dialect_options", {})
+
+
+def _compare_identity_options(
+ metadata_io: Union[schema.Identity, schema.Sequence, None],
+ inspector_io: Union[schema.Identity, schema.Sequence, None],
+ default_io: Union[schema.Identity, schema.Sequence],
+ skip: Set[str],
+):
+    # this can be used for identity or sequence compare.
+    # default_io is an instance of IdentityOption with all attributes
+    # set to their default values.
+ meta_d = sqla_compat._get_identity_options_dict(metadata_io)
+ insp_d = sqla_compat._get_identity_options_dict(inspector_io)
+
+ diff = set()
+ ignored_attr = set()
+
+ def check_dicts(
+ meta_dict: Mapping[str, Any],
+ insp_dict: Mapping[str, Any],
+ default_dict: Mapping[str, Any],
+ attrs: Iterable[str],
+ ):
+ for attr in set(attrs).difference(skip):
+ meta_value = meta_dict.get(attr)
+ insp_value = insp_dict.get(attr)
+ if insp_value != meta_value:
+ default_value = default_dict.get(attr)
+ if meta_value == default_value:
+ ignored_attr.add(attr)
+ else:
+ diff.add(attr)
+
+ check_dicts(
+ meta_d,
+ insp_d,
+ sqla_compat._get_identity_options_dict(default_io),
+ set(meta_d).union(insp_d),
+ )
+ if sqla_compat.identity_has_dialect_kwargs:
+ # use only the dialect kwargs in inspector_io since metadata_io
+ # can have options for many backends
+ check_dicts(
+ getattr(metadata_io, "dialect_kwargs", {}),
+ getattr(inspector_io, "dialect_kwargs", {}),
+ default_io.dialect_kwargs, # type: ignore[union-attr]
+ getattr(inspector_io, "dialect_kwargs", {}),
+ )
+
+ return diff, ignored_attr
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/ddl/mssql.py b/Backend/venv/lib/python3.12/site-packages/alembic/ddl/mssql.py
new file mode 100644
index 00000000..9b0fff88
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/alembic/ddl/mssql.py
@@ -0,0 +1,416 @@
+from __future__ import annotations
+
+import re
+from typing import Any
+from typing import Dict
+from typing import List
+from typing import Optional
+from typing import TYPE_CHECKING
+from typing import Union
+
+from sqlalchemy import types as sqltypes
+from sqlalchemy.ext.compiler import compiles
+from sqlalchemy.schema import Column
+from sqlalchemy.schema import CreateIndex
+from sqlalchemy.sql.base import Executable
+from sqlalchemy.sql.elements import ClauseElement
+
+from .base import AddColumn
+from .base import alter_column
+from .base import alter_table
+from .base import ColumnDefault
+from .base import ColumnName
+from .base import ColumnNullable
+from .base import ColumnType
+from .base import format_column_name
+from .base import format_server_default
+from .base import format_table_name
+from .base import format_type
+from .base import RenameTable
+from .impl import DefaultImpl
+from .. import util
+from ..util import sqla_compat
+
+if TYPE_CHECKING:
+ from typing import Literal
+
+ from sqlalchemy.dialects.mssql.base import MSDDLCompiler
+ from sqlalchemy.dialects.mssql.base import MSSQLCompiler
+ from sqlalchemy.engine.cursor import CursorResult
+ from sqlalchemy.sql.schema import Index
+ from sqlalchemy.sql.schema import Table
+ from sqlalchemy.sql.selectable import TableClause
+ from sqlalchemy.sql.type_api import TypeEngine
+
+ from .base import _ServerDefault
+
+
+class MSSQLImpl(DefaultImpl):
+ __dialect__ = "mssql"
+ transactional_ddl = True
+ batch_separator = "GO"
+
+ type_synonyms = DefaultImpl.type_synonyms + ({"VARCHAR", "NVARCHAR"},)
+ identity_attrs_ignore = DefaultImpl.identity_attrs_ignore + (
+ "minvalue",
+ "maxvalue",
+ "nominvalue",
+ "nomaxvalue",
+ "cycle",
+ "cache",
+ )
+
+ def __init__(self, *arg, **kw) -> None:
+ super().__init__(*arg, **kw)
+ self.batch_separator = self.context_opts.get(
+ "mssql_batch_separator", self.batch_separator
+ )
+
+ def _exec(self, construct: Any, *args, **kw) -> Optional[CursorResult]:
+ result = super()._exec(construct, *args, **kw)
+ if self.as_sql and self.batch_separator:
+ self.static_output(self.batch_separator)
+ return result
+
+ def emit_begin(self) -> None:
+ self.static_output("BEGIN TRANSACTION" + self.command_terminator)
+
+ def emit_commit(self) -> None:
+ super().emit_commit()
+ if self.as_sql and self.batch_separator:
+ self.static_output(self.batch_separator)
+
+ def alter_column( # type:ignore[override]
+ self,
+ table_name: str,
+ column_name: str,
+ nullable: Optional[bool] = None,
+ server_default: Optional[
+ Union[_ServerDefault, Literal[False]]
+ ] = False,
+ name: Optional[str] = None,
+ type_: Optional[TypeEngine] = None,
+ schema: Optional[str] = None,
+ existing_type: Optional[TypeEngine] = None,
+ existing_server_default: Optional[_ServerDefault] = None,
+ existing_nullable: Optional[bool] = None,
+ **kw: Any,
+ ) -> None:
+ if nullable is not None:
+ if type_ is not None:
+ # the NULL/NOT NULL alter will handle
+ # the type alteration
+ existing_type = type_
+ type_ = None
+ elif existing_type is None:
+ raise util.CommandError(
+ "MS-SQL ALTER COLUMN operations "
+ "with NULL or NOT NULL require the "
+ "existing_type or a new type_ be passed."
+ )
+ elif existing_nullable is not None and type_ is not None:
+ nullable = existing_nullable
+
+ # the NULL/NOT NULL alter will handle
+ # the type alteration
+ existing_type = type_
+ type_ = None
+
+ elif type_ is not None:
+ util.warn(
+ "MS-SQL ALTER COLUMN operations that specify type_= "
+ "should also specify a nullable= or "
+ "existing_nullable= argument to avoid implicit conversion "
+ "of NOT NULL columns to NULL."
+ )
+
+ used_default = False
+ if sqla_compat._server_default_is_identity(
+ server_default, existing_server_default
+ ) or sqla_compat._server_default_is_computed(
+ server_default, existing_server_default
+ ):
+ used_default = True
+ kw["server_default"] = server_default
+ kw["existing_server_default"] = existing_server_default
+
+ super().alter_column(
+ table_name,
+ column_name,
+ nullable=nullable,
+ type_=type_,
+ schema=schema,
+ existing_type=existing_type,
+ existing_nullable=existing_nullable,
+ **kw,
+ )
+
+ if server_default is not False and used_default is False:
+ if existing_server_default is not False or server_default is None:
+ self._exec(
+ _ExecDropConstraint(
+ table_name,
+ column_name,
+ "sys.default_constraints",
+ schema,
+ )
+ )
+ if server_default is not None:
+ super().alter_column(
+ table_name,
+ column_name,
+ schema=schema,
+ server_default=server_default,
+ )
+
+ if name is not None:
+ super().alter_column(
+ table_name, column_name, schema=schema, name=name
+ )
+
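+ # Hedged usage sketch (illustrative, not upstream code): on MSSQL a
+ # nullability change must know the column type, so a migration script
+ # passes existing_type; "t" and "c" are hypothetical names.
+ #
+ #     op.alter_column(
+ #         "t", "c", nullable=False, existing_type=sa.String(50)
+ #     )
+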
+ def create_index(self, index: Index, **kw: Any) -> None:
+ # "mssql_include" likely defaults to None if not present, so get()
+ # should normally not return the default value; being defensive
+ # in any case
+ mssql_include = index.kwargs.get("mssql_include", None) or ()
+ assert index.table is not None
+ for col in mssql_include:
+ if col not in index.table.c:
+ index.table.append_column(Column(col, sqltypes.NullType))
+ self._exec(CreateIndex(index, **kw))
+
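+ # Illustrative sketch (not upstream code): INCLUDE columns reach this
+ # method via the SQLAlchemy dialect keyword; the names are
+ # hypothetical.
+ #
+ #     sa.Index("ix_t_a", t.c.a, mssql_include=["b"])
+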
+ def bulk_insert( # type:ignore[override]
+ self, table: Union[TableClause, Table], rows: List[dict], **kw: Any
+ ) -> None:
+ if self.as_sql:
+ self._exec(
+ "SET IDENTITY_INSERT %s ON"
+ % self.dialect.identifier_preparer.format_table(table)
+ )
+ super().bulk_insert(table, rows, **kw)
+ self._exec(
+ "SET IDENTITY_INSERT %s OFF"
+ % self.dialect.identifier_preparer.format_table(table)
+ )
+ else:
+ super().bulk_insert(table, rows, **kw)
+
+ def drop_column(
+ self,
+ table_name: str,
+ column: Column[Any],
+ schema: Optional[str] = None,
+ **kw,
+ ) -> None:
+ drop_default = kw.pop("mssql_drop_default", False)
+ if drop_default:
+ self._exec(
+ _ExecDropConstraint(
+ table_name, column, "sys.default_constraints", schema
+ )
+ )
+ drop_check = kw.pop("mssql_drop_check", False)
+ if drop_check:
+ self._exec(
+ _ExecDropConstraint(
+ table_name, column, "sys.check_constraints", schema
+ )
+ )
+ drop_fks = kw.pop("mssql_drop_foreign_key", False)
+ if drop_fks:
+ self._exec(_ExecDropFKConstraint(table_name, column, schema))
+ super().drop_column(table_name, column, schema=schema, **kw)
+
+ def compare_server_default(
+ self,
+ inspector_column,
+ metadata_column,
+ rendered_metadata_default,
+ rendered_inspector_default,
+ ):
+ if rendered_metadata_default is not None:
+ rendered_metadata_default = re.sub(
+ r"[\(\) \"\']", "", rendered_metadata_default
+ )
+
+ if rendered_inspector_default is not None:
+ # SQL Server collapses whitespace and adds arbitrary parentheses
+ # within expressions; our only option is to collapse all of it
+
+ rendered_inspector_default = re.sub(
+ r"[\(\) \"\']", "", rendered_inspector_default
+ )
+
+ return rendered_inspector_default != rendered_metadata_default
+
+ def _compare_identity_default(self, metadata_identity, inspector_identity):
+ diff, ignored, is_alter = super()._compare_identity_default(
+ metadata_identity, inspector_identity
+ )
+
+ if (
+ metadata_identity is None
+ and inspector_identity is not None
+ and not diff
+ and inspector_identity.column is not None
+ and inspector_identity.column.primary_key
+ ):
+ # mssql reflects primary keys with autoincrement as identity
+ # columns. if no differing attributes are present, ignore them
+ is_alter = False
+
+ return diff, ignored, is_alter
+
+ def adjust_reflected_dialect_options(
+ self, reflected_object: Dict[str, Any], kind: str
+ ) -> Dict[str, Any]:
+ options: Dict[str, Any]
+ options = reflected_object.get("dialect_options", {}).copy()
+ if not options.get("mssql_include"):
+ options.pop("mssql_include", None)
+ if not options.get("mssql_clustered"):
+ options.pop("mssql_clustered", None)
+ return options
+
+
+class _ExecDropConstraint(Executable, ClauseElement):
+ inherit_cache = False
+
+ def __init__(
+ self,
+ tname: str,
+ colname: Union[Column[Any], str],
+ type_: str,
+ schema: Optional[str],
+ ) -> None:
+ self.tname = tname
+ self.colname = colname
+ self.type_ = type_
+ self.schema = schema
+
+
+class _ExecDropFKConstraint(Executable, ClauseElement):
+ inherit_cache = False
+
+ def __init__(
+ self, tname: str, colname: Column[Any], schema: Optional[str]
+ ) -> None:
+ self.tname = tname
+ self.colname = colname
+ self.schema = schema
+
+
+@compiles(_ExecDropConstraint, "mssql")
+def _exec_drop_col_constraint(
+ element: _ExecDropConstraint, compiler: MSSQLCompiler, **kw
+) -> str:
+ schema, tname, colname, type_ = (
+ element.schema,
+ element.tname,
+ element.colname,
+ element.type_,
+ )
+ # from http://www.mssqltips.com/sqlservertip/1425/\
+ # working-with-default-constraints-in-sql-server/
+ return """declare @const_name varchar(256)
+select @const_name = QUOTENAME([name]) from %(type)s
+where parent_object_id = object_id('%(schema_dot)s%(tname)s')
+and col_name(parent_object_id, parent_column_id) = '%(colname)s'
+exec('alter table %(tname_quoted)s drop constraint ' + @const_name)""" % {
+ "type": type_,
+ "tname": tname,
+ "colname": colname,
+ "tname_quoted": format_table_name(compiler, tname, schema),
+ "schema_dot": schema + "." if schema else "",
+ }
+
+
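+# Illustrative rendering sketch (not upstream code): for a hypothetical
+# table "t" and column "c" with no schema, the construct above compiles
+# to a batch that resolves the constraint name at execution time:
+#
+#     declare @const_name varchar(256)
+#     select @const_name = QUOTENAME([name]) from sys.default_constraints
+#     where parent_object_id = object_id('t')
+#     and col_name(parent_object_id, parent_column_id) = 'c'
+#     exec('alter table t drop constraint ' + @const_name)
+
+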
+@compiles(_ExecDropFKConstraint, "mssql")
+def _exec_drop_col_fk_constraint(
+ element: _ExecDropFKConstraint, compiler: MSSQLCompiler, **kw
+) -> str:
+ schema, tname, colname = element.schema, element.tname, element.colname
+
+ return """declare @const_name varchar(256)
+select @const_name = QUOTENAME([name]) from
+sys.foreign_keys fk join sys.foreign_key_columns fkc
+on fk.object_id=fkc.constraint_object_id
+where fkc.parent_object_id = object_id('%(schema_dot)s%(tname)s')
+and col_name(fkc.parent_object_id, fkc.parent_column_id) = '%(colname)s'
+exec('alter table %(tname_quoted)s drop constraint ' + @const_name)""" % {
+ "tname": tname,
+ "colname": colname,
+ "tname_quoted": format_table_name(compiler, tname, schema),
+ "schema_dot": schema + "." if schema else "",
+ }
+
+
+@compiles(AddColumn, "mssql")
+def visit_add_column(element: AddColumn, compiler: MSDDLCompiler, **kw) -> str:
+ return "%s %s" % (
+ alter_table(compiler, element.table_name, element.schema),
+ mssql_add_column(compiler, element.column, **kw),
+ )
+
+
+def mssql_add_column(
+ compiler: MSDDLCompiler, column: Column[Any], **kw
+) -> str:
+ return "ADD %s" % compiler.get_column_specification(column, **kw)
+
+
+@compiles(ColumnNullable, "mssql")
+def visit_column_nullable(
+ element: ColumnNullable, compiler: MSDDLCompiler, **kw
+) -> str:
+ return "%s %s %s %s" % (
+ alter_table(compiler, element.table_name, element.schema),
+ alter_column(compiler, element.column_name),
+ format_type(compiler, element.existing_type), # type: ignore[arg-type]
+ "NULL" if element.nullable else "NOT NULL",
+ )
+
+
+@compiles(ColumnDefault, "mssql")
+def visit_column_default(
+ element: ColumnDefault, compiler: MSDDLCompiler, **kw
+) -> str:
+ # TODO: there can also be a named constraint
+ # with ADD CONSTRAINT here
+ return "%s ADD DEFAULT %s FOR %s" % (
+ alter_table(compiler, element.table_name, element.schema),
+ format_server_default(compiler, element.default),
+ format_column_name(compiler, element.column_name),
+ )
+
+
+@compiles(ColumnName, "mssql")
+def visit_rename_column(
+ element: ColumnName, compiler: MSDDLCompiler, **kw
+) -> str:
+ return "EXEC sp_rename '%s.%s', %s, 'COLUMN'" % (
+ format_table_name(compiler, element.table_name, element.schema),
+ format_column_name(compiler, element.column_name),
+ format_column_name(compiler, element.newname),
+ )
+
+
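+# Illustrative sketch (not upstream code): renaming hypothetical column
+# "a" to "b" on table "t" renders as:
+#
+#     EXEC sp_rename 't.a', b, 'COLUMN'
+
+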
+@compiles(ColumnType, "mssql")
+def visit_column_type(
+ element: ColumnType, compiler: MSDDLCompiler, **kw
+) -> str:
+ return "%s %s %s" % (
+ alter_table(compiler, element.table_name, element.schema),
+ alter_column(compiler, element.column_name),
+ format_type(compiler, element.type_),
+ )
+
+
+@compiles(RenameTable, "mssql")
+def visit_rename_table(
+ element: RenameTable, compiler: MSDDLCompiler, **kw
+) -> str:
+ return "EXEC sp_rename '%s', %s" % (
+ format_table_name(compiler, element.table_name, element.schema),
+ format_table_name(compiler, element.new_table_name, None),
+ )
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/ddl/mysql.py b/Backend/venv/lib/python3.12/site-packages/alembic/ddl/mysql.py
new file mode 100644
index 00000000..32ced498
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/alembic/ddl/mysql.py
@@ -0,0 +1,471 @@
+from __future__ import annotations
+
+import re
+from typing import Any
+from typing import Optional
+from typing import TYPE_CHECKING
+from typing import Union
+
+from sqlalchemy import schema
+from sqlalchemy import types as sqltypes
+from sqlalchemy.ext.compiler import compiles
+
+from .base import alter_table
+from .base import AlterColumn
+from .base import ColumnDefault
+from .base import ColumnName
+from .base import ColumnNullable
+from .base import ColumnType
+from .base import format_column_name
+from .base import format_server_default
+from .impl import DefaultImpl
+from .. import util
+from ..autogenerate import compare
+from ..util import sqla_compat
+from ..util.sqla_compat import _is_mariadb
+from ..util.sqla_compat import _is_type_bound
+
+if TYPE_CHECKING:
+ from typing import Literal
+
+ from sqlalchemy.dialects.mysql.base import MySQLDDLCompiler
+ from sqlalchemy.sql.ddl import DropConstraint
+ from sqlalchemy.sql.schema import Constraint
+ from sqlalchemy.sql.type_api import TypeEngine
+
+ from .base import _ServerDefault
+
+
+class MySQLImpl(DefaultImpl):
+ __dialect__ = "mysql"
+
+ transactional_ddl = False
+ type_synonyms = DefaultImpl.type_synonyms + (
+ {"BOOL", "TINYINT"},
+ {"JSON", "LONGTEXT"},
+ )
+ type_arg_extract = [r"character set ([\w\-_]+)", r"collate ([\w\-_]+)"]
+
+ def alter_column( # type:ignore[override]
+ self,
+ table_name: str,
+ column_name: str,
+ nullable: Optional[bool] = None,
+ server_default: Union[_ServerDefault, Literal[False]] = False,
+ name: Optional[str] = None,
+ type_: Optional[TypeEngine] = None,
+ schema: Optional[str] = None,
+ existing_type: Optional[TypeEngine] = None,
+ existing_server_default: Optional[_ServerDefault] = None,
+ existing_nullable: Optional[bool] = None,
+ autoincrement: Optional[bool] = None,
+ existing_autoincrement: Optional[bool] = None,
+ comment: Optional[Union[str, Literal[False]]] = False,
+ existing_comment: Optional[str] = None,
+ **kw: Any,
+ ) -> None:
+ if sqla_compat._server_default_is_identity(
+ server_default, existing_server_default
+ ) or sqla_compat._server_default_is_computed(
+ server_default, existing_server_default
+ ):
+ # modifying computed or identity columns is not supported;
+ # the default implementation will raise
+ super().alter_column(
+ table_name,
+ column_name,
+ nullable=nullable,
+ type_=type_,
+ schema=schema,
+ existing_type=existing_type,
+ existing_nullable=existing_nullable,
+ server_default=server_default,
+ existing_server_default=existing_server_default,
+ **kw,
+ )
+ if name is not None or self._is_mysql_allowed_functional_default(
+ type_ if type_ is not None else existing_type, server_default
+ ):
+ self._exec(
+ MySQLChangeColumn(
+ table_name,
+ column_name,
+ schema=schema,
+ newname=name if name is not None else column_name,
+ nullable=nullable
+ if nullable is not None
+ else existing_nullable
+ if existing_nullable is not None
+ else True,
+ type_=type_ if type_ is not None else existing_type,
+ default=server_default
+ if server_default is not False
+ else existing_server_default,
+ autoincrement=autoincrement
+ if autoincrement is not None
+ else existing_autoincrement,
+ comment=comment
+ if comment is not False
+ else existing_comment,
+ )
+ )
+ elif (
+ nullable is not None
+ or type_ is not None
+ or autoincrement is not None
+ or comment is not False
+ ):
+ self._exec(
+ MySQLModifyColumn(
+ table_name,
+ column_name,
+ schema=schema,
+ newname=name if name is not None else column_name,
+ nullable=nullable
+ if nullable is not None
+ else existing_nullable
+ if existing_nullable is not None
+ else True,
+ type_=type_ if type_ is not None else existing_type,
+ default=server_default
+ if server_default is not False
+ else existing_server_default,
+ autoincrement=autoincrement
+ if autoincrement is not None
+ else existing_autoincrement,
+ comment=comment
+ if comment is not False
+ else existing_comment,
+ )
+ )
+ elif server_default is not False:
+ self._exec(
+ MySQLAlterDefault(
+ table_name, column_name, server_default, schema=schema
+ )
+ )
+
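+ # Hedged usage sketch (illustrative, not upstream code): MySQL
+ # CHANGE/MODIFY rebuilds the whole column definition, so a migration
+ # script passes the existing type; "t" and "c" are hypothetical names.
+ #
+ #     op.alter_column(
+ #         "t", "c", nullable=True, existing_type=sa.Integer()
+ #     )
+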
+ def drop_constraint(
+ self,
+ const: Constraint,
+ ) -> None:
+ if isinstance(const, schema.CheckConstraint) and _is_type_bound(const):
+ return
+
+ super().drop_constraint(const)
+
+ def _is_mysql_allowed_functional_default(
+ self,
+ type_: Optional[TypeEngine],
+ server_default: Union[_ServerDefault, Literal[False]],
+ ) -> bool:
+ return (
+ type_ is not None
+ and type_._type_affinity # type:ignore[attr-defined]
+ is sqltypes.DateTime
+ and server_default is not None
+ )
+
+ def compare_server_default(
+ self,
+ inspector_column,
+ metadata_column,
+ rendered_metadata_default,
+ rendered_inspector_default,
+ ):
+ # partially a workaround for SQLAlchemy issue #3023; if the
+ # column were created without "NOT NULL", MySQL may have added
+ # an implicit default of '0' which we need to skip
+ # TODO: this is not really covered anymore?
+ if (
+ metadata_column.type._type_affinity is sqltypes.Integer
+ and inspector_column.primary_key
+ and not inspector_column.autoincrement
+ and not rendered_metadata_default
+ and rendered_inspector_default == "'0'"
+ ):
+ return False
+ elif (
+ rendered_inspector_default
+ and inspector_column.type._type_affinity is sqltypes.Integer
+ ):
+ rendered_inspector_default = (
+ re.sub(r"^'|'$", "", rendered_inspector_default)
+ if rendered_inspector_default is not None
+ else None
+ )
+ return rendered_inspector_default != rendered_metadata_default
+ elif (
+ rendered_metadata_default
+ and metadata_column.type._type_affinity is sqltypes.String
+ ):
+ metadata_default = re.sub(r"^'|'$", "", rendered_metadata_default)
+ return rendered_inspector_default != f"'{metadata_default}'"
+ elif rendered_inspector_default and rendered_metadata_default:
+ # adjust for "function()" vs. "FUNCTION" as can occur particularly
+ # for the CURRENT_TIMESTAMP function on newer MariaDB versions
+
+ # SQLAlchemy MySQL dialect bundles ON UPDATE into the server
+ # default; adjust for this possibly being present.
+ onupdate_ins = re.match(
+ r"(.*) (on update.*?)(?:\(\))?$",
+ rendered_inspector_default.lower(),
+ )
+ onupdate_met = re.match(
+ r"(.*) (on update.*?)(?:\(\))?$",
+ rendered_metadata_default.lower(),
+ )
+
+ if onupdate_ins:
+ if not onupdate_met:
+ return True
+ elif onupdate_ins.group(2) != onupdate_met.group(2):
+ return True
+
+ rendered_inspector_default = onupdate_ins.group(1)
+ rendered_metadata_default = onupdate_met.group(1)
+
+ return re.sub(
+ r"(.*?)(?:\(\))?$", r"\1", rendered_inspector_default.lower()
+ ) != re.sub(
+ r"(.*?)(?:\(\))?$", r"\1", rendered_metadata_default.lower()
+ )
+ else:
+ return rendered_inspector_default != rendered_metadata_default
+
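+ # Illustrative sketch (not upstream code) of the normalization above,
+ # assuming two hypothetical DATETIME columns: a reflected default of
+ # "CURRENT_TIMESTAMP" and a metadata default of "current_timestamp()"
+ # compare as equal once lowercased and stripped of the trailing "()".
+ #
+ #     impl.compare_server_default(
+ #         insp_col, meta_col, "current_timestamp()", "CURRENT_TIMESTAMP"
+ #     )  # False, i.e. no difference detected
+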
+ def correct_for_autogen_constraints(
+ self,
+ conn_unique_constraints,
+ conn_indexes,
+ metadata_unique_constraints,
+ metadata_indexes,
+ ):
+ # TODO: if SQLA 1.0, make use of "duplicates_index"
+ # metadata
+ removed = set()
+ for idx in list(conn_indexes):
+ if idx.unique:
+ continue
+ # MySQL puts implicit indexes on FK columns, even if
+ # composite and even if MyISAM, so this can't be checked too
+ # easily. the name of the index may be the column name, or it
+ # may be the name of the FK constraint.
+ for col in idx.columns:
+ if idx.name == col.name:
+ conn_indexes.remove(idx)
+ removed.add(idx.name)
+ break
+ for fk in col.foreign_keys:
+ if fk.name == idx.name:
+ conn_indexes.remove(idx)
+ removed.add(idx.name)
+ break
+ if idx.name in removed:
+ break
+
+ # then remove indexes from the "metadata_indexes"
+ # that we've removed from reflected, otherwise they come out
+ # as adds (see #202)
+ for idx in list(metadata_indexes):
+ if idx.name in removed:
+ metadata_indexes.remove(idx)
+
+ def correct_for_autogen_foreignkeys(self, conn_fks, metadata_fks):
+ conn_fk_by_sig = {
+ compare._fk_constraint_sig(fk).sig: fk for fk in conn_fks
+ }
+ metadata_fk_by_sig = {
+ compare._fk_constraint_sig(fk).sig: fk for fk in metadata_fks
+ }
+
+ for sig in set(conn_fk_by_sig).intersection(metadata_fk_by_sig):
+ mdfk = metadata_fk_by_sig[sig]
+ cnfk = conn_fk_by_sig[sig]
+ # MySQL considers RESTRICT to be the default and doesn't
+ # report on it. if the model has explicit RESTRICT and
+ # the conn FK has None, set it to RESTRICT
+ if (
+ mdfk.ondelete is not None
+ and mdfk.ondelete.lower() == "restrict"
+ and cnfk.ondelete is None
+ ):
+ cnfk.ondelete = "RESTRICT"
+ if (
+ mdfk.onupdate is not None
+ and mdfk.onupdate.lower() == "restrict"
+ and cnfk.onupdate is None
+ ):
+ cnfk.onupdate = "RESTRICT"
+
+
+class MariaDBImpl(MySQLImpl):
+ __dialect__ = "mariadb"
+
+
+class MySQLAlterDefault(AlterColumn):
+ def __init__(
+ self,
+ name: str,
+ column_name: str,
+ default: _ServerDefault,
+ schema: Optional[str] = None,
+ ) -> None:
+ super(AlterColumn, self).__init__(name, schema=schema)
+ self.column_name = column_name
+ self.default = default
+
+
+class MySQLChangeColumn(AlterColumn):
+ def __init__(
+ self,
+ name: str,
+ column_name: str,
+ schema: Optional[str] = None,
+ newname: Optional[str] = None,
+ type_: Optional[TypeEngine] = None,
+ nullable: Optional[bool] = None,
+ default: Optional[Union[_ServerDefault, Literal[False]]] = False,
+ autoincrement: Optional[bool] = None,
+ comment: Optional[Union[str, Literal[False]]] = False,
+ ) -> None:
+ super(AlterColumn, self).__init__(name, schema=schema)
+ self.column_name = column_name
+ self.nullable = nullable
+ self.newname = newname
+ self.default = default
+ self.autoincrement = autoincrement
+ self.comment = comment
+ if type_ is None:
+ raise util.CommandError(
+ "All MySQL CHANGE/MODIFY COLUMN operations "
+ "require the existing type."
+ )
+
+ self.type_ = sqltypes.to_instance(type_)
+
+
+class MySQLModifyColumn(MySQLChangeColumn):
+ pass
+
+
+@compiles(ColumnNullable, "mysql", "mariadb")
+@compiles(ColumnName, "mysql", "mariadb")
+@compiles(ColumnDefault, "mysql", "mariadb")
+@compiles(ColumnType, "mysql", "mariadb")
+def _mysql_doesnt_support_individual(element, compiler, **kw):
+ raise NotImplementedError(
+ "Individual alter column constructs not supported by MySQL"
+ )
+
+
+@compiles(MySQLAlterDefault, "mysql", "mariadb")
+def _mysql_alter_default(
+ element: MySQLAlterDefault, compiler: MySQLDDLCompiler, **kw
+) -> str:
+ return "%s ALTER COLUMN %s %s" % (
+ alter_table(compiler, element.table_name, element.schema),
+ format_column_name(compiler, element.column_name),
+ "SET DEFAULT %s" % format_server_default(compiler, element.default)
+ if element.default is not None
+ else "DROP DEFAULT",
+ )
+
+
+@compiles(MySQLModifyColumn, "mysql", "mariadb")
+def _mysql_modify_column(
+ element: MySQLModifyColumn, compiler: MySQLDDLCompiler, **kw
+) -> str:
+ return "%s MODIFY %s %s" % (
+ alter_table(compiler, element.table_name, element.schema),
+ format_column_name(compiler, element.column_name),
+ _mysql_colspec(
+ compiler,
+ nullable=element.nullable,
+ server_default=element.default,
+ type_=element.type_,
+ autoincrement=element.autoincrement,
+ comment=element.comment,
+ ),
+ )
+
+
+@compiles(MySQLChangeColumn, "mysql", "mariadb")
+def _mysql_change_column(
+ element: MySQLChangeColumn, compiler: MySQLDDLCompiler, **kw
+) -> str:
+ return "%s CHANGE %s %s %s" % (
+ alter_table(compiler, element.table_name, element.schema),
+ format_column_name(compiler, element.column_name),
+ format_column_name(compiler, element.newname),
+ _mysql_colspec(
+ compiler,
+ nullable=element.nullable,
+ server_default=element.default,
+ type_=element.type_,
+ autoincrement=element.autoincrement,
+ comment=element.comment,
+ ),
+ )
+
+
+def _mysql_colspec(
+ compiler: MySQLDDLCompiler,
+ nullable: Optional[bool],
+ server_default: Optional[Union[_ServerDefault, Literal[False]]],
+ type_: TypeEngine,
+ autoincrement: Optional[bool],
+ comment: Optional[Union[str, Literal[False]]],
+) -> str:
+ spec = "%s %s" % (
+ compiler.dialect.type_compiler.process(type_),
+ "NULL" if nullable else "NOT NULL",
+ )
+ if autoincrement:
+ spec += " AUTO_INCREMENT"
+ if server_default is not False and server_default is not None:
+ spec += " DEFAULT %s" % format_server_default(compiler, server_default)
+ if comment:
+ spec += " COMMENT %s" % compiler.sql_compiler.render_literal_value(
+ comment, sqltypes.String()
+ )
+
+ return spec
+
+
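+# Illustrative sketch (not upstream code): a hypothetical INTEGER
+# primary key rendered by _mysql_colspec with nullable=False and
+# autoincrement=True comes out as:
+#
+#     INTEGER NOT NULL AUTO_INCREMENT
+
+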
+@compiles(schema.DropConstraint, "mysql", "mariadb")
+def _mysql_drop_constraint(
+ element: DropConstraint, compiler: MySQLDDLCompiler, **kw
+) -> str:
+ """Redefine SQLAlchemy's drop constraint to
+ raise errors for invalid constraint type."""
+
+ constraint = element.element
+ if isinstance(
+ constraint,
+ (
+ schema.ForeignKeyConstraint,
+ schema.PrimaryKeyConstraint,
+ schema.UniqueConstraint,
+ ),
+ ):
+ assert not kw
+ return compiler.visit_drop_constraint(element)
+ elif isinstance(constraint, schema.CheckConstraint):
+ # note that SQLAlchemy as of 1.2 does not yet support
+ # DROP CONSTRAINT for MySQL/MariaDB, so we implement fully
+ # here.
+ if _is_mariadb(compiler.dialect):
+ return "ALTER TABLE %s DROP CONSTRAINT %s" % (
+ compiler.preparer.format_table(constraint.table),
+ compiler.preparer.format_constraint(constraint),
+ )
+ else:
+ return "ALTER TABLE %s DROP CHECK %s" % (
+ compiler.preparer.format_table(constraint.table),
+ compiler.preparer.format_constraint(constraint),
+ )
+ else:
+ raise NotImplementedError(
+ "No generic 'DROP CONSTRAINT' in MySQL - "
+ "please specify constraint type"
+ )
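+
+
+# Illustrative sketch (not upstream code): dropping a hypothetical CHECK
+# constraint "ck_t" on table "t" renders differently per backend:
+#
+#     ALTER TABLE t DROP CONSTRAINT ck_t   -- MariaDB
+#     ALTER TABLE t DROP CHECK ck_t        -- MySQL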
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/ddl/oracle.py b/Backend/venv/lib/python3.12/site-packages/alembic/ddl/oracle.py
new file mode 100644
index 00000000..e56bb210
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/alembic/ddl/oracle.py
@@ -0,0 +1,197 @@
+from __future__ import annotations
+
+import re
+from typing import Any
+from typing import Optional
+from typing import TYPE_CHECKING
+
+from sqlalchemy.ext.compiler import compiles
+from sqlalchemy.sql import sqltypes
+
+from .base import AddColumn
+from .base import alter_table
+from .base import ColumnComment
+from .base import ColumnDefault
+from .base import ColumnName
+from .base import ColumnNullable
+from .base import ColumnType
+from .base import format_column_name
+from .base import format_server_default
+from .base import format_table_name
+from .base import format_type
+from .base import IdentityColumnDefault
+from .base import RenameTable
+from .impl import DefaultImpl
+
+if TYPE_CHECKING:
+ from sqlalchemy.dialects.oracle.base import OracleDDLCompiler
+ from sqlalchemy.engine.cursor import CursorResult
+ from sqlalchemy.sql.schema import Column
+
+
+class OracleImpl(DefaultImpl):
+ __dialect__ = "oracle"
+ transactional_ddl = False
+ batch_separator = "/"
+ command_terminator = ""
+ type_synonyms = DefaultImpl.type_synonyms + (
+ {"VARCHAR", "VARCHAR2"},
+ {"BIGINT", "INTEGER", "SMALLINT", "DECIMAL", "NUMERIC", "NUMBER"},
+ {"DOUBLE", "FLOAT", "DOUBLE_PRECISION"},
+ )
+ identity_attrs_ignore = ()
+
+ def __init__(self, *arg, **kw) -> None:
+ super().__init__(*arg, **kw)
+ self.batch_separator = self.context_opts.get(
+ "oracle_batch_separator", self.batch_separator
+ )
+
+ def _exec(self, construct: Any, *args, **kw) -> Optional[CursorResult]:
+ result = super()._exec(construct, *args, **kw)
+ if self.as_sql and self.batch_separator:
+ self.static_output(self.batch_separator)
+ return result
+
+ def compare_server_default(
+ self,
+ inspector_column,
+ metadata_column,
+ rendered_metadata_default,
+ rendered_inspector_default,
+ ):
+ if rendered_metadata_default is not None:
+ rendered_metadata_default = re.sub(
+ r"^\((.+)\)$", r"\1", rendered_metadata_default
+ )
+
+ rendered_metadata_default = re.sub(
+ r"^\"?'(.+)'\"?$", r"\1", rendered_metadata_default
+ )
+
+ if rendered_inspector_default is not None:
+ rendered_inspector_default = re.sub(
+ r"^\((.+)\)$", r"\1", rendered_inspector_default
+ )
+
+ rendered_inspector_default = re.sub(
+ r"^\"?'(.+)'\"?$", r"\1", rendered_inspector_default
+ )
+
+ rendered_inspector_default = rendered_inspector_default.strip()
+ return rendered_inspector_default != rendered_metadata_default
+
+ def emit_begin(self) -> None:
+ self._exec("SET TRANSACTION READ WRITE")
+
+ def emit_commit(self) -> None:
+ self._exec("COMMIT")
+
+
+@compiles(AddColumn, "oracle")
+def visit_add_column(
+ element: AddColumn, compiler: OracleDDLCompiler, **kw
+) -> str:
+ return "%s %s" % (
+ alter_table(compiler, element.table_name, element.schema),
+ add_column(compiler, element.column, **kw),
+ )
+
+
+@compiles(ColumnNullable, "oracle")
+def visit_column_nullable(
+ element: ColumnNullable, compiler: OracleDDLCompiler, **kw
+) -> str:
+ return "%s %s %s" % (
+ alter_table(compiler, element.table_name, element.schema),
+ alter_column(compiler, element.column_name),
+ "NULL" if element.nullable else "NOT NULL",
+ )
+
+
+@compiles(ColumnType, "oracle")
+def visit_column_type(
+ element: ColumnType, compiler: OracleDDLCompiler, **kw
+) -> str:
+ return "%s %s %s" % (
+ alter_table(compiler, element.table_name, element.schema),
+ alter_column(compiler, element.column_name),
+ "%s" % format_type(compiler, element.type_),
+ )
+
+
+@compiles(ColumnName, "oracle")
+def visit_column_name(
+ element: ColumnName, compiler: OracleDDLCompiler, **kw
+) -> str:
+ return "%s RENAME COLUMN %s TO %s" % (
+ alter_table(compiler, element.table_name, element.schema),
+ format_column_name(compiler, element.column_name),
+ format_column_name(compiler, element.newname),
+ )
+
+
+@compiles(ColumnDefault, "oracle")
+def visit_column_default(
+ element: ColumnDefault, compiler: OracleDDLCompiler, **kw
+) -> str:
+ return "%s %s %s" % (
+ alter_table(compiler, element.table_name, element.schema),
+ alter_column(compiler, element.column_name),
+ "DEFAULT %s" % format_server_default(compiler, element.default)
+ if element.default is not None
+ else "DEFAULT NULL",
+ )
+
+
+@compiles(ColumnComment, "oracle")
+def visit_column_comment(
+ element: ColumnComment, compiler: OracleDDLCompiler, **kw
+) -> str:
+ ddl = "COMMENT ON COLUMN {table_name}.{column_name} IS {comment}"
+
+ comment = compiler.sql_compiler.render_literal_value(
+ (element.comment if element.comment is not None else ""),
+ sqltypes.String(),
+ )
+
+ return ddl.format(
+ table_name=element.table_name,
+ column_name=element.column_name,
+ comment=comment,
+ )
+
+
+@compiles(RenameTable, "oracle")
+def visit_rename_table(
+ element: RenameTable, compiler: OracleDDLCompiler, **kw
+) -> str:
+ return "%s RENAME TO %s" % (
+ alter_table(compiler, element.table_name, element.schema),
+ format_table_name(compiler, element.new_table_name, None),
+ )
+
+
+def alter_column(compiler: OracleDDLCompiler, name: str) -> str:
+ return "MODIFY %s" % format_column_name(compiler, name)
+
+
+def add_column(compiler: OracleDDLCompiler, column: Column[Any], **kw) -> str:
+ return "ADD %s" % compiler.get_column_specification(column, **kw)
+
+
+@compiles(IdentityColumnDefault, "oracle")
+def visit_identity_column(
+ element: IdentityColumnDefault, compiler: OracleDDLCompiler, **kw
+):
+ text = "%s %s " % (
+ alter_table(compiler, element.table_name, element.schema),
+ alter_column(compiler, element.column_name),
+ )
+ if element.default is None:
+ # drop identity
+ text += "DROP IDENTITY"
+ return text
+ else:
+ text += compiler.visit_identity_column(element.default)
+ return text
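+
+
+# Illustrative sketch (not upstream code): dropping identity options
+# from a hypothetical column "c" on table "t" renders as:
+#
+#     ALTER TABLE t MODIFY c DROP IDENTITY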
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/ddl/postgresql.py b/Backend/venv/lib/python3.12/site-packages/alembic/ddl/postgresql.py
new file mode 100644
index 00000000..949e2562
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/alembic/ddl/postgresql.py
@@ -0,0 +1,774 @@
+from __future__ import annotations
+
+import logging
+import re
+from typing import Any
+from typing import cast
+from typing import Dict
+from typing import List
+from typing import Optional
+from typing import Sequence
+from typing import Tuple
+from typing import TYPE_CHECKING
+from typing import Union
+
+from sqlalchemy import Column
+from sqlalchemy import literal_column
+from sqlalchemy import Numeric
+from sqlalchemy import text
+from sqlalchemy import types as sqltypes
+from sqlalchemy.dialects.postgresql import BIGINT
+from sqlalchemy.dialects.postgresql import ExcludeConstraint
+from sqlalchemy.dialects.postgresql import INTEGER
+from sqlalchemy.schema import CreateIndex
+from sqlalchemy.sql import operators
+from sqlalchemy.sql.elements import ColumnClause
+from sqlalchemy.sql.elements import TextClause
+from sqlalchemy.sql.elements import UnaryExpression
+from sqlalchemy.sql.functions import FunctionElement
+from sqlalchemy.types import NULLTYPE
+
+from .base import alter_column
+from .base import alter_table
+from .base import AlterColumn
+from .base import ColumnComment
+from .base import compiles
+from .base import format_column_name
+from .base import format_table_name
+from .base import format_type
+from .base import IdentityColumnDefault
+from .base import RenameTable
+from .impl import DefaultImpl
+from .. import util
+from ..autogenerate import render
+from ..operations import ops
+from ..operations import schemaobj
+from ..operations.base import BatchOperations
+from ..operations.base import Operations
+from ..util import sqla_compat
+
+if TYPE_CHECKING:
+ from typing import Literal
+
+ from sqlalchemy import Index
+ from sqlalchemy import UniqueConstraint
+ from sqlalchemy.dialects.postgresql.array import ARRAY
+ from sqlalchemy.dialects.postgresql.base import PGDDLCompiler
+ from sqlalchemy.dialects.postgresql.hstore import HSTORE
+ from sqlalchemy.dialects.postgresql.json import JSON
+ from sqlalchemy.dialects.postgresql.json import JSONB
+ from sqlalchemy.sql.elements import ClauseElement
+ from sqlalchemy.sql.elements import ColumnElement
+ from sqlalchemy.sql.elements import quoted_name
+ from sqlalchemy.sql.schema import MetaData
+ from sqlalchemy.sql.schema import Table
+ from sqlalchemy.sql.type_api import TypeEngine
+
+ from .base import _ServerDefault
+ from ..autogenerate.api import AutogenContext
+ from ..autogenerate.render import _f_name
+ from ..runtime.migration import MigrationContext
+
+
+log = logging.getLogger(__name__)
+
+
+class PostgresqlImpl(DefaultImpl):
+ __dialect__ = "postgresql"
+ transactional_ddl = True
+ type_synonyms = DefaultImpl.type_synonyms + (
+ {"FLOAT", "DOUBLE PRECISION"},
+ )
+
+ def create_index(self, index: Index, **kw: Any) -> None:
+ # "postgresql_include" likely defaults to None if not present, so
+ # get() should normally not return the default value; being
+ # defensive in any case
+ postgresql_include = index.kwargs.get("postgresql_include", None) or ()
+ for col in postgresql_include:
+ if col not in index.table.c: # type: ignore[union-attr]
+ index.table.append_column( # type: ignore[union-attr]
+ Column(col, sqltypes.NullType)
+ )
+ self._exec(CreateIndex(index, **kw))
+
+ def prep_table_for_batch(self, batch_impl, table):
+ for constraint in table.constraints:
+ if (
+ constraint.name is not None
+ and constraint.name in batch_impl.named_constraints
+ ):
+ self.drop_constraint(constraint)
+
+ def compare_server_default(
+ self,
+ inspector_column,
+ metadata_column,
+ rendered_metadata_default,
+ rendered_inspector_default,
+ ):
+ # don't do defaults for SERIAL columns
+ if (
+ metadata_column.primary_key
+ and metadata_column is metadata_column.table._autoincrement_column
+ ):
+ return False
+
+ conn_col_default = rendered_inspector_default
+
+ defaults_equal = conn_col_default == rendered_metadata_default
+ if defaults_equal:
+ return False
+
+ if None in (
+ conn_col_default,
+ rendered_metadata_default,
+ metadata_column.server_default,
+ ):
+ return not defaults_equal
+
+ metadata_default = metadata_column.server_default.arg
+
+ if isinstance(metadata_default, str):
+ if not isinstance(inspector_column.type, Numeric):
+ metadata_default = re.sub(r"^'|'$", "", metadata_default)
+ metadata_default = f"'{metadata_default}'"
+
+ metadata_default = literal_column(metadata_default)
+
+ # run a real compare against the server
+ return not self.connection.scalar(
+ sqla_compat._select(
+ literal_column(conn_col_default) == metadata_default
+ )
+ )
+
+ def alter_column( # type:ignore[override]
+ self,
+ table_name: str,
+ column_name: str,
+ nullable: Optional[bool] = None,
+ server_default: Union[_ServerDefault, Literal[False]] = False,
+ name: Optional[str] = None,
+ type_: Optional[TypeEngine] = None,
+ schema: Optional[str] = None,
+ autoincrement: Optional[bool] = None,
+ existing_type: Optional[TypeEngine] = None,
+ existing_server_default: Optional[_ServerDefault] = None,
+ existing_nullable: Optional[bool] = None,
+ existing_autoincrement: Optional[bool] = None,
+ **kw: Any,
+ ) -> None:
+ using = kw.pop("postgresql_using", None)
+
+ if using is not None and type_ is None:
+ raise util.CommandError(
+ "postgresql_using must be used with the type_ parameter"
+ )
+
+ if type_ is not None:
+ self._exec(
+ PostgresqlColumnType(
+ table_name,
+ column_name,
+ type_,
+ schema=schema,
+ using=using,
+ existing_type=existing_type,
+ existing_server_default=existing_server_default,
+ existing_nullable=existing_nullable,
+ )
+ )
+
+ super().alter_column(
+ table_name,
+ column_name,
+ nullable=nullable,
+ server_default=server_default,
+ name=name,
+ schema=schema,
+ autoincrement=autoincrement,
+ existing_type=existing_type,
+ existing_server_default=existing_server_default,
+ existing_nullable=existing_nullable,
+ existing_autoincrement=existing_autoincrement,
+ **kw,
+ )
+
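+ # Hedged usage sketch (illustrative, not upstream code): converting a
+ # hypothetical string column "c" on table "t" to INTEGER with an
+ # explicit cast; per the check above, postgresql_using requires type_.
+ #
+ #     op.alter_column(
+ #         "t",
+ #         "c",
+ #         type_=sa.Integer(),
+ #         existing_type=sa.String(),
+ #         postgresql_using="c::integer",
+ #     )
+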
+ def autogen_column_reflect(self, inspector, table, column_info):
+ if column_info.get("default") and isinstance(
+ column_info["type"], (INTEGER, BIGINT)
+ ):
+ seq_match = re.match(
+ r"nextval\('(.+?)'::regclass\)", column_info["default"]
+ )
+ if seq_match:
+ info = sqla_compat._exec_on_inspector(
+ inspector,
+ text(
+ "select c.relname, a.attname "
+ "from pg_class as c join "
+ "pg_depend d on d.objid=c.oid and "
+ "d.classid='pg_class'::regclass and "
+ "d.refclassid='pg_class'::regclass "
+ "join pg_class t on t.oid=d.refobjid "
+ "join pg_attribute a on a.attrelid=t.oid and "
+ "a.attnum=d.refobjsubid "
+ "where c.relkind='S' and c.relname=:seqname"
+ ),
+ seqname=seq_match.group(1),
+ ).first()
+ if info:
+ seqname, colname = info
+ if colname == column_info["name"]:
+ log.info(
+ "Detected sequence named '%s' as "
+ "owned by integer column '%s(%s)', "
+ "assuming SERIAL and omitting",
+ seqname,
+ table.name,
+ colname,
+ )
+ # sequence, and the owner is this column;
+ # it's a SERIAL - whack it!
+ del column_info["default"]
+
+ def correct_for_autogen_constraints(
+ self,
+ conn_unique_constraints,
+ conn_indexes,
+ metadata_unique_constraints,
+ metadata_indexes,
+ ):
+ doubled_constraints = {
+ index
+ for index in conn_indexes
+ if index.info.get("duplicates_constraint")
+ }
+
+ for ix in doubled_constraints:
+ conn_indexes.remove(ix)
+
+ if not sqla_compat.sqla_2:
+ self._skip_functional_indexes(metadata_indexes, conn_indexes)
+
+ def _cleanup_index_expr(
+ self, index: Index, expr: str, remove_suffix: str
+ ) -> str:
+ # start = expr
+ expr = expr.lower().replace('"', "").replace("'", "")
+ if index.table is not None:
+ # should not be needed, since include_table=False is in compile
+ expr = expr.replace(f"{index.table.name.lower()}.", "")
+
+ while expr and expr[0] == "(" and expr[-1] == ")":
+ expr = expr[1:-1]
+ if "::" in expr:
+ # strip the :: cast; types can have spaces in them
+ expr = re.sub(r"(::[\w ]+\w)", "", expr)
+
+ if remove_suffix and expr.endswith(remove_suffix):
+ expr = expr[: -len(remove_suffix)]
+
+ # print(f"START: {start} END: {expr}")
+ return expr
+
+ def _default_modifiers(self, exp: ClauseElement) -> str:
+ to_remove = ""
+ while isinstance(exp, UnaryExpression):
+ if exp.modifier is None:
+ exp = exp.element
+ else:
+ op = exp.modifier
+ if isinstance(exp.element, UnaryExpression):
+ inner_op = exp.element.modifier
+ else:
+ inner_op = None
+ if inner_op is None:
+ if op == operators.asc_op:
+ # default is asc
+ to_remove = " asc"
+ elif op == operators.nullslast_op:
+ # default is nulls last
+ to_remove = " nulls last"
+ else:
+ if (
+ inner_op == operators.asc_op
+ and op == operators.nullslast_op
+ ):
+ # default is asc nulls last
+ to_remove = " asc nulls last"
+ elif (
+ inner_op == operators.desc_op
+ and op == operators.nullsfirst_op
+ ):
+ # default for desc is nulls first
+ to_remove = " nulls first"
+ break
+ return to_remove
+
+ def _dialect_sig(
+ self, item: Union[Index, UniqueConstraint]
+ ) -> Tuple[Any, ...]:
+ # only the positive case is returned by sqlalchemy reflection so
+ # None and False are treated the same
+ if item.dialect_kwargs.get("postgresql_nulls_not_distinct"):
+ return ("nulls_not_distinct",)
+ return ()
+
+ def create_index_sig(self, index: Index) -> Tuple[Any, ...]:
+ return tuple(
+ self._cleanup_index_expr(
+ index,
+ *(
+ (e, "")
+ if isinstance(e, str)
+ else (self._compile_element(e), self._default_modifiers(e))
+ ),
+ )
+ for e in index.expressions
+ ) + self._dialect_sig(index)
+
+ def create_unique_constraint_sig(
+ self, const: UniqueConstraint
+ ) -> Tuple[Any, ...]:
+ return tuple(
+ sorted([col.name for col in const.columns])
+ ) + self._dialect_sig(const)
+
+ def adjust_reflected_dialect_options(
+ self, reflected_options: Dict[str, Any], kind: str
+ ) -> Dict[str, Any]:
+ options: Dict[str, Any]
+ options = reflected_options.get("dialect_options", {}).copy()
+ if not options.get("postgresql_include"):
+ options.pop("postgresql_include", None)
+ return options
+
+ def _compile_element(self, element: ClauseElement) -> str:
+ return element.compile(
+ dialect=self.dialect,
+ compile_kwargs={"literal_binds": True, "include_table": False},
+ ).string
+
+ def render_ddl_sql_expr(
+ self,
+ expr: ClauseElement,
+ is_server_default: bool = False,
+ is_index: bool = False,
+ **kw: Any,
+ ) -> str:
+ """Render a SQL expression that is typically a server default,
+ index expression, etc.
+
+ """
+
+ # apply self_group to index expressions;
+ # see https://github.com/sqlalchemy/sqlalchemy/blob/
+ # 82fa95cfce070fab401d020c6e6e4a6a96cc2578/
+ # lib/sqlalchemy/dialects/postgresql/base.py#L2261
+ if is_index and not isinstance(expr, ColumnClause):
+ expr = expr.self_group()
+
+ return super().render_ddl_sql_expr(
+ expr, is_server_default=is_server_default, is_index=is_index, **kw
+ )
+
+ def render_type(
+ self, type_: TypeEngine, autogen_context: AutogenContext
+ ) -> Union[str, Literal[False]]:
+ mod = type(type_).__module__
+ if not mod.startswith("sqlalchemy.dialects.postgresql"):
+ return False
+
+ if hasattr(self, "_render_%s_type" % type_.__visit_name__):
+ meth = getattr(self, "_render_%s_type" % type_.__visit_name__)
+ return meth(type_, autogen_context)
+
+ return False
+
+ def _render_HSTORE_type(
+ self, type_: HSTORE, autogen_context: AutogenContext
+ ) -> str:
+ return cast(
+ str,
+ render._render_type_w_subtype(
+ type_, autogen_context, "text_type", r"(.+?\(.*text_type=)"
+ ),
+ )
+
+ def _render_ARRAY_type(
+ self, type_: ARRAY, autogen_context: AutogenContext
+ ) -> str:
+ return cast(
+ str,
+ render._render_type_w_subtype(
+ type_, autogen_context, "item_type", r"(.+?\()"
+ ),
+ )
+
+ def _render_JSON_type(
+ self, type_: JSON, autogen_context: AutogenContext
+ ) -> str:
+ return cast(
+ str,
+ render._render_type_w_subtype(
+ type_, autogen_context, "astext_type", r"(.+?\(.*astext_type=)"
+ ),
+ )
+
+ def _render_JSONB_type(
+ self, type_: JSONB, autogen_context: AutogenContext
+ ) -> str:
+ return cast(
+ str,
+ render._render_type_w_subtype(
+ type_, autogen_context, "astext_type", r"(.+?\(.*astext_type=)"
+ ),
+ )
+
+
+class PostgresqlColumnType(AlterColumn):
+ def __init__(
+ self, name: str, column_name: str, type_: TypeEngine, **kw
+ ) -> None:
+ using = kw.pop("using", None)
+ super().__init__(name, column_name, **kw)
+ self.type_ = sqltypes.to_instance(type_)
+ self.using = using
+
+
+@compiles(RenameTable, "postgresql")
+def visit_rename_table(
+ element: RenameTable, compiler: PGDDLCompiler, **kw
+) -> str:
+ return "%s RENAME TO %s" % (
+ alter_table(compiler, element.table_name, element.schema),
+ format_table_name(compiler, element.new_table_name, None),
+ )
+
+
+@compiles(PostgresqlColumnType, "postgresql")
+def visit_column_type(
+ element: PostgresqlColumnType, compiler: PGDDLCompiler, **kw
+) -> str:
+ return "%s %s %s %s" % (
+ alter_table(compiler, element.table_name, element.schema),
+ alter_column(compiler, element.column_name),
+ "TYPE %s" % format_type(compiler, element.type_),
+ "USING %s" % element.using if element.using else "",
+ )
+
+
+@compiles(ColumnComment, "postgresql")
+def visit_column_comment(
+ element: ColumnComment, compiler: PGDDLCompiler, **kw
+) -> str:
+ ddl = "COMMENT ON COLUMN {table_name}.{column_name} IS {comment}"
+ comment = (
+ compiler.sql_compiler.render_literal_value(
+ element.comment, sqltypes.String()
+ )
+ if element.comment is not None
+ else "NULL"
+ )
+
+ return ddl.format(
+ table_name=format_table_name(
+ compiler, element.table_name, element.schema
+ ),
+ column_name=format_column_name(compiler, element.column_name),
+ comment=comment,
+ )
+
+
+@compiles(IdentityColumnDefault, "postgresql")
+def visit_identity_column(
+ element: IdentityColumnDefault, compiler: PGDDLCompiler, **kw
+):
+ text = "%s %s " % (
+ alter_table(compiler, element.table_name, element.schema),
+ alter_column(compiler, element.column_name),
+ )
+ if element.default is None:
+ # drop identity
+ text += "DROP IDENTITY"
+ return text
+ elif element.existing_server_default is None:
+ # add identity options
+ text += "ADD "
+ text += compiler.visit_identity_column(element.default)
+ return text
+ else:
+ # alter identity
+ diff, _, _ = element.impl._compare_identity_default(
+ element.default, element.existing_server_default
+ )
+ identity = element.default
+ for attr in sorted(diff):
+ if attr == "always":
+ text += "SET GENERATED %s " % (
+ "ALWAYS" if identity.always else "BY DEFAULT"
+ )
+ else:
+ text += "SET %s " % compiler.get_identity_options(
+ sqla_compat.Identity(**{attr: getattr(identity, attr)})
+ )
+ return text
+
+
+@Operations.register_operation("create_exclude_constraint")
+@BatchOperations.register_operation(
+ "create_exclude_constraint", "batch_create_exclude_constraint"
+)
+@ops.AddConstraintOp.register_add_constraint("exclude_constraint")
+class CreateExcludeConstraintOp(ops.AddConstraintOp):
+ """Represent a create exclude constraint operation."""
+
+ constraint_type = "exclude"
+
+ def __init__(
+ self,
+ constraint_name: sqla_compat._ConstraintName,
+ table_name: Union[str, quoted_name],
+ elements: Union[
+ Sequence[Tuple[str, str]],
+ Sequence[Tuple[ColumnClause[Any], str]],
+ ],
+ where: Optional[Union[ColumnElement[bool], str]] = None,
+ schema: Optional[str] = None,
+ _orig_constraint: Optional[ExcludeConstraint] = None,
+ **kw,
+ ) -> None:
+ self.constraint_name = constraint_name
+ self.table_name = table_name
+ self.elements = elements
+ self.where = where
+ self.schema = schema
+ self._orig_constraint = _orig_constraint
+ self.kw = kw
+
+ @classmethod
+ def from_constraint( # type:ignore[override]
+ cls, constraint: ExcludeConstraint
+ ) -> CreateExcludeConstraintOp:
+ constraint_table = sqla_compat._table_for_constraint(constraint)
+ return cls(
+ constraint.name,
+ constraint_table.name,
+ [
+ (expr, op)
+ for expr, name, op in constraint._render_exprs # type:ignore[attr-defined] # noqa
+ ],
+ where=cast("ColumnElement[bool] | None", constraint.where),
+ schema=constraint_table.schema,
+ _orig_constraint=constraint,
+ deferrable=constraint.deferrable,
+ initially=constraint.initially,
+ using=constraint.using,
+ )
+
+ def to_constraint(
+ self, migration_context: Optional[MigrationContext] = None
+ ) -> ExcludeConstraint:
+ if self._orig_constraint is not None:
+ return self._orig_constraint
+ schema_obj = schemaobj.SchemaObjects(migration_context)
+ t = schema_obj.table(self.table_name, schema=self.schema)
+ excl = ExcludeConstraint(
+ *self.elements,
+ name=self.constraint_name,
+ where=self.where,
+ **self.kw,
+ )
+ for (
+ expr,
+ name,
+ oper,
+ ) in excl._render_exprs: # type:ignore[attr-defined]
+ t.append_column(Column(name, NULLTYPE))
+ t.append_constraint(excl)
+ return excl
+
+ @classmethod
+ def create_exclude_constraint(
+ cls,
+ operations: Operations,
+ constraint_name: str,
+ table_name: str,
+ *elements: Any,
+ **kw: Any,
+ ) -> Optional[Table]:
+ """Issue an alter to create an EXCLUDE constraint using the
+ current migration context.
+
+ .. note:: This method is Postgresql specific, and additionally
+ requires at least SQLAlchemy 1.0.
+
+ e.g.::
+
+ from alembic import op
+
+ op.create_exclude_constraint(
+ "user_excl",
+ "user",
+ ("period", "&&"),
+ ("group", "="),
+ where=("group != 'some group'"),
+ )
+
+ Note that the expressions work the same way as that of
+ the ``ExcludeConstraint`` object itself; if plain strings are
+ passed, quoting rules must be applied manually.
+
+ :param name: Name of the constraint.
+ :param table_name: String name of the source table.
+ :param elements: exclude conditions.
+ :param where: SQL expression or SQL string with optional WHERE
+ clause.
+ :param deferrable: optional bool. If set, emit DEFERRABLE or
+ NOT DEFERRABLE when issuing DDL for this constraint.
+ :param initially: optional string. If set, emit INITIALLY
+ when issuing DDL for this constraint.
+ :param schema: Optional schema name to operate within.
+
+ """
+ op = cls(constraint_name, table_name, elements, **kw)
+ return operations.invoke(op)
+
+ @classmethod
+ def batch_create_exclude_constraint(
+ cls,
+ operations: BatchOperations,
+ constraint_name: str,
+ *elements: Any,
+ **kw: Any,
+ ):
+ """Issue a "create exclude constraint" instruction using the
+ current batch migration context.
+
+ .. note:: This method is Postgresql specific, and additionally
+ requires at least SQLAlchemy 1.0.
+
+ .. seealso::
+
+ :meth:`.Operations.create_exclude_constraint`
+
+ """
+ kw["schema"] = operations.impl.schema
+ op = cls(constraint_name, operations.impl.table_name, elements, **kw)
+ return operations.invoke(op)
+
+
+@render.renderers.dispatch_for(CreateExcludeConstraintOp)
+def _add_exclude_constraint(
+ autogen_context: AutogenContext, op: CreateExcludeConstraintOp
+) -> str:
+ return _exclude_constraint(op.to_constraint(), autogen_context, alter=True)
+
+
+@render._constraint_renderers.dispatch_for(ExcludeConstraint)
+def _render_inline_exclude_constraint(
+ constraint: ExcludeConstraint,
+ autogen_context: AutogenContext,
+ namespace_metadata: MetaData,
+) -> str:
+ rendered = render._user_defined_render(
+ "exclude", constraint, autogen_context
+ )
+ if rendered is not False:
+ return rendered
+
+ return _exclude_constraint(constraint, autogen_context, False)
+
+
+def _postgresql_autogenerate_prefix(autogen_context: AutogenContext) -> str:
+ imports = autogen_context.imports
+ if imports is not None:
+ imports.add("from sqlalchemy.dialects import postgresql")
+ return "postgresql."
+
+
+def _exclude_constraint(
+ constraint: ExcludeConstraint,
+ autogen_context: AutogenContext,
+ alter: bool,
+) -> str:
+ opts: List[Tuple[str, Union[quoted_name, str, _f_name, None]]] = []
+
+ has_batch = autogen_context._has_batch
+
+ if constraint.deferrable:
+ opts.append(("deferrable", str(constraint.deferrable)))
+ if constraint.initially:
+ opts.append(("initially", str(constraint.initially)))
+ if constraint.using:
+ opts.append(("using", str(constraint.using)))
+ if not has_batch and alter and constraint.table.schema:
+ opts.append(("schema", render._ident(constraint.table.schema)))
+ if not alter and constraint.name:
+ opts.append(
+ ("name", render._render_gen_name(autogen_context, constraint.name))
+ )
+
+ def do_expr_where_opts():
+ args = [
+ "(%s, %r)"
+ % (
+ _render_potential_column(sqltext, autogen_context),
+ opstring,
+ )
+ for sqltext, name, opstring in constraint._render_exprs # type:ignore[attr-defined] # noqa
+ ]
+ if constraint.where is not None:
+ args.append(
+ "where=%s"
+ % render._render_potential_expr(
+ constraint.where, autogen_context
+ )
+ )
+ args.extend(["%s=%r" % (k, v) for k, v in opts])
+ return args
+
+ if alter:
+ args = [
+ repr(render._render_gen_name(autogen_context, constraint.name))
+ ]
+ if not has_batch:
+ args += [repr(render._ident(constraint.table.name))]
+ args.extend(do_expr_where_opts())
+ return "%(prefix)screate_exclude_constraint(%(args)s)" % {
+ "prefix": render._alembic_autogenerate_prefix(autogen_context),
+ "args": ", ".join(args),
+ }
+ else:
+ args = do_expr_where_opts()
+ return "%(prefix)sExcludeConstraint(%(args)s)" % {
+ "prefix": _postgresql_autogenerate_prefix(autogen_context),
+ "args": ", ".join(args),
+ }
+
+
+def _render_potential_column(
+ value: Union[
+ ColumnClause[Any], Column[Any], TextClause, FunctionElement[Any]
+ ],
+ autogen_context: AutogenContext,
+) -> str:
+ if isinstance(value, ColumnClause):
+ if value.is_literal:
+ # like literal_column("int8range(from, to)") in ExcludeConstraint
+ template = "%(prefix)sliteral_column(%(name)r)"
+ else:
+ template = "%(prefix)scolumn(%(name)r)"
+
+ return template % {
+ "prefix": render._sqlalchemy_autogenerate_prefix(autogen_context),
+ "name": value.name,
+ }
+ else:
+ return render._render_potential_expr(
+ value,
+ autogen_context,
+ wrap_in_text=isinstance(value, (TextClause, FunctionElement)),
+ )
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/ddl/sqlite.py b/Backend/venv/lib/python3.12/site-packages/alembic/ddl/sqlite.py
new file mode 100644
index 00000000..c6186c60
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/alembic/ddl/sqlite.py
@@ -0,0 +1,223 @@
+from __future__ import annotations
+
+import re
+from typing import Any
+from typing import Dict
+from typing import Optional
+from typing import TYPE_CHECKING
+from typing import Union
+
+from sqlalchemy import cast
+from sqlalchemy import JSON
+from sqlalchemy import schema
+from sqlalchemy import sql
+from sqlalchemy.ext.compiler import compiles
+
+from .base import alter_table
+from .base import format_table_name
+from .base import RenameTable
+from .impl import DefaultImpl
+from .. import util
+
+if TYPE_CHECKING:
+ from sqlalchemy.engine.reflection import Inspector
+ from sqlalchemy.sql.compiler import DDLCompiler
+ from sqlalchemy.sql.elements import Cast
+ from sqlalchemy.sql.elements import ClauseElement
+ from sqlalchemy.sql.schema import Column
+ from sqlalchemy.sql.schema import Constraint
+ from sqlalchemy.sql.schema import Table
+ from sqlalchemy.sql.type_api import TypeEngine
+
+ from ..operations.batch import BatchOperationsImpl
+
+
+class SQLiteImpl(DefaultImpl):
+ __dialect__ = "sqlite"
+
+ transactional_ddl = False
+ """SQLite supports transactional DDL, but pysqlite does not:
+ see: http://bugs.python.org/issue10740
+ """
+
+ def requires_recreate_in_batch(
+ self, batch_op: BatchOperationsImpl
+ ) -> bool:
+ """Return True if the given :class:`.BatchOperationsImpl`
+ would need the table to be recreated and copied in order to
+ proceed.
+
+ Normally, only returns True on SQLite when operations other
+ than add_column are present.
+
+ """
+ for op in batch_op.batch:
+ if op[0] == "add_column":
+ col = op[1][1]
+ if isinstance(
+ col.server_default, schema.DefaultClause
+ ) and isinstance(col.server_default.arg, sql.ClauseElement):
+ return True
+ elif (
+ isinstance(col.server_default, util.sqla_compat.Computed)
+ and col.server_default.persisted
+ ):
+ return True
+ elif op[0] not in ("create_index", "drop_index"):
+ return True
+ else:
+ return False
+
+ def add_constraint(self, const: Constraint):
+ # attempt to distinguish between an
+ # auto-gen constraint and an explicit one
+ if const._create_rule is None: # type:ignore[attr-defined]
+ raise NotImplementedError(
+ "No support for ALTER of constraints in SQLite dialect. "
+ "Please refer to the batch mode feature which allows for "
+ "SQLite migrations using a copy-and-move strategy."
+ )
+ elif const._create_rule(self): # type:ignore[attr-defined]
+ util.warn(
+ "Skipping unsupported ALTER for "
+ "creation of implicit constraint. "
+ "Please refer to the batch mode feature which allows for "
+ "SQLite migrations using a copy-and-move strategy."
+ )
+
+ def drop_constraint(self, const: Constraint):
+ if const._create_rule is None: # type:ignore[attr-defined]
+ raise NotImplementedError(
+ "No support for ALTER of constraints in SQLite dialect. "
+ "Please refer to the batch mode feature which allows for "
+ "SQLite migrations using a copy-and-move strategy."
+ )
+
+ def compare_server_default(
+ self,
+ inspector_column: Column[Any],
+ metadata_column: Column[Any],
+ rendered_metadata_default: Optional[str],
+ rendered_inspector_default: Optional[str],
+ ) -> bool:
+ if rendered_metadata_default is not None:
+ rendered_metadata_default = re.sub(
+ r"^\((.+)\)$", r"\1", rendered_metadata_default
+ )
+
+ rendered_metadata_default = re.sub(
+ r"^\"?'(.+)'\"?$", r"\1", rendered_metadata_default
+ )
+
+ if rendered_inspector_default is not None:
+ rendered_inspector_default = re.sub(
+ r"^\((.+)\)$", r"\1", rendered_inspector_default
+ )
+
+ rendered_inspector_default = re.sub(
+ r"^\"?'(.+)'\"?$", r"\1", rendered_inspector_default
+ )
+
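+ # at this point one level of parens and one level of quoting has been
+ # stripped from each side, so e.g. a reflected "('5')" and a
+ # metadata-side "'5'" both normalize to 5 and compare as equal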
+ return rendered_inspector_default != rendered_metadata_default
+
+ def _guess_if_default_is_unparenthesized_sql_expr(
+ self, expr: Optional[str]
+ ) -> bool:
+ """Determine if a server default is a SQL expression or a constant.
+
+ There are too many assertions that expect server defaults to
+ round-trip identically without parentheses added, so we add parens
+ only in very specific cases.
+
+ """
+ if not expr:
+ return False
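+ # a single digit/dot, a quoted string, or an already-parenthesized
+ # value is treated as a constant; anything else is assumed to be a
+ # SQL expression that needs parens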
+ elif re.match(r"^[0-9\.]$", expr):
+ return False
+ elif re.match(r"^'.+'$", expr):
+ return False
+ elif re.match(r"^\(.+\)$", expr):
+ return False
+ else:
+ return True
+
+ def autogen_column_reflect(
+ self,
+ inspector: Inspector,
+ table: Table,
+ column_info: Dict[str, Any],
+ ) -> None:
+ # SQLite expression defaults require parentheses when sent
+ # as DDL
+ if self._guess_if_default_is_unparenthesized_sql_expr(
+ column_info.get("default", None)
+ ):
+ column_info["default"] = "(%s)" % (column_info["default"],)
+
+ def render_ddl_sql_expr(
+ self, expr: ClauseElement, is_server_default: bool = False, **kw
+ ) -> str:
+ # SQLite expression defaults require parentheses when sent
+ # as DDL
+ str_expr = super().render_ddl_sql_expr(
+ expr, is_server_default=is_server_default, **kw
+ )
+
+ if (
+ is_server_default
+ and self._guess_if_default_is_unparenthesized_sql_expr(str_expr)
+ ):
+ str_expr = "(%s)" % (str_expr,)
+ return str_expr
+
+ def cast_for_batch_migrate(
+ self,
+ existing: Column[Any],
+ existing_transfer: Dict[str, Union[TypeEngine, Cast]],
+ new_type: TypeEngine,
+ ) -> None:
+ if (
+ existing.type._type_affinity # type:ignore[attr-defined]
+ is not new_type._type_affinity # type:ignore[attr-defined]
+ and not isinstance(new_type, JSON)
+ ):
+ existing_transfer["expr"] = cast(
+ existing_transfer["expr"], new_type
+ )
+
+ def correct_for_autogen_constraints(
+ self,
+ conn_unique_constraints,
+ conn_indexes,
+ metadata_unique_constraints,
+ metadata_indexes,
+ ):
+ self._skip_functional_indexes(metadata_indexes, conn_indexes)
+
+
+@compiles(RenameTable, "sqlite")
+def visit_rename_table(
+ element: RenameTable, compiler: DDLCompiler, **kw
+) -> str:
+ return "%s RENAME TO %s" % (
+ alter_table(compiler, element.table_name, element.schema),
+ format_table_name(compiler, element.new_table_name, None),
+ )
+
+
+# @compiles(AddColumn, 'sqlite')
+# def visit_add_column(element, compiler, **kw):
+# return "%s %s" % (
+# alter_table(compiler, element.table_name, element.schema),
+# add_column(compiler, element.column, **kw)
+# )
+
+
+# def add_column(compiler, column, **kw):
+# text = "ADD COLUMN %s" % compiler.get_column_specification(column, **kw)
+# need to modify SQLAlchemy so that the CHECK associated with a Boolean
+# or Enum gets placed as part of the column constraints, not the Table
+# see ticket 98
+# for const in column.constraints:
+# text += compiler.process(AddConstraint(const))
+# return text
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/environment.py b/Backend/venv/lib/python3.12/site-packages/alembic/environment.py
new file mode 100644
index 00000000..adfc93eb
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/alembic/environment.py
@@ -0,0 +1 @@
+from .runtime.environment import * # noqa
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/migration.py b/Backend/venv/lib/python3.12/site-packages/alembic/migration.py
new file mode 100644
index 00000000..02626e2c
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/alembic/migration.py
@@ -0,0 +1 @@
+from .runtime.migration import * # noqa
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/op.py b/Backend/venv/lib/python3.12/site-packages/alembic/op.py
new file mode 100644
index 00000000..f3f5fac0
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/alembic/op.py
@@ -0,0 +1,5 @@
+from .operations.base import Operations
+
+# create proxy functions for
+# each method on the Operations class.
+Operations.create_module_class_proxy(globals(), locals())
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/op.pyi b/Backend/venv/lib/python3.12/site-packages/alembic/op.pyi
new file mode 100644
index 00000000..944b5ae1
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/alembic/op.pyi
@@ -0,0 +1,1283 @@
+# ### this file stubs are generated by tools/write_pyi.py - do not edit ###
+# ### imports are manually managed
+from __future__ import annotations
+
+from contextlib import contextmanager
+from typing import Any
+from typing import Awaitable
+from typing import Callable
+from typing import Dict
+from typing import Iterator
+from typing import List
+from typing import Literal
+from typing import Mapping
+from typing import Optional
+from typing import Sequence
+from typing import Tuple
+from typing import Type
+from typing import TYPE_CHECKING
+from typing import TypeVar
+from typing import Union
+
+if TYPE_CHECKING:
+ from sqlalchemy.engine import Connection
+ from sqlalchemy.sql import Executable
+ from sqlalchemy.sql.elements import ColumnElement
+ from sqlalchemy.sql.elements import conv
+ from sqlalchemy.sql.elements import TextClause
+ from sqlalchemy.sql.expression import TableClause
+ from sqlalchemy.sql.functions import Function
+ from sqlalchemy.sql.schema import Column
+ from sqlalchemy.sql.schema import Computed
+ from sqlalchemy.sql.schema import Identity
+ from sqlalchemy.sql.schema import SchemaItem
+ from sqlalchemy.sql.schema import Table
+ from sqlalchemy.sql.type_api import TypeEngine
+ from sqlalchemy.util import immutabledict
+
+ from .operations.ops import BatchOperations
+ from .operations.ops import MigrateOperation
+ from .runtime.migration import MigrationContext
+ from .util.sqla_compat import _literal_bindparam
+
+_T = TypeVar("_T")
+### end imports ###
+
+def add_column(
+ table_name: str, column: Column[Any], *, schema: Optional[str] = None
+) -> None:
+ """Issue an "add column" instruction using the current
+ migration context.
+
+ e.g.::
+
+ from alembic import op
+ from sqlalchemy import Column, String
+
+ op.add_column("organization", Column("name", String()))
+
+ The :meth:`.Operations.add_column` method typically corresponds
+ to the SQL command "ALTER TABLE... ADD COLUMN". Within the scope
+ of this command, the column's name, datatype, nullability,
+ and optional server-generated defaults may be indicated.
+
+ .. note::
+
+ With the exception of NOT NULL constraints or single-column FOREIGN
+ KEY constraints, other kinds of constraints such as PRIMARY KEY,
+ UNIQUE or CHECK constraints **cannot** be generated using this
+ method; for these constraints, refer to operations such as
+ :meth:`.Operations.create_primary_key` and
+ :meth:`.Operations.create_check_constraint`. In particular, the
+ following :class:`~sqlalchemy.schema.Column` parameters are
+ **ignored**:
+
+ * :paramref:`~sqlalchemy.schema.Column.primary_key` - SQL databases
+ typically do not support an ALTER operation that can add
+ individual columns one at a time to an existing primary key
+ constraint, therefore it's less ambiguous to use the
+ :meth:`.Operations.create_primary_key` method, which assumes no
+ existing primary key constraint is present.
+ * :paramref:`~sqlalchemy.schema.Column.unique` - use the
+ :meth:`.Operations.create_unique_constraint` method
+ * :paramref:`~sqlalchemy.schema.Column.index` - use the
+ :meth:`.Operations.create_index` method
+
+
+ The provided :class:`~sqlalchemy.schema.Column` object may include a
+ :class:`~sqlalchemy.schema.ForeignKey` constraint directive,
+ referencing a remote table name. For this specific type of constraint,
+ Alembic will automatically emit a second ALTER statement in order to
+ add the single-column FOREIGN KEY constraint separately::
+
+ from alembic import op
+ from sqlalchemy import Column, INTEGER, ForeignKey
+
+ op.add_column(
+ "organization",
+ Column("account_id", INTEGER, ForeignKey("accounts.id")),
+ )
+
+ The column argument passed to :meth:`.Operations.add_column` is a
+ :class:`~sqlalchemy.schema.Column` construct, used in the same way it's
+ used in SQLAlchemy. In particular, values or functions to be indicated
+ as producing the column's default value on the database side are
+ specified using the ``server_default`` parameter, and not ``default``
+ which only specifies Python-side defaults::
+
+ from alembic import op
+ from sqlalchemy import Column, TIMESTAMP, func
+
+ # specify "DEFAULT NOW" along with the column add
+ op.add_column(
+ "account",
+ Column("timestamp", TIMESTAMP, server_default=func.now()),
+ )
+
+ :param table_name: String name of the parent table.
+ :param column: a :class:`sqlalchemy.schema.Column` object
+ representing the new column.
+ :param schema: Optional schema name to operate within. To control
+ quoting of the schema outside of the default behavior, use
+ the SQLAlchemy construct
+ :class:`~sqlalchemy.sql.elements.quoted_name`.
+
+ """
+
+def alter_column(
+ table_name: str,
+ column_name: str,
+ *,
+ nullable: Optional[bool] = None,
+ comment: Union[str, Literal[False], None] = False,
+ server_default: Any = False,
+ new_column_name: Optional[str] = None,
+ type_: Union[TypeEngine, Type[TypeEngine], None] = None,
+ existing_type: Union[TypeEngine, Type[TypeEngine], None] = None,
+ existing_server_default: Union[
+ str, bool, Identity, Computed, None
+ ] = False,
+ existing_nullable: Optional[bool] = None,
+ existing_comment: Optional[str] = None,
+ schema: Optional[str] = None,
+ **kw: Any,
+) -> None:
+ r"""Issue an "alter column" instruction using the
+ current migration context.
+
+ Generally, only that aspect of the column which
+ is being changed, i.e. name, type, nullability,
+ default, needs to be specified. Multiple changes
+ can also be specified at once and the backend should
+ "do the right thing", emitting each change either
+ separately or together as the backend allows.
+
+ MySQL has special requirements here, since MySQL
+ cannot ALTER a column without a full specification.
+ When producing MySQL-compatible migration files,
+ it is recommended that the ``existing_type``,
+ ``existing_server_default``, and ``existing_nullable``
+ parameters be present, if not being altered.
+
+ Type changes which are against the SQLAlchemy
+ "schema" types :class:`~sqlalchemy.types.Boolean`
+ and :class:`~sqlalchemy.types.Enum` may also
+ add or drop constraints which accompany those
+ types on backends that don't support them natively.
+ The ``existing_type`` argument is
+ used in this case to identify and remove a previous
+ constraint that was bound to the type object.
+
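+     e.g., a representative sketch altering type and nullability together
+     (table and column names are illustrative)::
+
+         from alembic import op
+         import sqlalchemy as sa
+
+         op.alter_column(
+             "account",
+             "name",
+             existing_type=sa.VARCHAR(50),
+             type_=sa.Text(),
+             nullable=False,
+         )
+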
+ :param table_name: string name of the target table.
+ :param column_name: string name of the target column,
+ as it exists before the operation begins.
+ :param nullable: Optional; specify ``True`` or ``False``
+ to alter the column's nullability.
+ :param server_default: Optional; specify a string
+ SQL expression, :func:`~sqlalchemy.sql.expression.text`,
+ or :class:`~sqlalchemy.schema.DefaultClause` to indicate
+ an alteration to the column's default value.
+ Set to ``None`` to have the default removed.
+ :param comment: optional string text of a new comment to add to the
+ column.
+ :param new_column_name: Optional; specify a string name here to
+ indicate the new name within a column rename operation.
+ :param type\_: Optional; a :class:`~sqlalchemy.types.TypeEngine`
+ type object to specify a change to the column's type.
+ For SQLAlchemy types that also indicate a constraint (i.e.
+ :class:`~sqlalchemy.types.Boolean`, :class:`~sqlalchemy.types.Enum`),
+ the constraint is also generated.
+ :param autoincrement: set the ``AUTO_INCREMENT`` flag of the column;
+ currently understood by the MySQL dialect.
+ :param existing_type: Optional; a
+ :class:`~sqlalchemy.types.TypeEngine`
+ type object to specify the previous type. This
+ is required for all MySQL column alter operations that
+ don't otherwise specify a new type, as well as for
+ when nullability is being changed on a SQL Server
+ column. It is also used if the type is a so-called
+ SQLAlchemy "schema" type which may define a constraint (i.e.
+ :class:`~sqlalchemy.types.Boolean`,
+ :class:`~sqlalchemy.types.Enum`),
+ so that the constraint can be dropped.
+ :param existing_server_default: Optional; The existing
+ default value of the column. Required on MySQL if
+ an existing default is not being changed; else MySQL
+ removes the default.
+ :param existing_nullable: Optional; the existing nullability
+ of the column. Required on MySQL if the existing nullability
+ is not being changed; else MySQL sets this to NULL.
+ :param existing_autoincrement: Optional; the existing autoincrement
+ of the column. Used for MySQL's system of altering a column
+ that specifies ``AUTO_INCREMENT``.
+ :param existing_comment: string text of the existing comment on the
+ column to be maintained. Required on MySQL if the existing comment
+ on the column is not being changed.
+ :param schema: Optional schema name to operate within. To control
+ quoting of the schema outside of the default behavior, use
+ the SQLAlchemy construct
+ :class:`~sqlalchemy.sql.elements.quoted_name`.
+ :param postgresql_using: String argument which will indicate a
+ SQL expression to render within the Postgresql-specific USING clause
+ within ALTER COLUMN. This string is taken directly as raw SQL which
+ must explicitly include any necessary quoting or escaping of tokens
+ within the expression.
+
+ """
+
+@contextmanager
+def batch_alter_table(
+ table_name: str,
+ schema: Optional[str] = None,
+ recreate: Literal["auto", "always", "never"] = "auto",
+ partial_reordering: Optional[tuple] = None,
+ copy_from: Optional[Table] = None,
+ table_args: Tuple[Any, ...] = (),
+ table_kwargs: Mapping[str, Any] = immutabledict({}),
+ reflect_args: Tuple[Any, ...] = (),
+ reflect_kwargs: Mapping[str, Any] = immutabledict({}),
+ naming_convention: Optional[Dict[str, str]] = None,
+) -> Iterator[BatchOperations]:
+ """Invoke a series of per-table migrations in batch.
+
+ Batch mode allows a series of operations specific to a table
+ to be syntactically grouped together, and allows for alternate
+ modes of table migration, in particular the "recreate" style of
+ migration required by SQLite.
+
+ "recreate" style is as follows:
+
+ 1. A new table is created with the new specification, based on the
+ migration directives within the batch, using a temporary name.
+
+     2. The data is copied from the existing table to the new table.
+
+     3. The existing table is dropped.
+
+     4. The new table is renamed to the existing table name.
+
+ The directive by default will only use "recreate" style on the
+ SQLite backend, and only if directives are present which require
+ this form, e.g. anything other than ``add_column()``. The batch
+ operation on other backends will proceed using standard ALTER TABLE
+ operations.
+
+ The method is used as a context manager, which returns an instance
+ of :class:`.BatchOperations`; this object is the same as
+ :class:`.Operations` except that table names and schema names
+ are omitted. E.g.::
+
+ with op.batch_alter_table("some_table") as batch_op:
+ batch_op.add_column(Column("foo", Integer))
+ batch_op.drop_column("bar")
+
+ The operations within the context manager are invoked at once
+ when the context is ended. When run against SQLite, if the
+ migrations include operations not supported by SQLite's ALTER TABLE,
+ the entire table will be copied to a new one with the new
+ specification, moving all data across as well.
+
+ The copy operation by default uses reflection to retrieve the current
+ structure of the table, and therefore :meth:`.batch_alter_table`
+ in this mode requires that the migration is run in "online" mode.
+ The ``copy_from`` parameter may be passed which refers to an existing
+ :class:`.Table` object, which will bypass this reflection step.
+
+ .. note:: The table copy operation will currently not copy
+ CHECK constraints, and may not copy UNIQUE constraints that are
+ unnamed, as is possible on SQLite. See the section
+ :ref:`sqlite_batch_constraints` for workarounds.
+
+ :param table_name: name of table
+ :param schema: optional schema name.
+ :param recreate: under what circumstances the table should be
+ recreated. At its default of ``"auto"``, the SQLite dialect will
+ recreate the table if any operations other than ``add_column()``,
+ ``create_index()``, or ``drop_index()`` are
+ present. Other options include ``"always"`` and ``"never"``.
+ :param copy_from: optional :class:`~sqlalchemy.schema.Table` object
+ that will act as the structure of the table being copied. If omitted,
+ table reflection is used to retrieve the structure of the table.
+
+ .. seealso::
+
+ :ref:`batch_offline_mode`
+
+ :paramref:`~.Operations.batch_alter_table.reflect_args`
+
+ :paramref:`~.Operations.batch_alter_table.reflect_kwargs`
+
+ :param reflect_args: a sequence of additional positional arguments that
+ will be applied to the table structure being reflected / copied;
+ this may be used to pass column and constraint overrides to the
+ table that will be reflected, in lieu of passing the whole
+ :class:`~sqlalchemy.schema.Table` using
+ :paramref:`~.Operations.batch_alter_table.copy_from`.
+ :param reflect_kwargs: a dictionary of additional keyword arguments
+ that will be applied to the table structure being copied; this may be
+ used to pass additional table and reflection options to the table that
+ will be reflected, in lieu of passing the whole
+ :class:`~sqlalchemy.schema.Table` using
+ :paramref:`~.Operations.batch_alter_table.copy_from`.
+ :param table_args: a sequence of additional positional arguments that
+ will be applied to the new :class:`~sqlalchemy.schema.Table` when
+ created, in addition to those copied from the source table.
+ This may be used to provide additional constraints such as CHECK
+ constraints that may not be reflected.
+ :param table_kwargs: a dictionary of additional keyword arguments
+ that will be applied to the new :class:`~sqlalchemy.schema.Table`
+ when created, in addition to those copied from the source table.
+ This may be used to provide for additional table options that may
+ not be reflected.
+ :param naming_convention: a naming convention dictionary of the form
+ described at :ref:`autogen_naming_conventions` which will be applied
+ to the :class:`~sqlalchemy.schema.MetaData` during the reflection
+ process. This is typically required if one wants to drop SQLite
+ constraints, as these constraints will not have names when
+ reflected on this backend. Requires SQLAlchemy **0.9.4** or greater.
+
+ .. seealso::
+
+ :ref:`dropping_sqlite_foreign_keys`
+
+ :param partial_reordering: a list of tuples, each suggesting a desired
+ ordering of two or more columns in the newly created table. Requires
+ that :paramref:`.batch_alter_table.recreate` is set to ``"always"``.
+ Examples, given a table with columns "a", "b", "c", and "d":
+
+ Specify the order of all columns::
+
+ with op.batch_alter_table(
+ "some_table",
+ recreate="always",
+ partial_reordering=[("c", "d", "a", "b")],
+ ) as batch_op:
+ pass
+
+ Ensure "d" appears before "c", and "b", appears before "a"::
+
+ with op.batch_alter_table(
+ "some_table",
+ recreate="always",
+ partial_reordering=[("d", "c"), ("b", "a")],
+ ) as batch_op:
+ pass
+
+     The ordering of columns not included in the partial_reordering
+     set is undefined. It is therefore best to specify the complete
+     ordering of all columns.
+
+ .. note:: batch mode requires SQLAlchemy 0.8 or above.
+
+ .. seealso::
+
+ :ref:`batch_migrations`
+
+ """
+
+def bulk_insert(
+ table: Union[Table, TableClause],
+ rows: List[dict],
+ *,
+ multiinsert: bool = True,
+) -> None:
+ """Issue a "bulk insert" operation using the current
+ migration context.
+
+ This provides a means of representing an INSERT of multiple rows
+ which works equally well in the context of executing on a live
+ connection as well as that of generating a SQL script. In the
+ case of a SQL script, the values are rendered inline into the
+ statement.
+
+ e.g.::
+
+ from alembic import op
+ from datetime import date
+ from sqlalchemy.sql import table, column
+ from sqlalchemy import String, Integer, Date
+
+ # Create an ad-hoc table to use for the insert statement.
+ accounts_table = table(
+ "account",
+ column("id", Integer),
+ column("name", String),
+ column("create_date", Date),
+ )
+
+ op.bulk_insert(
+ accounts_table,
+ [
+ {
+ "id": 1,
+ "name": "John Smith",
+ "create_date": date(2010, 10, 5),
+ },
+ {
+ "id": 2,
+ "name": "Ed Williams",
+ "create_date": date(2007, 5, 27),
+ },
+ {
+ "id": 3,
+ "name": "Wendy Jones",
+ "create_date": date(2008, 8, 15),
+ },
+ ],
+ )
+
+ When using --sql mode, some datatypes may not render inline
+ automatically, such as dates and other special types. When this
+ issue is present, :meth:`.Operations.inline_literal` may be used::
+
+ op.bulk_insert(
+ accounts_table,
+ [
+ {
+ "id": 1,
+ "name": "John Smith",
+ "create_date": op.inline_literal("2010-10-05"),
+ },
+ {
+ "id": 2,
+ "name": "Ed Williams",
+ "create_date": op.inline_literal("2007-05-27"),
+ },
+ {
+ "id": 3,
+ "name": "Wendy Jones",
+ "create_date": op.inline_literal("2008-08-15"),
+ },
+ ],
+ multiinsert=False,
+ )
+
+ When using :meth:`.Operations.inline_literal` in conjunction with
+ :meth:`.Operations.bulk_insert`, in order for the statement to work
+ in "online" (e.g. non --sql) mode, the
+ :paramref:`~.Operations.bulk_insert.multiinsert`
+ flag should be set to ``False``, which will have the effect of
+ individual INSERT statements being emitted to the database, each
+ with a distinct VALUES clause, so that the "inline" values can
+ still be rendered, rather than attempting to pass the values
+ as bound parameters.
+
+ :param table: a table object which represents the target of the INSERT.
+
+ :param rows: a list of dictionaries indicating rows.
+
+ :param multiinsert: when at its default of True and --sql mode is not
+ enabled, the INSERT statement will be executed using
+ "executemany()" style, where all elements in the list of
+ dictionaries are passed as bound parameters in a single
+ list. Setting this to False results in individual INSERT
+ statements being emitted per parameter set, and is needed
+ in those cases where non-literal values are present in the
+ parameter sets.
+
+ """
+
+def create_check_constraint(
+ constraint_name: Optional[str],
+ table_name: str,
+ condition: Union[str, ColumnElement[bool], TextClause],
+ *,
+ schema: Optional[str] = None,
+ **kw: Any,
+) -> None:
+ """Issue a "create check constraint" instruction using the
+ current migration context.
+
+ e.g.::
+
+ from alembic import op
+ from sqlalchemy.sql import column, func
+
+ op.create_check_constraint(
+ "ck_user_name_len",
+ "user",
+ func.len(column("name")) > 5,
+ )
+
+ CHECK constraints are usually against a SQL expression, so ad-hoc
+ table metadata is usually needed. The function will convert the given
+ arguments into a :class:`sqlalchemy.schema.CheckConstraint` bound
+ to an anonymous table in order to emit the CREATE statement.
+
+     :param constraint_name: Name of the check constraint. The name is necessary
+ so that an ALTER statement can be emitted. For setups that
+ use an automated naming scheme such as that described at
+ :ref:`sqla:constraint_naming_conventions`,
+ ``name`` here can be ``None``, as the event listener will
+ apply the name to the constraint object when it is associated
+ with the table.
+ :param table_name: String name of the source table.
+ :param condition: SQL expression that's the condition of the
+ constraint. Can be a string or SQLAlchemy expression language
+ structure.
+ :param deferrable: optional bool. If set, emit DEFERRABLE or
+ NOT DEFERRABLE when issuing DDL for this constraint.
+ :param initially: optional string. If set, emit INITIALLY
+ when issuing DDL for this constraint.
+ :param schema: Optional schema name to operate within. To control
+ quoting of the schema outside of the default behavior, use
+ the SQLAlchemy construct
+ :class:`~sqlalchemy.sql.elements.quoted_name`.
+
+ """
+
+def create_exclude_constraint(
+ constraint_name: str, table_name: str, *elements: Any, **kw: Any
+) -> Optional[Table]:
+ """Issue an alter to create an EXCLUDE constraint using the
+ current migration context.
+
+ .. note:: This method is Postgresql specific, and additionally
+ requires at least SQLAlchemy 1.0.
+
+ e.g.::
+
+ from alembic import op
+
+ op.create_exclude_constraint(
+ "user_excl",
+ "user",
+ ("period", "&&"),
+ ("group", "="),
+ where=("group != 'some group'"),
+ )
+
+ Note that the expressions work the same way as that of
+ the ``ExcludeConstraint`` object itself; if plain strings are
+ passed, quoting rules must be applied manually.
+
+     :param constraint_name: Name of the constraint.
+ :param table_name: String name of the source table.
+ :param elements: exclude conditions.
+ :param where: SQL expression or SQL string with optional WHERE
+ clause.
+ :param deferrable: optional bool. If set, emit DEFERRABLE or
+ NOT DEFERRABLE when issuing DDL for this constraint.
+ :param initially: optional string. If set, emit INITIALLY
+ when issuing DDL for this constraint.
+ :param schema: Optional schema name to operate within.
+
+ """
+
+def create_foreign_key(
+ constraint_name: Optional[str],
+ source_table: str,
+ referent_table: str,
+ local_cols: List[str],
+ remote_cols: List[str],
+ *,
+ onupdate: Optional[str] = None,
+ ondelete: Optional[str] = None,
+ deferrable: Optional[bool] = None,
+ initially: Optional[str] = None,
+ match: Optional[str] = None,
+ source_schema: Optional[str] = None,
+ referent_schema: Optional[str] = None,
+ **dialect_kw: Any,
+) -> None:
+ """Issue a "create foreign key" instruction using the
+ current migration context.
+
+ e.g.::
+
+ from alembic import op
+
+ op.create_foreign_key(
+ "fk_user_address",
+ "address",
+ "user",
+ ["user_id"],
+ ["id"],
+ )
+
+ This internally generates a :class:`~sqlalchemy.schema.Table` object
+ containing the necessary columns, then generates a new
+ :class:`~sqlalchemy.schema.ForeignKeyConstraint`
+ object which it then associates with the
+ :class:`~sqlalchemy.schema.Table`.
+ Any event listeners associated with this action will be fired
+ off normally. The :class:`~sqlalchemy.schema.AddConstraint`
+ construct is ultimately used to generate the ALTER statement.
+
+ :param constraint_name: Name of the foreign key constraint. The name
+ is necessary so that an ALTER statement can be emitted. For setups
+ that use an automated naming scheme such as that described at
+ :ref:`sqla:constraint_naming_conventions`,
+ ``name`` here can be ``None``, as the event listener will
+ apply the name to the constraint object when it is associated
+ with the table.
+ :param source_table: String name of the source table.
+ :param referent_table: String name of the destination table.
+ :param local_cols: a list of string column names in the
+ source table.
+ :param remote_cols: a list of string column names in the
+ remote table.
+     :param onupdate: Optional string. If set, emit ON UPDATE when
+     issuing DDL for this constraint. Typical values include CASCADE,
+     SET NULL and RESTRICT.
+     :param ondelete: Optional string. If set, emit ON DELETE when
+     issuing DDL for this constraint. Typical values include CASCADE,
+     SET NULL and RESTRICT.
+ :param deferrable: optional bool. If set, emit DEFERRABLE or NOT
+ DEFERRABLE when issuing DDL for this constraint.
+ :param source_schema: Optional schema name of the source table.
+ :param referent_schema: Optional schema name of the destination table.
+
+ """
+
+def create_index(
+ index_name: Optional[str],
+ table_name: str,
+ columns: Sequence[Union[str, TextClause, Function[Any]]],
+ *,
+ schema: Optional[str] = None,
+ unique: bool = False,
+ if_not_exists: Optional[bool] = None,
+ **kw: Any,
+) -> None:
+ r"""Issue a "create index" instruction using the current
+ migration context.
+
+ e.g.::
+
+ from alembic import op
+
+ op.create_index("ik_test", "t1", ["foo", "bar"])
+
+ Functional indexes can be produced by using the
+ :func:`sqlalchemy.sql.expression.text` construct::
+
+ from alembic import op
+ from sqlalchemy import text
+
+ op.create_index("ik_test", "t1", [text("lower(foo)")])
+
+ :param index_name: name of the index.
+ :param table_name: name of the owning table.
+ :param columns: a list consisting of string column names and/or
+ :func:`~sqlalchemy.sql.expression.text` constructs.
+ :param schema: Optional schema name to operate within. To control
+ quoting of the schema outside of the default behavior, use
+ the SQLAlchemy construct
+ :class:`~sqlalchemy.sql.elements.quoted_name`.
+ :param unique: If True, create a unique index.
+
+ :param quote: Force quoting of this column's name on or off,
+ corresponding to ``True`` or ``False``. When left at its default
+ of ``None``, the column identifier will be quoted according to
+ whether the name is case sensitive (identifiers with at least one
+ upper case character are treated as case sensitive), or if it's a
+ reserved word. This flag is only needed to force quoting of a
+ reserved word which is not known by the SQLAlchemy dialect.
+
+ :param if_not_exists: If True, adds IF NOT EXISTS operator when
+ creating the new index.
+
+ .. versionadded:: 1.12.0
+
+ :param \**kw: Additional keyword arguments not mentioned above are
+ dialect specific, and passed in the form
+     ``<dialectname>_<argname>``.
+ See the documentation regarding an individual dialect at
+ :ref:`dialect_toplevel` for detail on documented arguments.
+
+ """
+
+def create_primary_key(
+ constraint_name: Optional[str],
+ table_name: str,
+ columns: List[str],
+ *,
+ schema: Optional[str] = None,
+) -> None:
+ """Issue a "create primary key" instruction using the current
+ migration context.
+
+ e.g.::
+
+ from alembic import op
+
+ op.create_primary_key("pk_my_table", "my_table", ["id", "version"])
+
+ This internally generates a :class:`~sqlalchemy.schema.Table` object
+ containing the necessary columns, then generates a new
+ :class:`~sqlalchemy.schema.PrimaryKeyConstraint`
+ object which it then associates with the
+ :class:`~sqlalchemy.schema.Table`.
+ Any event listeners associated with this action will be fired
+ off normally. The :class:`~sqlalchemy.schema.AddConstraint`
+ construct is ultimately used to generate the ALTER statement.
+
+ :param constraint_name: Name of the primary key constraint. The name
+ is necessary so that an ALTER statement can be emitted. For setups
+ that use an automated naming scheme such as that described at
+ :ref:`sqla:constraint_naming_conventions`
+ ``name`` here can be ``None``, as the event listener will
+ apply the name to the constraint object when it is associated
+ with the table.
+ :param table_name: String name of the target table.
+ :param columns: a list of string column names to be applied to the
+ primary key constraint.
+ :param schema: Optional schema name to operate within. To control
+ quoting of the schema outside of the default behavior, use
+ the SQLAlchemy construct
+ :class:`~sqlalchemy.sql.elements.quoted_name`.
+
+ """
+
+def create_table(table_name: str, *columns: SchemaItem, **kw: Any) -> Table:
+ r"""Issue a "create table" instruction using the current migration
+ context.
+
+ This directive receives an argument list similar to that of the
+ traditional :class:`sqlalchemy.schema.Table` construct, but without the
+ metadata::
+
+ from sqlalchemy import INTEGER, VARCHAR, NVARCHAR, Column
+ from alembic import op
+
+ op.create_table(
+ "account",
+ Column("id", INTEGER, primary_key=True),
+ Column("name", VARCHAR(50), nullable=False),
+ Column("description", NVARCHAR(200)),
+ Column("timestamp", TIMESTAMP, server_default=func.now()),
+ )
+
+ Note that :meth:`.create_table` accepts
+ :class:`~sqlalchemy.schema.Column`
+ constructs directly from the SQLAlchemy library. In particular,
+ default values to be created on the database side are
+ specified using the ``server_default`` parameter, and not
+ ``default`` which only specifies Python-side defaults::
+
+ from alembic import op
+ from sqlalchemy import Column, TIMESTAMP, func
+
+ # specify "DEFAULT NOW" along with the "timestamp" column
+ op.create_table(
+ "account",
+ Column("id", INTEGER, primary_key=True),
+ Column("timestamp", TIMESTAMP, server_default=func.now()),
+ )
+
+ The function also returns a newly created
+ :class:`~sqlalchemy.schema.Table` object, corresponding to the table
+ specification given, which is suitable for
+ immediate SQL operations, in particular
+ :meth:`.Operations.bulk_insert`::
+
+ from sqlalchemy import INTEGER, VARCHAR, NVARCHAR, Column
+ from alembic import op
+
+ account_table = op.create_table(
+ "account",
+ Column("id", INTEGER, primary_key=True),
+ Column("name", VARCHAR(50), nullable=False),
+ Column("description", NVARCHAR(200)),
+ Column("timestamp", TIMESTAMP, server_default=func.now()),
+ )
+
+ op.bulk_insert(
+ account_table,
+ [
+ {"name": "A1", "description": "account 1"},
+ {"name": "A2", "description": "account 2"},
+ ],
+ )
+
+ :param table_name: Name of the table
+ :param \*columns: collection of :class:`~sqlalchemy.schema.Column`
+ objects within
+ the table, as well as optional :class:`~sqlalchemy.schema.Constraint`
+ objects
+     and :class:`~sqlalchemy.schema.Index` objects.
+ :param schema: Optional schema name to operate within. To control
+ quoting of the schema outside of the default behavior, use
+ the SQLAlchemy construct
+ :class:`~sqlalchemy.sql.elements.quoted_name`.
+ :param \**kw: Other keyword arguments are passed to the underlying
+ :class:`sqlalchemy.schema.Table` object created for the command.
+
+ :return: the :class:`~sqlalchemy.schema.Table` object corresponding
+ to the parameters given.
+
+ """
+
+def create_table_comment(
+ table_name: str,
+ comment: Optional[str],
+ *,
+ existing_comment: Optional[str] = None,
+ schema: Optional[str] = None,
+) -> None:
+ """Emit a COMMENT ON operation to set the comment for a table.
+
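+     e.g., a minimal sketch (names are illustrative)::
+
+         op.create_table_comment("account", "Holds customer account rows")
+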
+ :param table_name: string name of the target table.
+ :param comment: string value of the comment being registered against
+ the specified table.
+ :param existing_comment: String value of a comment
+ already registered on the specified table, used within autogenerate
+ so that the operation is reversible, but not required for direct
+ use.
+
+ .. seealso::
+
+ :meth:`.Operations.drop_table_comment`
+
+ :paramref:`.Operations.alter_column.comment`
+
+ """
+
+def create_unique_constraint(
+ constraint_name: Optional[str],
+ table_name: str,
+ columns: Sequence[str],
+ *,
+ schema: Optional[str] = None,
+ **kw: Any,
+) -> Any:
+ """Issue a "create unique constraint" instruction using the
+ current migration context.
+
+ e.g.::
+
+ from alembic import op
+ op.create_unique_constraint("uq_user_name", "user", ["name"])
+
+ This internally generates a :class:`~sqlalchemy.schema.Table` object
+ containing the necessary columns, then generates a new
+ :class:`~sqlalchemy.schema.UniqueConstraint`
+ object which it then associates with the
+ :class:`~sqlalchemy.schema.Table`.
+ Any event listeners associated with this action will be fired
+ off normally. The :class:`~sqlalchemy.schema.AddConstraint`
+ construct is ultimately used to generate the ALTER statement.
+
+     :param constraint_name: Name of the unique constraint. The name is necessary
+ so that an ALTER statement can be emitted. For setups that
+ use an automated naming scheme such as that described at
+ :ref:`sqla:constraint_naming_conventions`,
+ ``name`` here can be ``None``, as the event listener will
+ apply the name to the constraint object when it is associated
+ with the table.
+ :param table_name: String name of the source table.
+ :param columns: a list of string column names in the
+ source table.
+ :param deferrable: optional bool. If set, emit DEFERRABLE or
+ NOT DEFERRABLE when issuing DDL for this constraint.
+ :param initially: optional string. If set, emit INITIALLY
+ when issuing DDL for this constraint.
+ :param schema: Optional schema name to operate within. To control
+ quoting of the schema outside of the default behavior, use
+ the SQLAlchemy construct
+ :class:`~sqlalchemy.sql.elements.quoted_name`.
+
+ """
+
+def drop_column(
+ table_name: str,
+ column_name: str,
+ *,
+ schema: Optional[str] = None,
+ **kw: Any,
+) -> None:
+ """Issue a "drop column" instruction using the current
+ migration context.
+
+ e.g.::
+
+ drop_column("organization", "account_id")
+
+ :param table_name: name of table
+ :param column_name: name of column
+ :param schema: Optional schema name to operate within. To control
+ quoting of the schema outside of the default behavior, use
+ the SQLAlchemy construct
+ :class:`~sqlalchemy.sql.elements.quoted_name`.
+ :param mssql_drop_check: Optional boolean. When ``True``, on
+ Microsoft SQL Server only, first
+ drop the CHECK constraint on the column using a
+ SQL-script-compatible
+ block that selects into a @variable from sys.check_constraints,
+ then exec's a separate DROP CONSTRAINT for that constraint.
+ :param mssql_drop_default: Optional boolean. When ``True``, on
+ Microsoft SQL Server only, first
+ drop the DEFAULT constraint on the column using a
+ SQL-script-compatible
+ block that selects into a @variable from sys.default_constraints,
+ then exec's a separate DROP CONSTRAINT for that default.
+ :param mssql_drop_foreign_key: Optional boolean. When ``True``, on
+ Microsoft SQL Server only, first
+ drop a single FOREIGN KEY constraint on the column using a
+ SQL-script-compatible
+ block that selects into a @variable from
+ sys.foreign_keys/sys.foreign_key_columns,
+ then exec's a separate DROP CONSTRAINT for that default. Only
+ works if the column has exactly one FK constraint which refers to
+ it, at the moment.
+
+ """
+
+def drop_constraint(
+ constraint_name: str,
+ table_name: str,
+ type_: Optional[str] = None,
+ *,
+ schema: Optional[str] = None,
+) -> None:
+ r"""Drop a constraint of the given name, typically via DROP CONSTRAINT.
+
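+     e.g., a minimal sketch (names are illustrative)::
+
+         op.drop_constraint("fk_user_address", "address", type_="foreignkey")
+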
+ :param constraint_name: name of the constraint.
+ :param table_name: table name.
+ :param type\_: optional, required on MySQL. can be
+ 'foreignkey', 'primary', 'unique', or 'check'.
+ :param schema: Optional schema name to operate within. To control
+ quoting of the schema outside of the default behavior, use
+ the SQLAlchemy construct
+ :class:`~sqlalchemy.sql.elements.quoted_name`.
+
+ """
+
+def drop_index(
+ index_name: str,
+ table_name: Optional[str] = None,
+ *,
+ schema: Optional[str] = None,
+ if_exists: Optional[bool] = None,
+ **kw: Any,
+) -> None:
+ r"""Issue a "drop index" instruction using the current
+ migration context.
+
+ e.g.::
+
+ drop_index("accounts")
+
+ :param index_name: name of the index.
+ :param table_name: name of the owning table. Some
+ backends such as Microsoft SQL Server require this.
+ :param schema: Optional schema name to operate within. To control
+ quoting of the schema outside of the default behavior, use
+ the SQLAlchemy construct
+ :class:`~sqlalchemy.sql.elements.quoted_name`.
+
+ :param if_exists: If True, adds IF EXISTS operator when
+ dropping the index.
+
+ .. versionadded:: 1.12.0
+
+ :param \**kw: Additional keyword arguments not mentioned above are
+ dialect specific, and passed in the form
+     ``<dialectname>_<argname>``.
+ See the documentation regarding an individual dialect at
+ :ref:`dialect_toplevel` for detail on documented arguments.
+
+ """
+
+def drop_table(
+ table_name: str, *, schema: Optional[str] = None, **kw: Any
+) -> None:
+ r"""Issue a "drop table" instruction using the current
+ migration context.
+
+
+ e.g.::
+
+ drop_table("accounts")
+
+ :param table_name: Name of the table
+ :param schema: Optional schema name to operate within. To control
+ quoting of the schema outside of the default behavior, use
+ the SQLAlchemy construct
+ :class:`~sqlalchemy.sql.elements.quoted_name`.
+ :param \**kw: Other keyword arguments are passed to the underlying
+ :class:`sqlalchemy.schema.Table` object created for the command.
+
+ """
+
+def drop_table_comment(
+ table_name: str,
+ *,
+ existing_comment: Optional[str] = None,
+ schema: Optional[str] = None,
+) -> None:
+ """Issue a "drop table comment" operation to
+ remove an existing comment set on a table.
+
+ :param table_name: string name of the target table.
+ :param existing_comment: An optional string value of a comment already
+ registered on the specified table.
+
+ .. seealso::
+
+ :meth:`.Operations.create_table_comment`
+
+ :paramref:`.Operations.alter_column.comment`
+
+ """
+
+def execute(
+ sqltext: Union[Executable, str],
+ *,
+ execution_options: Optional[dict[str, Any]] = None,
+) -> None:
+ r"""Execute the given SQL using the current migration context.
+
+ The given SQL can be a plain string, e.g.::
+
+ op.execute("INSERT INTO table (foo) VALUES ('some value')")
+
+ Or it can be any kind of Core SQL Expression construct, such as
+ below where we use an update construct::
+
+ from sqlalchemy.sql import table, column
+ from sqlalchemy import String
+ from alembic import op
+
+ account = table("account", column("name", String))
+ op.execute(
+ account.update()
+ .where(account.c.name == op.inline_literal("account 1"))
+ .values({"name": op.inline_literal("account 2")})
+ )
+
+ Above, we made use of the SQLAlchemy
+ :func:`sqlalchemy.sql.expression.table` and
+ :func:`sqlalchemy.sql.expression.column` constructs to make a brief,
+ ad-hoc table construct just for our UPDATE statement. A full
+ :class:`~sqlalchemy.schema.Table` construct of course works perfectly
+ fine as well, though note it's a recommended practice to at least
+ ensure the definition of a table is self-contained within the migration
+ script, rather than imported from a module that may break compatibility
+ with older migrations.
+
+ In a SQL script context, the statement is emitted directly to the
+ output stream. There is *no* return result, however, as this
+ function is oriented towards generating a change script
+ that can run in "offline" mode. Additionally, parameterized
+ statements are discouraged here, as they *will not work* in offline
+ mode. Above, we use :meth:`.inline_literal` where parameters are
+ to be used.
+
+ For full interaction with a connected database where parameters can
+ also be used normally, use the "bind" available from the context::
+
+ from alembic import op
+
+ connection = op.get_bind()
+
+ connection.execute(
+ account.update()
+ .where(account.c.name == "account 1")
+ .values({"name": "account 2"})
+ )
+
+ Additionally, when passing the statement as a plain string, it is first
+ coerced into a :func:`sqlalchemy.sql.expression.text` construct
+ before being passed along. In the less likely case that the
+ literal SQL string contains a colon, it must be escaped with a
+ backslash, as::
+
+ op.execute(r"INSERT INTO table (foo) VALUES ('\:colon_value')")
+
+
+ :param sqltext: Any legal SQLAlchemy expression, including:
+
+ * a string
+ * a :func:`sqlalchemy.sql.expression.text` construct.
+ * a :func:`sqlalchemy.sql.expression.insert` construct.
+ * a :func:`sqlalchemy.sql.expression.update` construct.
+ * a :func:`sqlalchemy.sql.expression.delete` construct.
+ * Any "executable" described in SQLAlchemy Core documentation,
+ noting that no result set is returned.
+
+ .. note:: when passing a plain string, the statement is coerced into
+ a :func:`sqlalchemy.sql.expression.text` construct. This construct
+ considers symbols with colons, e.g. ``:foo`` to be bound parameters.
+ To avoid this, ensure that colon symbols are escaped, e.g.
+ ``\:foo``.
+
+ :param execution_options: Optional dictionary of
+ execution options, will be passed to
+ :meth:`sqlalchemy.engine.Connection.execution_options`.
+ """
+
+def f(name: str) -> conv:
+ """Indicate a string name that has already had a naming convention
+ applied to it.
+
+ This feature combines with the SQLAlchemy ``naming_convention`` feature
+ to disambiguate constraint names that have already had naming
+ conventions applied to them, versus those that have not. This is
+ necessary in the case that the ``"%(constraint_name)s"`` token
+ is used within a naming convention, so that it can be identified
+ that this particular name should remain fixed.
+
+ If the :meth:`.Operations.f` is used on a constraint, the naming
+ convention will not take effect::
+
+ op.add_column("t", "x", Boolean(name=op.f("ck_bool_t_x")))
+
+ Above, the CHECK constraint generated will have the name
+ ``ck_bool_t_x`` regardless of whether or not a naming convention is
+ in use.
+
+ Alternatively, if a naming convention is in use, and 'f' is not used,
+ names will be converted along conventions. If the ``target_metadata``
+ contains the naming convention
+ ``{"ck": "ck_bool_%(table_name)s_%(constraint_name)s"}``, then the
+     output of the following::
+
+         op.add_column("t", Column("x", Boolean(name="x")))
+
+     will be::
+
+         CONSTRAINT ck_bool_t_x CHECK (x in (1, 0))
+
+ The function is rendered in the output of autogenerate when
+ a particular constraint name is already converted.
+
+ """
+
+def get_bind() -> Connection:
+ """Return the current 'bind'.
+
+ Under normal circumstances, this is the
+ :class:`~sqlalchemy.engine.Connection` currently being used
+ to emit SQL to the database.
+
+ In a SQL script context, this value is ``None``. [TODO: verify this]
+
+ """
+
+def get_context() -> MigrationContext:
+ """Return the :class:`.MigrationContext` object that's
+ currently in use.
+
+ """
+
+def implementation_for(op_cls: Any) -> Callable[..., Any]:
+ """Register an implementation for a given :class:`.MigrateOperation`.
+
+ This is part of the operation extensibility API.
+
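+     e.g., a condensed sketch, assuming a ``CreateSequenceOp`` operation
+     class has already been registered (names are illustrative)::
+
+         from alembic.operations import Operations
+
+         @Operations.implementation_for(CreateSequenceOp)
+         def create_sequence(operations, operation):
+             operations.execute("CREATE SEQUENCE %s" % operation.sequence_name)
+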
+ .. seealso::
+
+ :ref:`operation_plugins` - example of use
+
+ """
+
+def inline_literal(
+ value: Union[str, int], type_: Optional[TypeEngine] = None
+) -> _literal_bindparam:
+ r"""Produce an 'inline literal' expression, suitable for
+ using in an INSERT, UPDATE, or DELETE statement.
+
+ When using Alembic in "offline" mode, CRUD operations
+ aren't compatible with SQLAlchemy's default behavior surrounding
+ literal values,
+ which is that they are converted into bound values and passed
+ separately into the ``execute()`` method of the DBAPI cursor.
+ An offline SQL
+ script needs to have these rendered inline. While it should
+ always be noted that inline literal values are an **enormous**
+ security hole in an application that handles untrusted input,
+ a schema migration is not run in this context, so
+ literals are safe to render inline, with the caveat that
+ advanced types like dates may not be supported directly
+ by SQLAlchemy.
+
+ See :meth:`.Operations.execute` for an example usage of
+ :meth:`.Operations.inline_literal`.
+
+ The environment can also be configured to attempt to render
+ "literal" values inline automatically, for those simple types
+ that are supported by the dialect; see
+ :paramref:`.EnvironmentContext.configure.literal_binds` for this
+ more recently added feature.
+
+ :param value: The value to render. Strings, integers, and simple
+ numerics should be supported. Other types like boolean,
+ dates, etc. may or may not be supported yet by various
+ backends.
+ :param type\_: optional - a :class:`sqlalchemy.types.TypeEngine`
+ subclass stating the type of this value. In SQLAlchemy
+ expressions, this is usually derived automatically
+ from the Python type of the value itself, as well as
+ based on the context in which the value is used.
+
+ .. seealso::
+
+ :paramref:`.EnvironmentContext.configure.literal_binds`
+
+ """
+
+def invoke(operation: MigrateOperation) -> Any:
+ """Given a :class:`.MigrateOperation`, invoke it in terms of
+ this :class:`.Operations` instance.
+
+ """
+
+def register_operation(
+ name: str, sourcename: Optional[str] = None
+) -> Callable[[_T], _T]:
+ """Register a new operation for this class.
+
+ This method is normally used to add new operations
+ to the :class:`.Operations` class, and possibly the
+ :class:`.BatchOperations` class as well. All Alembic migration
+ operations are implemented via this system, however the system
+ is also available as a public API to facilitate adding custom
+ operations.
+
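+     e.g., a condensed sketch of registering a custom ``create_sequence``
+     directive (class and attribute names are illustrative)::
+
+         from alembic.operations import Operations, MigrateOperation
+
+         @Operations.register_operation("create_sequence")
+         class CreateSequenceOp(MigrateOperation):
+             def __init__(self, sequence_name):
+                 self.sequence_name = sequence_name
+
+             @classmethod
+             def create_sequence(cls, operations, sequence_name, **kw):
+                 op = CreateSequenceOp(sequence_name)
+                 return operations.invoke(op)
+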
+ .. seealso::
+
+ :ref:`operation_plugins`
+
+
+ """
+
+def rename_table(
+ old_table_name: str, new_table_name: str, *, schema: Optional[str] = None
+) -> None:
+ """Emit an ALTER TABLE to rename a table.
+
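+     e.g., a minimal sketch (names are illustrative)::
+
+         op.rename_table("user", "account")
+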
+ :param old_table_name: old name.
+ :param new_table_name: new name.
+ :param schema: Optional schema name to operate within. To control
+ quoting of the schema outside of the default behavior, use
+ the SQLAlchemy construct
+ :class:`~sqlalchemy.sql.elements.quoted_name`.
+
+ """
+
+def run_async(
+ async_function: Callable[..., Awaitable[_T]], *args: Any, **kw_args: Any
+) -> _T:
+ """Invoke the given asynchronous callable, passing an asynchronous
+ :class:`~sqlalchemy.ext.asyncio.AsyncConnection` as the first
+ argument.
+
+ This method allows calling async functions from within the
+ synchronous ``upgrade()`` or ``downgrade()`` alembic migration
+ method.
+
+ The async connection passed to the callable shares the same
+ transaction as the connection running in the migration context.
+
+     Any additional args or kwargs passed to this function are passed
+     on to the provided async function.
+
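+     e.g., a brief sketch (``fetch_names`` and the ``account`` table are
+     illustrative)::
+
+         from alembic import op
+         from sqlalchemy import text
+
+         async def fetch_names(connection):
+             result = await connection.execute(text("SELECT name FROM account"))
+             return [row.name for row in result]
+
+         def upgrade() -> None:
+             names = op.run_async(fetch_names)
+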
+     .. versionadded:: 1.11
+
+ .. note::
+
+ This method can be called only when alembic is called using
+ an async dialect.
+ """
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/operations/__init__.py b/Backend/venv/lib/python3.12/site-packages/alembic/operations/__init__.py
new file mode 100644
index 00000000..26197cbe
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/alembic/operations/__init__.py
@@ -0,0 +1,15 @@
+from . import toimpl
+from .base import AbstractOperations
+from .base import BatchOperations
+from .base import Operations
+from .ops import MigrateOperation
+from .ops import MigrationScript
+
+
+__all__ = [
+ "AbstractOperations",
+ "Operations",
+ "BatchOperations",
+ "MigrateOperation",
+ "MigrationScript",
+]
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/operations/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/alembic/operations/__pycache__/__init__.cpython-312.pyc
new file mode 100644
index 00000000..86605037
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/alembic/operations/__pycache__/__init__.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/operations/__pycache__/base.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/alembic/operations/__pycache__/base.cpython-312.pyc
new file mode 100644
index 00000000..dfeb16e9
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/alembic/operations/__pycache__/base.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/operations/__pycache__/batch.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/alembic/operations/__pycache__/batch.cpython-312.pyc
new file mode 100644
index 00000000..70d82b8c
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/alembic/operations/__pycache__/batch.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/operations/__pycache__/ops.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/alembic/operations/__pycache__/ops.cpython-312.pyc
new file mode 100644
index 00000000..87760173
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/alembic/operations/__pycache__/ops.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/operations/__pycache__/schemaobj.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/alembic/operations/__pycache__/schemaobj.cpython-312.pyc
new file mode 100644
index 00000000..82acfbb4
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/alembic/operations/__pycache__/schemaobj.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/operations/__pycache__/toimpl.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/alembic/operations/__pycache__/toimpl.cpython-312.pyc
new file mode 100644
index 00000000..e1f697ce
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/alembic/operations/__pycache__/toimpl.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/operations/base.py b/Backend/venv/lib/python3.12/site-packages/alembic/operations/base.py
new file mode 100644
index 00000000..e3207be7
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/alembic/operations/base.py
@@ -0,0 +1,1837 @@
+from __future__ import annotations
+
+from contextlib import contextmanager
+import re
+import textwrap
+from typing import Any
+from typing import Awaitable
+from typing import Callable
+from typing import Dict
+from typing import Iterator
+from typing import List # noqa
+from typing import Mapping
+from typing import Optional
+from typing import Sequence # noqa
+from typing import Tuple
+from typing import Type # noqa
+from typing import TYPE_CHECKING
+from typing import TypeVar
+from typing import Union
+
+from sqlalchemy.sql.elements import conv
+
+from . import batch
+from . import schemaobj
+from .. import util
+from ..util import sqla_compat
+from ..util.compat import formatannotation_fwdref
+from ..util.compat import inspect_formatargspec
+from ..util.compat import inspect_getfullargspec
+from ..util.sqla_compat import _literal_bindparam
+
+
+if TYPE_CHECKING:
+ from typing import Literal
+
+ from sqlalchemy import Table
+ from sqlalchemy.engine import Connection
+ from sqlalchemy.sql import Executable
+ from sqlalchemy.sql.expression import ColumnElement
+ from sqlalchemy.sql.expression import TableClause
+ from sqlalchemy.sql.expression import TextClause
+ from sqlalchemy.sql.functions import Function
+ from sqlalchemy.sql.schema import Column
+ from sqlalchemy.sql.schema import Computed
+ from sqlalchemy.sql.schema import Identity
+ from sqlalchemy.sql.schema import SchemaItem
+ from sqlalchemy.types import TypeEngine
+
+ from .batch import BatchOperationsImpl
+ from .ops import MigrateOperation
+ from ..ddl import DefaultImpl
+ from ..runtime.migration import MigrationContext
+__all__ = ("Operations", "BatchOperations")
+_T = TypeVar("_T")
+
+
+class AbstractOperations(util.ModuleClsProxy):
+ """Base class for Operations and BatchOperations.
+
+ .. versionadded:: 1.11.0
+
+ """
+
+ impl: Union[DefaultImpl, BatchOperationsImpl]
+ _to_impl = util.Dispatcher()
+
+ def __init__(
+ self,
+ migration_context: MigrationContext,
+ impl: Optional[BatchOperationsImpl] = None,
+ ) -> None:
+ """Construct a new :class:`.Operations`
+
+ :param migration_context: a :class:`.MigrationContext`
+ instance.
+
+ """
+ self.migration_context = migration_context
+ if impl is None:
+ self.impl = migration_context.impl
+ else:
+ self.impl = impl
+
+ self.schema_obj = schemaobj.SchemaObjects(migration_context)
+
+ @classmethod
+ def register_operation(
+ cls, name: str, sourcename: Optional[str] = None
+ ) -> Callable[[_T], _T]:
+ """Register a new operation for this class.
+
+ This method is normally used to add new operations
+ to the :class:`.Operations` class, and possibly the
+ :class:`.BatchOperations` class as well. All Alembic migration
+ operations are implemented via this system, however the system
+ is also available as a public API to facilitate adding custom
+ operations.
+
+ .. seealso::
+
+ :ref:`operation_plugins`
+
+
+ """
+
+ def register(op_cls):
+ if sourcename is None:
+ fn = getattr(op_cls, name)
+ source_name = fn.__name__
+ else:
+ fn = getattr(op_cls, sourcename)
+ source_name = fn.__name__
+
+ spec = inspect_getfullargspec(fn)
+
+ name_args = spec[0]
+ assert name_args[0:2] == ["cls", "operations"]
+
+ name_args[0:2] = ["self"]
+
+ args = inspect_formatargspec(
+ *spec, formatannotation=formatannotation_fwdref
+ )
+ num_defaults = len(spec[3]) if spec[3] else 0
+ if num_defaults:
+ defaulted_vals = name_args[0 - num_defaults :]
+ else:
+ defaulted_vals = ()
+
+ defaulted_vals += tuple(spec[4])
+ # here, we are using formatargspec in a different way in order
+ # to get a string that will re-apply incoming arguments to a new
+ # function call
+
+ apply_kw = inspect_formatargspec(
+ name_args + spec[4],
+ spec[1],
+ spec[2],
+ defaulted_vals,
+ formatvalue=lambda x: "=" + x,
+ formatannotation=formatannotation_fwdref,
+ )
+
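+ # collapse ForwardRef("'X'") wrappers left over from stringified
+ # annotations down to the quoted type name itself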
+ args = re.sub(
+ r'[_]?ForwardRef\(([\'"].+?[\'"])\)',
+ lambda m: m.group(1),
+ args,
+ )
+
+ func_text = textwrap.dedent(
+ """\
+ def %(name)s%(args)s:
+ %(doc)r
+ return op_cls.%(source_name)s%(apply_kw)s
+ """
+ % {
+ "name": name,
+ "source_name": source_name,
+ "args": args,
+ "apply_kw": apply_kw,
+ "doc": fn.__doc__,
+ }
+ )
+
+ globals_ = dict(globals())
+ globals_.update({"op_cls": op_cls})
+ lcl = {}
+
+ exec(func_text, globals_, lcl)
+ setattr(cls, name, lcl[name])
+ fn.__func__.__doc__ = (
+ "This method is proxied on "
+ "the :class:`.%s` class, via the :meth:`.%s.%s` method."
+ % (cls.__name__, cls.__name__, name)
+ )
+ if hasattr(fn, "_legacy_translations"):
+ lcl[name]._legacy_translations = fn._legacy_translations
+ return op_cls
+
+ return register
+
+ @classmethod
+ def implementation_for(cls, op_cls: Any) -> Callable[..., Any]:
+ """Register an implementation for a given :class:`.MigrateOperation`.
+
+ This is part of the operation extensibility API.
+
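+ E.g., continuing the illustrative ``CreateSequenceOp`` sketch from
+ :meth:`.Operations.register_operation`::
+
+ @Operations.implementation_for(CreateSequenceOp)
+ def create_sequence(operations, operation):
+ operations.execute(
+ "CREATE SEQUENCE %s" % operation.sequence_name
+ )
+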
+ .. seealso::
+
+ :ref:`operation_plugins` - example of use
+
+ """
+
+ def decorate(fn):
+ cls._to_impl.dispatch_for(op_cls)(fn)
+ return fn
+
+ return decorate
+
+ @classmethod
+ @contextmanager
+ def context(
+ cls, migration_context: MigrationContext
+ ) -> Iterator[Operations]:
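+ """Yield an :class:`.Operations` instance for the given
+ :class:`.MigrationContext`, installing it as the current
+ module-level proxy (i.e. the ``alembic.op`` namespace) for the
+ duration of the block.
+
+ """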
+ op = Operations(migration_context)
+ op._install_proxy()
+ yield op
+ op._remove_proxy()
+
+ @contextmanager
+ def batch_alter_table(
+ self,
+ table_name: str,
+ schema: Optional[str] = None,
+ recreate: Literal["auto", "always", "never"] = "auto",
+ partial_reordering: Optional[tuple] = None,
+ copy_from: Optional[Table] = None,
+ table_args: Tuple[Any, ...] = (),
+ table_kwargs: Mapping[str, Any] = util.immutabledict(),
+ reflect_args: Tuple[Any, ...] = (),
+ reflect_kwargs: Mapping[str, Any] = util.immutabledict(),
+ naming_convention: Optional[Dict[str, str]] = None,
+ ) -> Iterator[BatchOperations]:
+ """Invoke a series of per-table migrations in batch.
+
+ Batch mode allows a series of operations specific to a table
+ to be syntactically grouped together, and allows for alternate
+ modes of table migration, in particular the "recreate" style of
+ migration required by SQLite.
+
+ "recreate" style is as follows:
+
+ 1. A new table is created with the new specification, based on the
+ migration directives within the batch, using a temporary name.
+
+ 2. the data is copied from the existing table to the new table.
+
+ 3. the existing table is dropped.
+
+ 4. the new table is renamed to the existing table name.
+
+ The directive by default will only use "recreate" style on the
+ SQLite backend, and only if directives are present which require
+ this form, e.g. anything other than ``add_column()``. The batch
+ operation on other backends will proceed using standard ALTER TABLE
+ operations.
+
+ The method is used as a context manager, which returns an instance
+ of :class:`.BatchOperations`; this object is the same as
+ :class:`.Operations` except that table names and schema names
+ are omitted. E.g.::
+
+ with op.batch_alter_table("some_table") as batch_op:
+ batch_op.add_column(Column("foo", Integer))
+ batch_op.drop_column("bar")
+
+ The operations within the context manager are invoked at once
+ when the context is ended. When run against SQLite, if the
+ migrations include operations not supported by SQLite's ALTER TABLE,
+ the entire table will be copied to a new one with the new
+ specification, moving all data across as well.
+
+ The copy operation by default uses reflection to retrieve the current
+ structure of the table, and therefore :meth:`.batch_alter_table`
+ in this mode requires that the migration is run in "online" mode.
+ The ``copy_from`` parameter may be passed which refers to an existing
+ :class:`.Table` object, which will bypass this reflection step.
+
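+ E.g., an illustrative sketch of the ``copy_from`` form, suitable
+ for offline mode (the table layout shown is hypothetical)::
+
+ from sqlalchemy import Column, Integer, MetaData, String, Table
+
+ account = Table(
+ "account",
+ MetaData(),
+ Column("id", Integer, primary_key=True),
+ Column("name", String(50)),
+ )
+
+ with op.batch_alter_table("account", copy_from=account) as batch_op:
+ batch_op.drop_column("name")
+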
+ .. note:: The table copy operation will currently not copy
+ CHECK constraints, and may not copy UNIQUE constraints that are
+ unnamed, as is possible on SQLite. See the section
+ :ref:`sqlite_batch_constraints` for workarounds.
+
+ :param table_name: name of table
+ :param schema: optional schema name.
+ :param recreate: under what circumstances the table should be
+ recreated. At its default of ``"auto"``, the SQLite dialect will
+ recreate the table if any operations other than ``add_column()``,
+ ``create_index()``, or ``drop_index()`` are
+ present. Other options include ``"always"`` and ``"never"``.
+ :param copy_from: optional :class:`~sqlalchemy.schema.Table` object
+ that will act as the structure of the table being copied. If omitted,
+ table reflection is used to retrieve the structure of the table.
+
+ .. seealso::
+
+ :ref:`batch_offline_mode`
+
+ :paramref:`~.Operations.batch_alter_table.reflect_args`
+
+ :paramref:`~.Operations.batch_alter_table.reflect_kwargs`
+
+ :param reflect_args: a sequence of additional positional arguments that
+ will be applied to the table structure being reflected / copied;
+ this may be used to pass column and constraint overrides to the
+ table that will be reflected, in lieu of passing the whole
+ :class:`~sqlalchemy.schema.Table` using
+ :paramref:`~.Operations.batch_alter_table.copy_from`.
+ :param reflect_kwargs: a dictionary of additional keyword arguments
+ that will be applied to the table structure being copied; this may be
+ used to pass additional table and reflection options to the table that
+ will be reflected, in lieu of passing the whole
+ :class:`~sqlalchemy.schema.Table` using
+ :paramref:`~.Operations.batch_alter_table.copy_from`.
+ :param table_args: a sequence of additional positional arguments that
+ will be applied to the new :class:`~sqlalchemy.schema.Table` when
+ created, in addition to those copied from the source table.
+ This may be used to provide additional constraints such as CHECK
+ constraints that may not be reflected.
+ :param table_kwargs: a dictionary of additional keyword arguments
+ that will be applied to the new :class:`~sqlalchemy.schema.Table`
+ when created, in addition to those copied from the source table.
+ This may be used to provide for additional table options that may
+ not be reflected.
+ :param naming_convention: a naming convention dictionary of the form
+ described at :ref:`autogen_naming_conventions` which will be applied
+ to the :class:`~sqlalchemy.schema.MetaData` during the reflection
+ process. This is typically required if one wants to drop SQLite
+ constraints, as these constraints will not have names when
+ reflected on this backend. Requires SQLAlchemy **0.9.4** or greater.
+
+ .. seealso::
+
+ :ref:`dropping_sqlite_foreign_keys`
+
+ :param partial_reordering: a list of tuples, each suggesting a desired
+ ordering of two or more columns in the newly created table. Requires
+ that :paramref:`.batch_alter_table.recreate` is set to ``"always"``.
+ Examples, given a table with columns "a", "b", "c", and "d":
+
+ Specify the order of all columns::
+
+ with op.batch_alter_table(
+ "some_table",
+ recreate="always",
+ partial_reordering=[("c", "d", "a", "b")],
+ ) as batch_op:
+ pass
+
+ Ensure "d" appears before "c", and "b", appears before "a"::
+
+ with op.batch_alter_table(
+ "some_table",
+ recreate="always",
+ partial_reordering=[("d", "c"), ("b", "a")],
+ ) as batch_op:
+ pass
+
+ The ordering of columns not included in the partial_reordering
+ set is undefined. It is therefore best to specify the complete
+ ordering of all columns.
+
+ .. note:: batch mode requires SQLAlchemy 0.8 or above.
+
+ .. seealso::
+
+ :ref:`batch_migrations`
+
+ """
+ impl = batch.BatchOperationsImpl(
+ self,
+ table_name,
+ schema,
+ recreate,
+ copy_from,
+ table_args,
+ table_kwargs,
+ reflect_args,
+ reflect_kwargs,
+ naming_convention,
+ partial_reordering,
+ )
+ batch_op = BatchOperations(self.migration_context, impl=impl)
+ yield batch_op
+ impl.flush()
+
+ def get_context(self) -> MigrationContext:
+ """Return the :class:`.MigrationContext` object that's
+ currently in use.
+
+ """
+
+ return self.migration_context
+
+ def invoke(self, operation: MigrateOperation) -> Any:
+ """Given a :class:`.MigrateOperation`, invoke it in terms of
+ this :class:`.Operations` instance.
+
+ """
+ fn = self._to_impl.dispatch(
+ operation, self.migration_context.impl.__dialect__
+ )
+ return fn(self, operation)
+
+ def f(self, name: str) -> conv:
+ """Indicate a string name that has already had a naming convention
+ applied to it.
+
+ This feature combines with the SQLAlchemy ``naming_convention`` feature
+ to disambiguate constraint names that have already had naming
+ conventions applied to them, versus those that have not. This is
+ necessary in the case that the ``"%(constraint_name)s"`` token
+ is used within a naming convention, so that it can be identified
+ that this particular name should remain fixed.
+
+ If :meth:`.Operations.f` is used on a constraint, the naming
+ convention will not take effect::
+
+ op.add_column("t", "x", Boolean(name=op.f("ck_bool_t_x")))
+
+ Above, the CHECK constraint generated will have the name
+ ``ck_bool_t_x`` regardless of whether or not a naming convention is
+ in use.
+
+ Alternatively, if a naming convention is in use and ``f`` is not
+ used, names will be converted according to the convention. If the
+ ``target_metadata`` contains the naming convention
+ ``{"ck": "ck_bool_%(table_name)s_%(constraint_name)s"}``, then the
+ output of the following::
+
+ op.add_column("t", "x", Boolean(name="x"))
+
+ will be::
+
+ CONSTRAINT ck_bool_t_x CHECK (x in (1, 0))
+
+ The function is rendered in the output of autogenerate when
+ a particular constraint name is already converted.
+
+ """
+ return conv(name)
+
+ def inline_literal(
+ self, value: Union[str, int], type_: Optional[TypeEngine[Any]] = None
+ ) -> _literal_bindparam:
+ r"""Produce an 'inline literal' expression, suitable for
+ using in an INSERT, UPDATE, or DELETE statement.
+
+ When using Alembic in "offline" mode, CRUD operations
+ aren't compatible with SQLAlchemy's default behavior surrounding
+ literal values,
+ which is that they are converted into bound values and passed
+ separately into the ``execute()`` method of the DBAPI cursor.
+ An offline SQL
+ script needs to have these rendered inline. While it should
+ always be noted that inline literal values are an **enormous**
+ security hole in an application that handles untrusted input,
+ a schema migration is not run in this context, so
+ literals are safe to render inline, with the caveat that
+ advanced types like dates may not be supported directly
+ by SQLAlchemy.
+
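+ A brief sketch, using an ad-hoc table construct in the same style
+ as the :meth:`.Operations.execute` example::
+
+ from sqlalchemy.sql import table, column
+ from sqlalchemy import String
+
+ account = table("account", column("name", String))
+ op.execute(
+ account.update().values(
+ {"name": op.inline_literal("account 2")}
+ )
+ )
+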
+ See :meth:`.Operations.execute` for an example usage of
+ :meth:`.Operations.inline_literal`.
+
+ The environment can also be configured to attempt to render
+ "literal" values inline automatically, for those simple types
+ that are supported by the dialect; see
+ :paramref:`.EnvironmentContext.configure.literal_binds` for this
+ more recently added feature.
+
+ :param value: The value to render. Strings, integers, and simple
+ numerics should be supported. Other types like boolean,
+ dates, etc. may or may not be supported yet by various
+ backends.
+ :param type\_: optional - a :class:`sqlalchemy.types.TypeEngine`
+ subclass stating the type of this value. In SQLAlchemy
+ expressions, this is usually derived automatically
+ from the Python type of the value itself, as well as
+ based on the context in which the value is used.
+
+ .. seealso::
+
+ :paramref:`.EnvironmentContext.configure.literal_binds`
+
+ """
+ return sqla_compat._literal_bindparam(None, value, type_=type_)
+
+ def get_bind(self) -> Connection:
+ """Return the current 'bind'.
+
+ Under normal circumstances, this is the
+ :class:`~sqlalchemy.engine.Connection` currently being used
+ to emit SQL to the database.
+
+ In a SQL script context, this value is present as well, most
+ commonly being that of a :class:`.MockConnection`.
+
+ """
+ return self.migration_context.impl.bind # type: ignore[return-value]
+
+ def run_async(
+ self,
+ async_function: Callable[..., Awaitable[_T]],
+ *args: Any,
+ **kw_args: Any,
+ ) -> _T:
+ """Invoke the given asynchronous callable, passing an asynchronous
+ :class:`~sqlalchemy.ext.asyncio.AsyncConnection` as the first
+ argument.
+
+ This method allows calling async functions from within the
+ synchronous ``upgrade()`` or ``downgrade()`` alembic migration
+ method.
+
+ The async connection passed to the callable shares the same
+ transaction as the connection running in the migration context.
+
+ Any additional positional or keyword arguments passed to this
+ function are passed along to the provided async function.
+
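+ E.g., a minimal sketch (the ``seed`` coroutine and table are
+ illustrative)::
+
+ from sqlalchemy import text
+
+ async def seed(connection):
+ await connection.execute(
+ text("INSERT INTO account (name) VALUES ('A1')")
+ )
+
+ def upgrade():
+ op.run_async(seed)
+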
+ .. versionadded:: 1.11
+
+ .. note::
+
+ This method can be called only when alembic is called using
+ an async dialect.
+ """
+ if not sqla_compat.sqla_14_18:
+ raise NotImplementedError("SQLAlchemy 1.4.18+ required")
+ sync_conn = self.get_bind()
+ if sync_conn is None:
+ raise NotImplementedError("Cannot call run_async in SQL mode")
+ if not sync_conn.dialect.is_async:
+ raise ValueError("Cannot call run_async with a sync engine")
+ from sqlalchemy.ext.asyncio import AsyncConnection
+ from sqlalchemy.util import await_only
+
+ async_conn = AsyncConnection._retrieve_proxy_for_target(sync_conn)
+ return await_only(async_function(async_conn, *args, **kw_args))
+
+
+class Operations(AbstractOperations):
+ """Define high level migration operations.
+
+ Each operation corresponds to some schema migration operation,
+ executed against a particular :class:`.MigrationContext`
+ which in turn represents connectivity to a database,
+ or a file output stream.
+
+ While :class:`.Operations` is normally configured as
+ part of the :meth:`.EnvironmentContext.run_migrations`
+ method called from an ``env.py`` script, a standalone
+ :class:`.Operations` instance can be
+ made for use cases external to regular Alembic
+ migrations by passing in a :class:`.MigrationContext`::
+
+ from alembic.migration import MigrationContext
+ from alembic.operations import Operations
+
+ conn = myengine.connect()
+ ctx = MigrationContext.configure(conn)
+ op = Operations(ctx)
+
+ op.alter_column("t", "c", nullable=True)
+
+ Note that as of 0.8, most of the methods on this class are produced
+ dynamically using the :meth:`.Operations.register_operation`
+ method.
+
+ """
+
+ if TYPE_CHECKING:
+ # START STUB FUNCTIONS: op_cls
+ # ### the following stubs are generated by tools/write_pyi.py ###
+ # ### do not edit ###
+
+ def add_column(
+ self,
+ table_name: str,
+ column: Column[Any],
+ *,
+ schema: Optional[str] = None,
+ ) -> None:
+ """Issue an "add column" instruction using the current
+ migration context.
+
+ e.g.::
+
+ from alembic import op
+ from sqlalchemy import Column, String
+
+ op.add_column("organization", Column("name", String()))
+
+ The :meth:`.Operations.add_column` method typically corresponds
+ to the SQL command "ALTER TABLE... ADD COLUMN". Within the scope
+ of this command, the column's name, datatype, nullability,
+ and optional server-generated defaults may be indicated.
+
+ .. note::
+
+ With the exception of NOT NULL constraints or single-column FOREIGN
+ KEY constraints, other kinds of constraints such as PRIMARY KEY,
+ UNIQUE or CHECK constraints **cannot** be generated using this
+ method; for these constraints, refer to operations such as
+ :meth:`.Operations.create_primary_key` and
+ :meth:`.Operations.create_check_constraint`. In particular, the
+ following :class:`~sqlalchemy.schema.Column` parameters are
+ **ignored**:
+
+ * :paramref:`~sqlalchemy.schema.Column.primary_key` - SQL databases
+ typically do not support an ALTER operation that can add
+ individual columns one at a time to an existing primary key
+ constraint, therefore it's less ambiguous to use the
+ :meth:`.Operations.create_primary_key` method, which assumes no
+ existing primary key constraint is present.
+ * :paramref:`~sqlalchemy.schema.Column.unique` - use the
+ :meth:`.Operations.create_unique_constraint` method
+ * :paramref:`~sqlalchemy.schema.Column.index` - use the
+ :meth:`.Operations.create_index` method
+
+
+ The provided :class:`~sqlalchemy.schema.Column` object may include a
+ :class:`~sqlalchemy.schema.ForeignKey` constraint directive,
+ referencing a remote table name. For this specific type of constraint,
+ Alembic will automatically emit a second ALTER statement in order to
+ add the single-column FOREIGN KEY constraint separately::
+
+ from alembic import op
+ from sqlalchemy import Column, INTEGER, ForeignKey
+
+ op.add_column(
+ "organization",
+ Column("account_id", INTEGER, ForeignKey("accounts.id")),
+ )
+
+ The column argument passed to :meth:`.Operations.add_column` is a
+ :class:`~sqlalchemy.schema.Column` construct, used in the same way it's
+ used in SQLAlchemy. In particular, values or functions to be indicated
+ as producing the column's default value on the database side are
+ specified using the ``server_default`` parameter, and not ``default``
+ which only specifies Python-side defaults::
+
+ from alembic import op
+ from sqlalchemy import Column, TIMESTAMP, func
+
+ # specify "DEFAULT NOW" along with the column add
+ op.add_column(
+ "account",
+ Column("timestamp", TIMESTAMP, server_default=func.now()),
+ )
+
+ :param table_name: String name of the parent table.
+ :param column: a :class:`sqlalchemy.schema.Column` object
+ representing the new column.
+ :param schema: Optional schema name to operate within. To control
+ quoting of the schema outside of the default behavior, use
+ the SQLAlchemy construct
+ :class:`~sqlalchemy.sql.elements.quoted_name`.
+
+ """ # noqa: E501
+ ...
+
+ def alter_column(
+ self,
+ table_name: str,
+ column_name: str,
+ *,
+ nullable: Optional[bool] = None,
+ comment: Union[str, Literal[False], None] = False,
+ server_default: Any = False,
+ new_column_name: Optional[str] = None,
+ type_: Union[TypeEngine, Type[TypeEngine], None] = None,
+ existing_type: Union[TypeEngine, Type[TypeEngine], None] = None,
+ existing_server_default: Union[
+ str, bool, Identity, Computed, None
+ ] = False,
+ existing_nullable: Optional[bool] = None,
+ existing_comment: Optional[str] = None,
+ schema: Optional[str] = None,
+ **kw: Any,
+ ) -> None:
+ r"""Issue an "alter column" instruction using the
+ current migration context.
+
+ Generally, only that aspect of the column which
+ is being changed, i.e. name, type, nullability,
+ default, needs to be specified. Multiple changes
+ can also be specified at once and the backend should
+ "do the right thing", emitting each change either
+ separately or together as the backend allows.
+
+ MySQL has special requirements here, since MySQL
+ cannot ALTER a column without a full specification.
+ When producing MySQL-compatible migration files,
+ it is recommended that the ``existing_type``,
+ ``existing_server_default``, and ``existing_nullable``
+ parameters be present, if not being altered.
+
+ Type changes which are against the SQLAlchemy
+ "schema" types :class:`~sqlalchemy.types.Boolean`
+ and :class:`~sqlalchemy.types.Enum` may also
+ add or drop constraints which accompany those
+ types on backends that don't support them natively.
+ The ``existing_type`` argument is
+ used in this case to identify and remove a previous
+ constraint that was bound to the type object.
+
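+ An illustrative example (the table, column, and type shown are
+ hypothetical)::
+
+ from alembic import op
+ import sqlalchemy as sa
+
+ op.alter_column(
+ "user",
+ "name",
+ new_column_name="username",
+ existing_type=sa.String(50),
+ nullable=False,
+ )
+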
+ :param table_name: string name of the target table.
+ :param column_name: string name of the target column,
+ as it exists before the operation begins.
+ :param nullable: Optional; specify ``True`` or ``False``
+ to alter the column's nullability.
+ :param server_default: Optional; specify a string
+ SQL expression, :func:`~sqlalchemy.sql.expression.text`,
+ or :class:`~sqlalchemy.schema.DefaultClause` to indicate
+ an alteration to the column's default value.
+ Set to ``None`` to have the default removed.
+ :param comment: optional string text of a new comment to add to the
+ column.
+ :param new_column_name: Optional; specify a string name here to
+ indicate the new name within a column rename operation.
+ :param type\_: Optional; a :class:`~sqlalchemy.types.TypeEngine`
+ type object to specify a change to the column's type.
+ For SQLAlchemy types that also indicate a constraint (i.e.
+ :class:`~sqlalchemy.types.Boolean`, :class:`~sqlalchemy.types.Enum`),
+ the constraint is also generated.
+ :param autoincrement: set the ``AUTO_INCREMENT`` flag of the column;
+ currently understood by the MySQL dialect.
+ :param existing_type: Optional; a
+ :class:`~sqlalchemy.types.TypeEngine`
+ type object to specify the previous type. This
+ is required for all MySQL column alter operations that
+ don't otherwise specify a new type, as well as for
+ when nullability is being changed on a SQL Server
+ column. It is also used if the type is a so-called
+ SQLAlchemy "schema" type which may define a constraint (i.e.
+ :class:`~sqlalchemy.types.Boolean`,
+ :class:`~sqlalchemy.types.Enum`),
+ so that the constraint can be dropped.
+ :param existing_server_default: Optional; The existing
+ default value of the column. Required on MySQL if
+ an existing default is not being changed; else MySQL
+ removes the default.
+ :param existing_nullable: Optional; the existing nullability
+ of the column. Required on MySQL if the existing nullability
+ is not being changed; else MySQL sets this to NULL.
+ :param existing_autoincrement: Optional; the existing autoincrement
+ of the column. Used for MySQL's system of altering a column
+ that specifies ``AUTO_INCREMENT``.
+ :param existing_comment: string text of the existing comment on the
+ column to be maintained. Required on MySQL if the existing comment
+ on the column is not being changed.
+ :param schema: Optional schema name to operate within. To control
+ quoting of the schema outside of the default behavior, use
+ the SQLAlchemy construct
+ :class:`~sqlalchemy.sql.elements.quoted_name`.
+ :param postgresql_using: String argument which will indicate a
+ SQL expression to render within the Postgresql-specific USING clause
+ within ALTER COLUMN. This string is taken directly as raw SQL which
+ must explicitly include any necessary quoting or escaping of tokens
+ within the expression.
+
+ """ # noqa: E501
+ ...
+
+ def bulk_insert(
+ self,
+ table: Union[Table, TableClause],
+ rows: List[dict],
+ *,
+ multiinsert: bool = True,
+ ) -> None:
+ """Issue a "bulk insert" operation using the current
+ migration context.
+
+ This provides a means of representing an INSERT of multiple rows
+ which works equally well when executing on a live connection and
+ when generating a SQL script. In the case of a SQL script, the
+ values are rendered inline into the statement.
+
+ e.g.::
+
+ from alembic import op
+ from datetime import date
+ from sqlalchemy.sql import table, column
+ from sqlalchemy import String, Integer, Date
+
+ # Create an ad-hoc table to use for the insert statement.
+ accounts_table = table(
+ "account",
+ column("id", Integer),
+ column("name", String),
+ column("create_date", Date),
+ )
+
+ op.bulk_insert(
+ accounts_table,
+ [
+ {
+ "id": 1,
+ "name": "John Smith",
+ "create_date": date(2010, 10, 5),
+ },
+ {
+ "id": 2,
+ "name": "Ed Williams",
+ "create_date": date(2007, 5, 27),
+ },
+ {
+ "id": 3,
+ "name": "Wendy Jones",
+ "create_date": date(2008, 8, 15),
+ },
+ ],
+ )
+
+ When using --sql mode, some datatypes may not render inline
+ automatically, such as dates and other special types. When this
+ issue is present, :meth:`.Operations.inline_literal` may be used::
+
+ op.bulk_insert(
+ accounts_table,
+ [
+ {
+ "id": 1,
+ "name": "John Smith",
+ "create_date": op.inline_literal("2010-10-05"),
+ },
+ {
+ "id": 2,
+ "name": "Ed Williams",
+ "create_date": op.inline_literal("2007-05-27"),
+ },
+ {
+ "id": 3,
+ "name": "Wendy Jones",
+ "create_date": op.inline_literal("2008-08-15"),
+ },
+ ],
+ multiinsert=False,
+ )
+
+ When using :meth:`.Operations.inline_literal` in conjunction with
+ :meth:`.Operations.bulk_insert`, in order for the statement to work
+ in "online" (e.g. non --sql) mode, the
+ :paramref:`~.Operations.bulk_insert.multiinsert`
+ flag should be set to ``False``, which will have the effect of
+ individual INSERT statements being emitted to the database, each
+ with a distinct VALUES clause, so that the "inline" values can
+ still be rendered, rather than attempting to pass the values
+ as bound parameters.
+
+ :param table: a table object which represents the target of the INSERT.
+
+ :param rows: a list of dictionaries indicating rows.
+
+ :param multiinsert: when at its default of True and --sql mode is not
+ enabled, the INSERT statement will be executed using
+ "executemany()" style, where all elements in the list of
+ dictionaries are passed as bound parameters in a single
+ list. Setting this to False results in individual INSERT
+ statements being emitted per parameter set, and is needed
+ in those cases where non-literal values are present in the
+ parameter sets.
+
+ """ # noqa: E501
+ ...
+
+ def create_check_constraint(
+ self,
+ constraint_name: Optional[str],
+ table_name: str,
+ condition: Union[str, ColumnElement[bool], TextClause],
+ *,
+ schema: Optional[str] = None,
+ **kw: Any,
+ ) -> None:
+ """Issue a "create check constraint" instruction using the
+ current migration context.
+
+ e.g.::
+
+ from alembic import op
+ from sqlalchemy.sql import column, func
+
+ op.create_check_constraint(
+ "ck_user_name_len",
+ "user",
+ func.len(column("name")) > 5,
+ )
+
+ CHECK constraints are usually against a SQL expression, so ad-hoc
+ table metadata is usually needed. The function will convert the given
+ arguments into a :class:`sqlalchemy.schema.CheckConstraint` bound
+ to an anonymous table in order to emit the CREATE statement.
+
+ :param constraint_name: Name of the check constraint. The name is
+ necessary so that an ALTER statement can be emitted. For setups that
+ use an automated naming scheme such as that described at
+ :ref:`sqla:constraint_naming_conventions`,
+ ``constraint_name`` here can be ``None``, as the event listener will
+ apply the name to the constraint object when it is associated
+ with the table.
+ :param table_name: String name of the source table.
+ :param condition: SQL expression that's the condition of the
+ constraint. Can be a string or SQLAlchemy expression language
+ structure.
+ :param deferrable: optional bool. If set, emit DEFERRABLE or
+ NOT DEFERRABLE when issuing DDL for this constraint.
+ :param initially: optional string. If set, emit INITIALLY
+ when issuing DDL for this constraint.
+ :param schema: Optional schema name to operate within. To control
+ quoting of the schema outside of the default behavior, use
+ the SQLAlchemy construct
+ :class:`~sqlalchemy.sql.elements.quoted_name`.
+
+ """ # noqa: E501
+ ...
+
+ def create_exclude_constraint(
+ self,
+ constraint_name: str,
+ table_name: str,
+ *elements: Any,
+ **kw: Any,
+ ) -> Optional[Table]:
+ """Issue an alter to create an EXCLUDE constraint using the
+ current migration context.
+
+ .. note:: This method is Postgresql specific, and additionally
+ requires at least SQLAlchemy 1.0.
+
+ e.g.::
+
+ from alembic import op
+
+ op.create_exclude_constraint(
+ "user_excl",
+ "user",
+ ("period", "&&"),
+ ("group", "="),
+ where=("group != 'some group'"),
+ )
+
+ Note that the expressions work the same way as that of
+ the ``ExcludeConstraint`` object itself; if plain strings are
+ passed, quoting rules must be applied manually.
+
+ :param constraint_name: Name of the constraint.
+ :param table_name: String name of the source table.
+ :param elements: exclude conditions.
+ :param where: SQL expression or SQL string with optional WHERE
+ clause.
+ :param deferrable: optional bool. If set, emit DEFERRABLE or
+ NOT DEFERRABLE when issuing DDL for this constraint.
+ :param initially: optional string. If set, emit INITIALLY
+ when issuing DDL for this constraint.
+ :param schema: Optional schema name to operate within.
+
+ """ # noqa: E501
+ ...
+
+ def create_foreign_key(
+ self,
+ constraint_name: Optional[str],
+ source_table: str,
+ referent_table: str,
+ local_cols: List[str],
+ remote_cols: List[str],
+ *,
+ onupdate: Optional[str] = None,
+ ondelete: Optional[str] = None,
+ deferrable: Optional[bool] = None,
+ initially: Optional[str] = None,
+ match: Optional[str] = None,
+ source_schema: Optional[str] = None,
+ referent_schema: Optional[str] = None,
+ **dialect_kw: Any,
+ ) -> None:
+ """Issue a "create foreign key" instruction using the
+ current migration context.
+
+ e.g.::
+
+ from alembic import op
+
+ op.create_foreign_key(
+ "fk_user_address",
+ "address",
+ "user",
+ ["user_id"],
+ ["id"],
+ )
+
+ This internally generates a :class:`~sqlalchemy.schema.Table` object
+ containing the necessary columns, then generates a new
+ :class:`~sqlalchemy.schema.ForeignKeyConstraint`
+ object which it then associates with the
+ :class:`~sqlalchemy.schema.Table`.
+ Any event listeners associated with this action will be fired
+ off normally. The :class:`~sqlalchemy.schema.AddConstraint`
+ construct is ultimately used to generate the ALTER statement.
+
+ :param constraint_name: Name of the foreign key constraint. The name
+ is necessary so that an ALTER statement can be emitted. For setups
+ that use an automated naming scheme such as that described at
+ :ref:`sqla:constraint_naming_conventions`,
+ ``constraint_name`` here can be ``None``, as the event listener will
+ apply the name to the constraint object when it is associated
+ with the table.
+ :param source_table: String name of the source table.
+ :param referent_table: String name of the destination table.
+ :param local_cols: a list of string column names in the
+ source table.
+ :param remote_cols: a list of string column names in the
+ remote table.
+ :param onupdate: Optional string. If set, emit ON UPDATE when
+ issuing DDL for this constraint. Typical values include CASCADE,
+ SET NULL and RESTRICT.
+ :param ondelete: Optional string. If set, emit ON DELETE when
+ issuing DDL for this constraint. Typical values include CASCADE,
+ SET NULL and RESTRICT.
+ :param deferrable: optional bool. If set, emit DEFERRABLE or NOT
+ DEFERRABLE when issuing DDL for this constraint.
+ :param source_schema: Optional schema name of the source table.
+ :param referent_schema: Optional schema name of the destination table.
+
+ """ # noqa: E501
+ ...
+
+ def create_index(
+ self,
+ index_name: Optional[str],
+ table_name: str,
+ columns: Sequence[Union[str, TextClause, Function[Any]]],
+ *,
+ schema: Optional[str] = None,
+ unique: bool = False,
+ if_not_exists: Optional[bool] = None,
+ **kw: Any,
+ ) -> None:
+ r"""Issue a "create index" instruction using the current
+ migration context.
+
+ e.g.::
+
+ from alembic import op
+
+ op.create_index("ik_test", "t1", ["foo", "bar"])
+
+ Functional indexes can be produced by using the
+ :func:`sqlalchemy.sql.expression.text` construct::
+
+ from alembic import op
+ from sqlalchemy import text
+
+ op.create_index("ik_test", "t1", [text("lower(foo)")])
+
+ :param index_name: name of the index.
+ :param table_name: name of the owning table.
+ :param columns: a list consisting of string column names and/or
+ :func:`~sqlalchemy.sql.expression.text` constructs.
+ :param schema: Optional schema name to operate within. To control
+ quoting of the schema outside of the default behavior, use
+ the SQLAlchemy construct
+ :class:`~sqlalchemy.sql.elements.quoted_name`.
+ :param unique: If True, create a unique index.
+
+ :param quote: Force quoting of this column's name on or off,
+ corresponding to ``True`` or ``False``. When left at its default
+ of ``None``, the column identifier will be quoted according to
+ whether the name is case sensitive (identifiers with at least one
+ upper case character are treated as case sensitive), or if it's a
+ reserved word. This flag is only needed to force quoting of a
+ reserved word which is not known by the SQLAlchemy dialect.
+
+ :param if_not_exists: If True, adds IF NOT EXISTS operator when
+ creating the new index.
+
+ .. versionadded:: 1.12.0
+
+ :param \**kw: Additional keyword arguments not mentioned above are
+ dialect specific, and passed in the form
+ ``<dialectname>_<argname>``.
+ See the documentation regarding an individual dialect at
+ :ref:`dialect_toplevel` for detail on documented arguments.
+
+ """ # noqa: E501
+ ...
+
+ def create_primary_key(
+ self,
+ constraint_name: Optional[str],
+ table_name: str,
+ columns: List[str],
+ *,
+ schema: Optional[str] = None,
+ ) -> None:
+ """Issue a "create primary key" instruction using the current
+ migration context.
+
+ e.g.::
+
+ from alembic import op
+
+ op.create_primary_key("pk_my_table", "my_table", ["id", "version"])
+
+ This internally generates a :class:`~sqlalchemy.schema.Table` object
+ containing the necessary columns, then generates a new
+ :class:`~sqlalchemy.schema.PrimaryKeyConstraint`
+ object which it then associates with the
+ :class:`~sqlalchemy.schema.Table`.
+ Any event listeners associated with this action will be fired
+ off normally. The :class:`~sqlalchemy.schema.AddConstraint`
+ construct is ultimately used to generate the ALTER statement.
+
+ :param constraint_name: Name of the primary key constraint. The name
+ is necessary so that an ALTER statement can be emitted. For setups
+ that use an automated naming scheme such as that described at
+ :ref:`sqla:constraint_naming_conventions`,
+ ``constraint_name`` here can be ``None``, as the event listener will
+ apply the name to the constraint object when it is associated
+ with the table.
+ :param table_name: String name of the target table.
+ :param columns: a list of string column names to be applied to the
+ primary key constraint.
+ :param schema: Optional schema name to operate within. To control
+ quoting of the schema outside of the default behavior, use
+ the SQLAlchemy construct
+ :class:`~sqlalchemy.sql.elements.quoted_name`.
+
+ """ # noqa: E501
+ ...
+
+ def create_table(
+ self, table_name: str, *columns: SchemaItem, **kw: Any
+ ) -> Table:
+ r"""Issue a "create table" instruction using the current migration
+ context.
+
+ This directive receives an argument list similar to that of the
+ traditional :class:`sqlalchemy.schema.Table` construct, but without the
+ metadata::
+
+ from sqlalchemy import INTEGER, TIMESTAMP, VARCHAR, NVARCHAR, Column, func
+ from alembic import op
+
+ op.create_table(
+ "account",
+ Column("id", INTEGER, primary_key=True),
+ Column("name", VARCHAR(50), nullable=False),
+ Column("description", NVARCHAR(200)),
+ Column("timestamp", TIMESTAMP, server_default=func.now()),
+ )
+
+ Note that :meth:`.create_table` accepts
+ :class:`~sqlalchemy.schema.Column`
+ constructs directly from the SQLAlchemy library. In particular,
+ default values to be created on the database side are
+ specified using the ``server_default`` parameter, and not
+ ``default`` which only specifies Python-side defaults::
+
+ from alembic import op
+ from sqlalchemy import Column, TIMESTAMP, func
+
+ # specify "DEFAULT NOW" along with the "timestamp" column
+ op.create_table(
+ "account",
+ Column("id", INTEGER, primary_key=True),
+ Column("timestamp", TIMESTAMP, server_default=func.now()),
+ )
+
+ The function also returns a newly created
+ :class:`~sqlalchemy.schema.Table` object, corresponding to the table
+ specification given, which is suitable for
+ immediate SQL operations, in particular
+ :meth:`.Operations.bulk_insert`::
+
+ from sqlalchemy import INTEGER, TIMESTAMP, VARCHAR, NVARCHAR, Column, func
+ from alembic import op
+
+ account_table = op.create_table(
+ "account",
+ Column("id", INTEGER, primary_key=True),
+ Column("name", VARCHAR(50), nullable=False),
+ Column("description", NVARCHAR(200)),
+ Column("timestamp", TIMESTAMP, server_default=func.now()),
+ )
+
+ op.bulk_insert(
+ account_table,
+ [
+ {"name": "A1", "description": "account 1"},
+ {"name": "A2", "description": "account 2"},
+ ],
+ )
+
+ :param table_name: Name of the table
+ :param \*columns: collection of :class:`~sqlalchemy.schema.Column`
+ objects within
+ the table, as well as optional :class:`~sqlalchemy.schema.Constraint`
+ objects
+ and :class:`~sqlalchemy.schema.Index` objects.
+ :param schema: Optional schema name to operate within. To control
+ quoting of the schema outside of the default behavior, use
+ the SQLAlchemy construct
+ :class:`~sqlalchemy.sql.elements.quoted_name`.
+ :param \**kw: Other keyword arguments are passed to the underlying
+ :class:`sqlalchemy.schema.Table` object created for the command.
+
+ :return: the :class:`~sqlalchemy.schema.Table` object corresponding
+ to the parameters given.
+
+ """ # noqa: E501
+ ...
+
+ def create_table_comment(
+ self,
+ table_name: str,
+ comment: Optional[str],
+ *,
+ existing_comment: Optional[str] = None,
+ schema: Optional[str] = None,
+ ) -> None:
+ """Emit a COMMENT ON operation to set the comment for a table.
+
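+ e.g. (the table and comment text are illustrative)::
+
+ op.create_table_comment(
+ "account",
+ "holds user account rows",
+ )
+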
+ :param table_name: string name of the target table.
+ :param comment: string value of the comment being registered against
+ the specified table.
+ :param existing_comment: String value of a comment
+ already registered on the specified table, used within autogenerate
+ so that the operation is reversible, but not required for direct
+ use.
+
+ .. seealso::
+
+ :meth:`.Operations.drop_table_comment`
+
+ :paramref:`.Operations.alter_column.comment`
+
+ """ # noqa: E501
+ ...
+
+ def create_unique_constraint(
+ self,
+ constraint_name: Optional[str],
+ table_name: str,
+ columns: Sequence[str],
+ *,
+ schema: Optional[str] = None,
+ **kw: Any,
+ ) -> Any:
+ """Issue a "create unique constraint" instruction using the
+ current migration context.
+
+ e.g.::
+
+ from alembic import op
+ op.create_unique_constraint("uq_user_name", "user", ["name"])
+
+ This internally generates a :class:`~sqlalchemy.schema.Table` object
+ containing the necessary columns, then generates a new
+ :class:`~sqlalchemy.schema.UniqueConstraint`
+ object which it then associates with the
+ :class:`~sqlalchemy.schema.Table`.
+ Any event listeners associated with this action will be fired
+ off normally. The :class:`~sqlalchemy.schema.AddConstraint`
+ construct is ultimately used to generate the ALTER statement.
+
+ :param constraint_name: Name of the unique constraint. The name is
+ necessary so that an ALTER statement can be emitted. For setups that
+ use an automated naming scheme such as that described at
+ :ref:`sqla:constraint_naming_conventions`,
+ ``constraint_name`` here can be ``None``, as the event listener will
+ apply the name to the constraint object when it is associated
+ with the table.
+ :param table_name: String name of the source table.
+ :param columns: a list of string column names in the
+ source table.
+ :param deferrable: optional bool. If set, emit DEFERRABLE or
+ NOT DEFERRABLE when issuing DDL for this constraint.
+ :param initially: optional string. If set, emit INITIALLY
+ when issuing DDL for this constraint.
+ :param schema: Optional schema name to operate within. To control
+ quoting of the schema outside of the default behavior, use
+ the SQLAlchemy construct
+ :class:`~sqlalchemy.sql.elements.quoted_name`.
+
+ """ # noqa: E501
+ ...
+
+ def drop_column(
+ self,
+ table_name: str,
+ column_name: str,
+ *,
+ schema: Optional[str] = None,
+ **kw: Any,
+ ) -> None:
+ """Issue a "drop column" instruction using the current
+ migration context.
+
+ e.g.::
+
+ drop_column("organization", "account_id")
+
+ :param table_name: name of table
+ :param column_name: name of column
+ :param schema: Optional schema name to operate within. To control
+ quoting of the schema outside of the default behavior, use
+ the SQLAlchemy construct
+ :class:`~sqlalchemy.sql.elements.quoted_name`.
+ :param mssql_drop_check: Optional boolean. When ``True``, on
+ Microsoft SQL Server only, first
+ drop the CHECK constraint on the column using a
+ SQL-script-compatible
+ block that selects into a @variable from sys.check_constraints,
+ then exec's a separate DROP CONSTRAINT for that constraint.
+ :param mssql_drop_default: Optional boolean. When ``True``, on
+ Microsoft SQL Server only, first
+ drop the DEFAULT constraint on the column using a
+ SQL-script-compatible
+ block that selects into a @variable from sys.default_constraints,
+ then exec's a separate DROP CONSTRAINT for that default.
+ :param mssql_drop_foreign_key: Optional boolean. When ``True``, on
+ Microsoft SQL Server only, first
+ drop a single FOREIGN KEY constraint on the column using a
+ SQL-script-compatible
+ block that selects into a @variable from
+ sys.foreign_keys/sys.foreign_key_columns,
+ then exec's a separate DROP CONSTRAINT for that constraint. Only
+ works if the column has exactly one FK constraint which refers to
+ it, at the moment.
+
+ """ # noqa: E501
+ ...
+
+ def drop_constraint(
+ self,
+ constraint_name: str,
+ table_name: str,
+ type_: Optional[str] = None,
+ *,
+ schema: Optional[str] = None,
+ ) -> None:
+ r"""Drop a constraint of the given name, typically via DROP CONSTRAINT.
+
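+ e.g., reusing the illustrative names from
+ :meth:`.Operations.create_foreign_key`::
+
+ op.drop_constraint("fk_user_address", "address", type_="foreignkey")
+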
+ :param constraint_name: name of the constraint.
+ :param table_name: table name.
+ :param type\_: optional, required on MySQL. can be
+ 'foreignkey', 'primary', 'unique', or 'check'.
+ :param schema: Optional schema name to operate within. To control
+ quoting of the schema outside of the default behavior, use
+ the SQLAlchemy construct
+ :class:`~sqlalchemy.sql.elements.quoted_name`.
+
+ """ # noqa: E501
+ ...
+
+ def drop_index(
+ self,
+ index_name: str,
+ table_name: Optional[str] = None,
+ *,
+ schema: Optional[str] = None,
+ if_exists: Optional[bool] = None,
+ **kw: Any,
+ ) -> None:
+ r"""Issue a "drop index" instruction using the current
+ migration context.
+
+ e.g.::
+
+ drop_index("accounts")
+
+ :param index_name: name of the index.
+ :param table_name: name of the owning table. Some
+ backends such as Microsoft SQL Server require this.
+ :param schema: Optional schema name to operate within. To control
+ quoting of the schema outside of the default behavior, use
+ the SQLAlchemy construct
+ :class:`~sqlalchemy.sql.elements.quoted_name`.
+
+ :param if_exists: If True, adds IF EXISTS operator when
+ dropping the index.
+
+ .. versionadded:: 1.12.0
+
+ :param \**kw: Additional keyword arguments not mentioned above are
+ dialect specific, and passed in the form
+ ``<dialectname>_<argname>``.
+ See the documentation regarding an individual dialect at
+ :ref:`dialect_toplevel` for detail on documented arguments.
+
+ """ # noqa: E501
+ ...
+
+ def drop_table(
+ self, table_name: str, *, schema: Optional[str] = None, **kw: Any
+ ) -> None:
+ r"""Issue a "drop table" instruction using the current
+ migration context.
+
+
+ e.g.::
+
+ drop_table("accounts")
+
+ :param table_name: Name of the table
+ :param schema: Optional schema name to operate within. To control
+ quoting of the schema outside of the default behavior, use
+ the SQLAlchemy construct
+ :class:`~sqlalchemy.sql.elements.quoted_name`.
+ :param \**kw: Other keyword arguments are passed to the underlying
+ :class:`sqlalchemy.schema.Table` object created for the command.
+
+ """ # noqa: E501
+ ...
+
+ def drop_table_comment(
+ self,
+ table_name: str,
+ *,
+ existing_comment: Optional[str] = None,
+ schema: Optional[str] = None,
+ ) -> None:
+ """Issue a "drop table comment" operation to
+ remove an existing comment set on a table.
+
+ :param table_name: string name of the target table.
+ :param existing_comment: An optional string value of a comment already
+ registered on the specified table.
+
+ .. seealso::
+
+ :meth:`.Operations.create_table_comment`
+
+ :paramref:`.Operations.alter_column.comment`
+
+ """ # noqa: E501
+ ...
+
+ def execute(
+ self,
+ sqltext: Union[Executable, str],
+ *,
+ execution_options: Optional[dict[str, Any]] = None,
+ ) -> None:
+ r"""Execute the given SQL using the current migration context.
+
+ The given SQL can be a plain string, e.g.::
+
+ op.execute("INSERT INTO table (foo) VALUES ('some value')")
+
+ Or it can be any kind of Core SQL Expression construct, such as
+ below where we use an update construct::
+
+ from sqlalchemy.sql import table, column
+ from sqlalchemy import String
+ from alembic import op
+
+ account = table("account", column("name", String))
+ op.execute(
+ account.update()
+ .where(account.c.name == op.inline_literal("account 1"))
+ .values({"name": op.inline_literal("account 2")})
+ )
+
+ Above, we made use of the SQLAlchemy
+ :func:`sqlalchemy.sql.expression.table` and
+ :func:`sqlalchemy.sql.expression.column` constructs to make a brief,
+ ad-hoc table construct just for our UPDATE statement. A full
+ :class:`~sqlalchemy.schema.Table` construct of course works perfectly
+ fine as well, though note it's a recommended practice to at least
+ ensure the definition of a table is self-contained within the migration
+ script, rather than imported from a module that may break compatibility
+ with older migrations.
+
+ In a SQL script context, the statement is emitted directly to the
+ output stream. There is *no* return result, however, as this
+ function is oriented towards generating a change script
+ that can run in "offline" mode. Additionally, parameterized
+ statements are discouraged here, as they *will not work* in offline
+ mode. Above, we use :meth:`.inline_literal` where parameters are
+ to be used.
+
+ For full interaction with a connected database where parameters can
+ also be used normally, use the "bind" available from the context::
+
+ from alembic import op
+
+ connection = op.get_bind()
+
+ connection.execute(
+ account.update()
+ .where(account.c.name == "account 1")
+ .values({"name": "account 2"})
+ )
+
+ Additionally, when passing the statement as a plain string, it is first
+ coerced into a :func:`sqlalchemy.sql.expression.text` construct
+ before being passed along. In the less likely case that the
+ literal SQL string contains a colon, it must be escaped with a
+ backslash, as::
+
+ op.execute(r"INSERT INTO table (foo) VALUES ('\:colon_value')")
+
+
+ :param sqltext: Any legal SQLAlchemy expression, including:
+
+ * a string
+ * a :func:`sqlalchemy.sql.expression.text` construct.
+ * a :func:`sqlalchemy.sql.expression.insert` construct.
+ * a :func:`sqlalchemy.sql.expression.update` construct.
+ * a :func:`sqlalchemy.sql.expression.delete` construct.
+ * Any "executable" described in SQLAlchemy Core documentation,
+ noting that no result set is returned.
+
+ .. note:: when passing a plain string, the statement is coerced into
+ a :func:`sqlalchemy.sql.expression.text` construct. This construct
+ considers symbols with colons, e.g. ``:foo`` to be bound parameters.
+ To avoid this, ensure that colon symbols are escaped, e.g.
+ ``\:foo``.
+
+ :param execution_options: Optional dictionary of
+ execution options, will be passed to
+ :meth:`sqlalchemy.engine.Connection.execution_options`.
+ """ # noqa: E501
+ ...
+
+ def rename_table(
+ self,
+ old_table_name: str,
+ new_table_name: str,
+ *,
+ schema: Optional[str] = None,
+ ) -> None:
+ """Emit an ALTER TABLE to rename a table.
+
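+ e.g. (names are illustrative)::
+
+ op.rename_table("account", "accounts")
+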
+ :param old_table_name: old name.
+ :param new_table_name: new name.
+ :param schema: Optional schema name to operate within. To control
+ quoting of the schema outside of the default behavior, use
+ the SQLAlchemy construct
+ :class:`~sqlalchemy.sql.elements.quoted_name`.
+
+ """ # noqa: E501
+ ...
+
+ # END STUB FUNCTIONS: op_cls
+
+
+class BatchOperations(AbstractOperations):
+ """Modifies the interface :class:`.Operations` for batch mode.
+
+ This basically omits the ``table_name`` and ``schema`` parameters
+ from associated methods, as these are a given when running under batch
+ mode.
+
+ .. seealso::
+
+ :meth:`.Operations.batch_alter_table`
+
+ Note that as of 0.8, most of the methods on this class are produced
+ dynamically using the :meth:`.Operations.register_operation`
+ method.
+
+ """
+
+ impl: BatchOperationsImpl
+
+ def _noop(self, operation):
+ raise NotImplementedError(
+ "The %s method does not apply to a batch table alter operation."
+ % operation
+ )
+
+ if TYPE_CHECKING:
+ # START STUB FUNCTIONS: batch_op
+ # ### the following stubs are generated by tools/write_pyi.py ###
+ # ### do not edit ###
+
+ def add_column(
+ self,
+ column: Column[Any],
+ *,
+ insert_before: Optional[str] = None,
+ insert_after: Optional[str] = None,
+ ) -> None:
+ """Issue an "add column" instruction using the current
+ batch migration context.
+
+ .. seealso::
+
+ :meth:`.Operations.add_column`
+
+ """ # noqa: E501
+ ...
+
+ def alter_column(
+ self,
+ column_name: str,
+ *,
+ nullable: Optional[bool] = None,
+ comment: Union[str, Literal[False], None] = False,
+ server_default: Any = False,
+ new_column_name: Optional[str] = None,
+ type_: Union[TypeEngine, Type[TypeEngine], None] = None,
+ existing_type: Union[TypeEngine, Type[TypeEngine], None] = None,
+ existing_server_default: Union[
+ str, bool, Identity, Computed, None
+ ] = False,
+ existing_nullable: Optional[bool] = None,
+ existing_comment: Optional[str] = None,
+ insert_before: Optional[str] = None,
+ insert_after: Optional[str] = None,
+ **kw: Any,
+ ) -> None:
+ """Issue an "alter column" instruction using the current
+ batch migration context.
+
+ Parameters are the same as those of :meth:`.Operations.alter_column`,
+ as well as the following option(s):
+
+ :param insert_before: String name of an existing column which this
+ column should be placed before, when creating the new table.
+
+ :param insert_after: String name of an existing column which this
+ column should be placed after, when creating the new table. If
+ both :paramref:`.BatchOperations.alter_column.insert_before`
+ and :paramref:`.BatchOperations.alter_column.insert_after` are
+ omitted, the column is inserted after the last existing column
+ in the table.
+
+ .. seealso::
+
+ :meth:`.Operations.alter_column`
+
+
+ """ # noqa: E501
+ ...
+
+ def create_check_constraint(
+ self,
+ constraint_name: str,
+ condition: Union[str, ColumnElement[bool], TextClause],
+ **kw: Any,
+ ) -> None:
+ """Issue a "create check constraint" instruction using the
+ current batch migration context.
+
+ The batch form of this call omits the ``source`` and ``schema``
+ arguments from the call.
+
+ .. seealso::
+
+ :meth:`.Operations.create_check_constraint`
+
+ """ # noqa: E501
+ ...
+
+ def create_exclude_constraint(
+ self, constraint_name: str, *elements: Any, **kw: Any
+ ):
+ """Issue a "create exclude constraint" instruction using the
+ current batch migration context.
+
+ .. note:: This method is Postgresql specific, and additionally
+ requires at least SQLAlchemy 1.0.
+
+ .. seealso::
+
+ :meth:`.Operations.create_exclude_constraint`
+
+ """ # noqa: E501
+ ...
+
+ def create_foreign_key(
+ self,
+ constraint_name: str,
+ referent_table: str,
+ local_cols: List[str],
+ remote_cols: List[str],
+ *,
+ referent_schema: Optional[str] = None,
+ onupdate: Optional[str] = None,
+ ondelete: Optional[str] = None,
+ deferrable: Optional[bool] = None,
+ initially: Optional[str] = None,
+ match: Optional[str] = None,
+ **dialect_kw: Any,
+ ) -> None:
+ """Issue a "create foreign key" instruction using the
+ current batch migration context.
+
+ The batch form of this call omits the ``source`` and ``source_schema``
+ arguments from the call.
+
+ e.g.::
+
+ with batch_alter_table("address") as batch_op:
+ batch_op.create_foreign_key(
+ "fk_user_address",
+ "user",
+ ["user_id"],
+ ["id"],
+ )
+
+ .. seealso::
+
+ :meth:`.Operations.create_foreign_key`
+
+ """ # noqa: E501
+ ...
+
+ def create_index(
+ self, index_name: str, columns: List[str], **kw: Any
+ ) -> None:
+ """Issue a "create index" instruction using the
+ current batch migration context.
+
+ .. seealso::
+
+ :meth:`.Operations.create_index`
+
+ """ # noqa: E501
+ ...
+
+ def create_primary_key(
+ self, constraint_name: str, columns: List[str]
+ ) -> None:
+ """Issue a "create primary key" instruction using the
+ current batch migration context.
+
+ The batch form of this call omits the ``table_name`` and ``schema``
+ arguments from the call.
+
+ .. seealso::
+
+ :meth:`.Operations.create_primary_key`
+
+ """ # noqa: E501
+ ...
+
+ def create_table_comment(
+ self,
+ comment: Optional[str],
+ *,
+ existing_comment: Optional[str] = None,
+ ) -> None:
+ """Emit a COMMENT ON operation to set the comment for a table
+ using the current batch migration context.
+
+ :param comment: string value of the comment being registered against
+ the specified table.
+ :param existing_comment: String value of a comment
+ already registered on the specified table, used within autogenerate
+ so that the operation is reversible, but not required for direct
+ use.
+
+ """ # noqa: E501
+ ...
+
+ def create_unique_constraint(
+ self, constraint_name: str, columns: Sequence[str], **kw: Any
+ ) -> Any:
+ """Issue a "create unique constraint" instruction using the
+ current batch migration context.
+
+ The batch form of this call omits the ``source`` and ``schema``
+ arguments from the call.
+
+ .. seealso::
+
+ :meth:`.Operations.create_unique_constraint`
+
+ """ # noqa: E501
+ ...
+
+ def drop_column(self, column_name: str, **kw: Any) -> None:
+ """Issue a "drop column" instruction using the current
+ batch migration context.
+
+ .. seealso::
+
+ :meth:`.Operations.drop_column`
+
+ """ # noqa: E501
+ ...
+
+ def drop_constraint(
+ self, constraint_name: str, type_: Optional[str] = None
+ ) -> None:
+ """Issue a "drop constraint" instruction using the
+ current batch migration context.
+
+ The batch form of this call omits the ``table_name`` and ``schema``
+ arguments from the call.
+
+ .. seealso::
+
+ :meth:`.Operations.drop_constraint`
+
+ """ # noqa: E501
+ ...
+
+ def drop_index(self, index_name: str, **kw: Any) -> None:
+ """Issue a "drop index" instruction using the
+ current batch migration context.
+
+ .. seealso::
+
+ :meth:`.Operations.drop_index`
+
+ """ # noqa: E501
+ ...
+
+ def drop_table_comment(
+ self, *, existing_comment: Optional[str] = None
+ ) -> None:
+ """Issue a "drop table comment" operation to
+ remove an existing comment set on a table using the current
+ batch operations context.
+
+ :param existing_comment: An optional string value of a comment already
+ registered on the specified table.
+
+ """ # noqa: E501
+ ...
+
+ def execute(
+ self,
+ sqltext: Union[Executable, str],
+ *,
+ execution_options: Optional[dict[str, Any]] = None,
+ ) -> None:
+ """Execute the given SQL using the current migration context.
+
+ .. seealso::
+
+ :meth:`.Operations.execute`
+
+ """ # noqa: E501
+ ...
+
+ # END STUB FUNCTIONS: batch_op
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/operations/batch.py b/Backend/venv/lib/python3.12/site-packages/alembic/operations/batch.py
new file mode 100644
index 00000000..8c88e885
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/alembic/operations/batch.py
@@ -0,0 +1,718 @@
+from __future__ import annotations
+
+from typing import Any
+from typing import Dict
+from typing import List
+from typing import Optional
+from typing import Tuple
+from typing import TYPE_CHECKING
+from typing import Union
+
+from sqlalchemy import CheckConstraint
+from sqlalchemy import Column
+from sqlalchemy import ForeignKeyConstraint
+from sqlalchemy import Index
+from sqlalchemy import MetaData
+from sqlalchemy import PrimaryKeyConstraint
+from sqlalchemy import schema as sql_schema
+from sqlalchemy import Table
+from sqlalchemy import types as sqltypes
+from sqlalchemy.events import SchemaEventTarget
+from sqlalchemy.util import OrderedDict
+from sqlalchemy.util import topological
+
+from ..util import exc
+from ..util.sqla_compat import _columns_for_constraint
+from ..util.sqla_compat import _copy
+from ..util.sqla_compat import _copy_expression
+from ..util.sqla_compat import _ensure_scope_for_ddl
+from ..util.sqla_compat import _fk_is_self_referential
+from ..util.sqla_compat import _idx_table_bound_expressions
+from ..util.sqla_compat import _insert_inline
+from ..util.sqla_compat import _is_type_bound
+from ..util.sqla_compat import _remove_column_from_collection
+from ..util.sqla_compat import _resolve_for_variant
+from ..util.sqla_compat import _select
+from ..util.sqla_compat import constraint_name_defined
+from ..util.sqla_compat import constraint_name_string
+
+if TYPE_CHECKING:
+ from typing import Literal
+
+ from sqlalchemy.engine import Dialect
+ from sqlalchemy.sql.elements import ColumnClause
+ from sqlalchemy.sql.elements import quoted_name
+ from sqlalchemy.sql.functions import Function
+ from sqlalchemy.sql.schema import Constraint
+ from sqlalchemy.sql.type_api import TypeEngine
+
+ from ..ddl.impl import DefaultImpl
+
+
+class BatchOperationsImpl:
+ def __init__(
+ self,
+ operations,
+ table_name,
+ schema,
+ recreate,
+ copy_from,
+ table_args,
+ table_kwargs,
+ reflect_args,
+ reflect_kwargs,
+ naming_convention,
+ partial_reordering,
+ ):
+ self.operations = operations
+ self.table_name = table_name
+ self.schema = schema
+ if recreate not in ("auto", "always", "never"):
+ raise ValueError(
+ "recreate may be one of 'auto', 'always', or 'never'."
+ )
+ self.recreate = recreate
+ self.copy_from = copy_from
+ self.table_args = table_args
+ self.table_kwargs = dict(table_kwargs)
+ self.reflect_args = reflect_args
+ self.reflect_kwargs = dict(reflect_kwargs)
+ self.reflect_kwargs.setdefault(
+ "listeners", list(self.reflect_kwargs.get("listeners", ()))
+ )
+ self.reflect_kwargs["listeners"].append(
+ ("column_reflect", operations.impl.autogen_column_reflect)
+ )
+ self.naming_convention = naming_convention
+ self.partial_reordering = partial_reordering
+ self.batch = []
+
+ @property
+ def dialect(self) -> Dialect:
+ return self.operations.impl.dialect
+
+ @property
+ def impl(self) -> DefaultImpl:
+ return self.operations.impl
+
+ def _should_recreate(self) -> bool:
+ if self.recreate == "auto":
+ return self.operations.impl.requires_recreate_in_batch(self)
+ elif self.recreate == "always":
+ return True
+ else:
+ return False
+
+ def flush(self) -> None:
+ should_recreate = self._should_recreate()
+
+ with _ensure_scope_for_ddl(self.impl.connection):
+ if not should_recreate:
+ for opname, arg, kw in self.batch:
+ fn = getattr(self.operations.impl, opname)
+ fn(*arg, **kw)
+ else:
+ if self.naming_convention:
+ m1 = MetaData(naming_convention=self.naming_convention)
+ else:
+ m1 = MetaData()
+
+ if self.copy_from is not None:
+ existing_table = self.copy_from
+ reflected = False
+ else:
+ if self.operations.migration_context.as_sql:
+ raise exc.CommandError(
+ f"This operation cannot proceed in --sql mode; "
+ f"batch mode with dialect "
+ f"{self.operations.migration_context.dialect.name} " # noqa: E501
+ f"requires a live database connection with which "
+ f'to reflect the table "{self.table_name}". '
+ f"To generate a batch SQL migration script using "
+ "table "
+ '"move and copy", a complete Table object '
+ f'should be passed to the "copy_from" argument '
+ "of the batch_alter_table() method so that table "
+ "reflection can be skipped."
+ )
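+
+                # A sketch of the "copy_from" workaround named in the
+                # message above (table and column names are illustrative):
+                #
+                #     from sqlalchemy import Column, Integer, MetaData, Table
+                #
+                #     meta = MetaData()
+                #     accounts = Table(
+                #         "accounts", meta,
+                #         Column("id", Integer, primary_key=True),
+                #     )
+                #     with op.batch_alter_table(
+                #         "accounts", copy_from=accounts
+                #     ) as batch_op:
+                #         batch_op.drop_column("legacy_flag")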
+
+ existing_table = Table(
+ self.table_name,
+ m1,
+ schema=self.schema,
+ autoload_with=self.operations.get_bind(),
+ *self.reflect_args,
+ **self.reflect_kwargs,
+ )
+ reflected = True
+
+ batch_impl = ApplyBatchImpl(
+ self.impl,
+ existing_table,
+ self.table_args,
+ self.table_kwargs,
+ reflected,
+ partial_reordering=self.partial_reordering,
+ )
+ for opname, arg, kw in self.batch:
+ fn = getattr(batch_impl, opname)
+ fn(*arg, **kw)
+
+ batch_impl._create(self.impl)
+
+ def alter_column(self, *arg, **kw) -> None:
+ self.batch.append(("alter_column", arg, kw))
+
+ def add_column(self, *arg, **kw) -> None:
+ if (
+ "insert_before" in kw or "insert_after" in kw
+ ) and not self._should_recreate():
+ raise exc.CommandError(
+ "Can't specify insert_before or insert_after when using "
+ "ALTER; please specify recreate='always'"
+ )
+ self.batch.append(("add_column", arg, kw))
+
+ def drop_column(self, *arg, **kw) -> None:
+ self.batch.append(("drop_column", arg, kw))
+
+ def add_constraint(self, const: Constraint) -> None:
+ self.batch.append(("add_constraint", (const,), {}))
+
+ def drop_constraint(self, const: Constraint) -> None:
+ self.batch.append(("drop_constraint", (const,), {}))
+
+ def rename_table(self, *arg, **kw):
+ self.batch.append(("rename_table", arg, kw))
+
+ def create_index(self, idx: Index, **kw: Any) -> None:
+ self.batch.append(("create_index", (idx,), kw))
+
+ def drop_index(self, idx: Index, **kw: Any) -> None:
+ self.batch.append(("drop_index", (idx,), kw))
+
+ def create_table_comment(self, table):
+ self.batch.append(("create_table_comment", (table,), {}))
+
+ def drop_table_comment(self, table):
+ self.batch.append(("drop_table_comment", (table,), {}))
+
+ def create_table(self, table):
+ raise NotImplementedError("Can't create table in batch mode")
+
+ def drop_table(self, table):
+ raise NotImplementedError("Can't drop table in batch mode")
+
+ def create_column_comment(self, column):
+ self.batch.append(("create_column_comment", (column,), {}))
+
+
+class ApplyBatchImpl:
+ def __init__(
+ self,
+ impl: DefaultImpl,
+ table: Table,
+ table_args: tuple,
+ table_kwargs: Dict[str, Any],
+ reflected: bool,
+ partial_reordering: tuple = (),
+ ) -> None:
+ self.impl = impl
+ self.table = table # this is a Table object
+ self.table_args = table_args
+ self.table_kwargs = table_kwargs
+ self.temp_table_name = self._calc_temp_name(table.name)
+ self.new_table: Optional[Table] = None
+
+ self.partial_reordering = partial_reordering # tuple of tuples
+ self.add_col_ordering: Tuple[
+ Tuple[str, str], ...
+ ] = () # tuple of tuples
+
+ self.column_transfers = OrderedDict(
+ (c.name, {"expr": c}) for c in self.table.c
+ )
+ self.existing_ordering = list(self.column_transfers)
+
+ self.reflected = reflected
+ self._grab_table_elements()
+
+ @classmethod
+ def _calc_temp_name(cls, tablename: Union[quoted_name, str]) -> str:
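+        # e.g. "accounts" -> "_alembic_tmp_accounts"; the result is truncated
+        # to 50 characters, presumably to respect backend identifier limits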
+ return ("_alembic_tmp_%s" % tablename)[0:50]
+
+ def _grab_table_elements(self) -> None:
+ schema = self.table.schema
+ self.columns: Dict[str, Column[Any]] = OrderedDict()
+ for c in self.table.c:
+ c_copy = _copy(c, schema=schema)
+ c_copy.unique = c_copy.index = False
+ # ensure that the type object was copied,
+ # as we may need to modify it in-place
+ if isinstance(c.type, SchemaEventTarget):
+ assert c_copy.type is not c.type
+ self.columns[c.name] = c_copy
+ self.named_constraints: Dict[str, Constraint] = {}
+ self.unnamed_constraints = []
+ self.col_named_constraints = {}
+ self.indexes: Dict[str, Index] = {}
+ self.new_indexes: Dict[str, Index] = {}
+
+ for const in self.table.constraints:
+ if _is_type_bound(const):
+ continue
+ elif (
+ self.reflected
+ and isinstance(const, CheckConstraint)
+ and not const.name
+ ):
+                # TODO: we are skipping unnamed reflected CheckConstraint
+                # because we have no way to determine _is_type_bound()
+                # for these.
+ pass
+ elif constraint_name_string(const.name):
+ self.named_constraints[const.name] = const
+ else:
+ self.unnamed_constraints.append(const)
+
+ if not self.reflected:
+ for col in self.table.c:
+ for const in col.constraints:
+ if const.name:
+ self.col_named_constraints[const.name] = (col, const)
+
+ for idx in self.table.indexes:
+ self.indexes[idx.name] = idx # type: ignore[index]
+
+ for k in self.table.kwargs:
+ self.table_kwargs.setdefault(k, self.table.kwargs[k])
+
+ def _adjust_self_columns_for_partial_reordering(self) -> None:
+ pairs = set()
+
+ col_by_idx = list(self.columns)
+
+ if self.partial_reordering:
+ for tuple_ in self.partial_reordering:
+ for index, elem in enumerate(tuple_):
+ if index > 0:
+ pairs.add((tuple_[index - 1], elem))
+ else:
+ for index, elem in enumerate(self.existing_ordering):
+ if index > 0:
+ pairs.add((col_by_idx[index - 1], elem))
+
+ pairs.update(self.add_col_ordering)
+
+ # this can happen if some columns were dropped and not removed
+ # from existing_ordering. this should be prevented already, but
+ # conservatively making sure this didn't happen
+ pairs_list = [p for p in pairs if p[0] != p[1]]
+
+ sorted_ = list(
+ topological.sort(pairs_list, col_by_idx, deterministic_order=True)
+ )
+ self.columns = OrderedDict((k, self.columns[k]) for k in sorted_)
+ self.column_transfers = OrderedDict(
+ (k, self.column_transfers[k]) for k in sorted_
+ )
+
+ def _transfer_elements_to_new_table(self) -> None:
+ assert self.new_table is None, "Can only create new table once"
+
+ m = MetaData()
+ schema = self.table.schema
+
+ if self.partial_reordering or self.add_col_ordering:
+ self._adjust_self_columns_for_partial_reordering()
+
+ self.new_table = new_table = Table(
+ self.temp_table_name,
+ m,
+ *(list(self.columns.values()) + list(self.table_args)),
+ schema=schema,
+ **self.table_kwargs,
+ )
+
+ for const in (
+ list(self.named_constraints.values()) + self.unnamed_constraints
+ ):
+ const_columns = {c.key for c in _columns_for_constraint(const)}
+
+ if not const_columns.issubset(self.column_transfers):
+ continue
+
+ const_copy: Constraint
+ if isinstance(const, ForeignKeyConstraint):
+ if _fk_is_self_referential(const):
+ # for self-referential constraint, refer to the
+ # *original* table name, and not _alembic_batch_temp.
+ # This is consistent with how we're handling
+                    # FK constraints from other tables; we assume SQLite
+                    # with foreign keys disabled just keeps the names
+                    # unchanged, so when we rename back, they match again.
+ const_copy = _copy(
+ const, schema=schema, target_table=self.table
+ )
+ else:
+ # "target_table" for ForeignKeyConstraint.copy() is
+ # only used if the FK is detected as being
+ # self-referential, which we are handling above.
+ const_copy = _copy(const, schema=schema)
+ else:
+ const_copy = _copy(
+ const, schema=schema, target_table=new_table
+ )
+ if isinstance(const, ForeignKeyConstraint):
+ self._setup_referent(m, const)
+ new_table.append_constraint(const_copy)
+
+ def _gather_indexes_from_both_tables(self) -> List[Index]:
+ assert self.new_table is not None
+ idx: List[Index] = []
+
+ for idx_existing in self.indexes.values():
+ # this is a lift-and-move from Table.to_metadata
+
+ if idx_existing._column_flag: # type: ignore
+ continue
+
+ idx_copy = Index(
+ idx_existing.name,
+ unique=idx_existing.unique,
+ *[
+ _copy_expression(expr, self.new_table)
+ for expr in _idx_table_bound_expressions(idx_existing)
+ ],
+ _table=self.new_table,
+ **idx_existing.kwargs,
+ )
+ idx.append(idx_copy)
+
+ for index in self.new_indexes.values():
+ idx.append(
+ Index(
+ index.name,
+ unique=index.unique,
+ *[self.new_table.c[col] for col in index.columns.keys()],
+ **index.kwargs,
+ )
+ )
+ return idx
+
+ def _setup_referent(
+ self, metadata: MetaData, constraint: ForeignKeyConstraint
+ ) -> None:
+ spec = constraint.elements[
+ 0
+ ]._get_colspec() # type:ignore[attr-defined]
+ parts = spec.split(".")
+ tname = parts[-2]
+ if len(parts) == 3:
+ referent_schema = parts[0]
+ else:
+ referent_schema = None
+
+ if tname != self.temp_table_name:
+ key = sql_schema._get_table_key(tname, referent_schema)
+
+ def colspec(elem: Any):
+ return elem._get_colspec()
+
+ if key in metadata.tables:
+ t = metadata.tables[key]
+ for elem in constraint.elements:
+ colname = colspec(elem).split(".")[-1]
+ if colname not in t.c:
+ t.append_column(Column(colname, sqltypes.NULLTYPE))
+ else:
+ Table(
+ tname,
+ metadata,
+ *[
+ Column(n, sqltypes.NULLTYPE)
+ for n in [
+ colspec(elem).split(".")[-1]
+ for elem in constraint.elements
+ ]
+ ],
+ schema=referent_schema,
+ )
+
+ def _create(self, op_impl: DefaultImpl) -> None:
+ self._transfer_elements_to_new_table()
+
+ op_impl.prep_table_for_batch(self, self.table)
+ assert self.new_table is not None
+ op_impl.create_table(self.new_table)
+
+ try:
+ op_impl._exec(
+ _insert_inline(self.new_table).from_select(
+ list(
+ k
+ for k, transfer in self.column_transfers.items()
+ if "expr" in transfer
+ ),
+ _select(
+ *[
+ transfer["expr"]
+ for transfer in self.column_transfers.values()
+ if "expr" in transfer
+ ]
+ ),
+ )
+ )
+ op_impl.drop_table(self.table)
+ except:
+ op_impl.drop_table(self.new_table)
+ raise
+ else:
+ op_impl.rename_table(
+ self.temp_table_name, self.table.name, schema=self.table.schema
+ )
+ self.new_table.name = self.table.name
+ try:
+ for idx in self._gather_indexes_from_both_tables():
+ op_impl.create_index(idx)
+ finally:
+ self.new_table.name = self.temp_table_name
+
+ def alter_column(
+ self,
+ table_name: str,
+ column_name: str,
+ nullable: Optional[bool] = None,
+ server_default: Optional[Union[Function[Any], str, bool]] = False,
+ name: Optional[str] = None,
+ type_: Optional[TypeEngine] = None,
+ autoincrement: Optional[Union[bool, Literal["auto"]]] = None,
+ comment: Union[str, Literal[False]] = False,
+ **kw,
+ ) -> None:
+ existing = self.columns[column_name]
+ existing_transfer: Dict[str, Any] = self.column_transfers[column_name]
+ if name is not None and name != column_name:
+ # note that we don't change '.key' - we keep referring
+ # to the renamed column by its old key in _create(). neat!
+ existing.name = name
+ existing_transfer["name"] = name
+
+ existing_type = kw.get("existing_type", None)
+ if existing_type:
+ resolved_existing_type = _resolve_for_variant(
+ kw["existing_type"], self.impl.dialect
+ )
+
+ # pop named constraints for Boolean/Enum for rename
+ if (
+ isinstance(resolved_existing_type, SchemaEventTarget)
+ and resolved_existing_type.name # type:ignore[attr-defined] # noqa E501
+ ):
+ self.named_constraints.pop(
+ resolved_existing_type.name, # type:ignore[attr-defined] # noqa E501
+ None,
+ )
+
+ if type_ is not None:
+ type_ = sqltypes.to_instance(type_)
+ # old type is being discarded so turn off eventing
+ # rules. Alternatively we can
+ # erase the events set up by this type, but this is simpler.
+ # we also ignore the drop_constraint that will come here from
+ # Operations.implementation_for(alter_column)
+
+ if isinstance(existing.type, SchemaEventTarget):
+ existing.type._create_events = ( # type:ignore[attr-defined]
+ existing.type.create_constraint # type:ignore[attr-defined] # noqa
+ ) = False
+
+ self.impl.cast_for_batch_migrate(
+ existing, existing_transfer, type_
+ )
+
+ existing.type = type_
+
+ # we *dont* however set events for the new type, because
+ # alter_column is invoked from
+ # Operations.implementation_for(alter_column) which already
+ # will emit an add_constraint()
+
+ if nullable is not None:
+ existing.nullable = nullable
+ if server_default is not False:
+ if server_default is None:
+ existing.server_default = None
+ else:
+ sql_schema.DefaultClause(
+ server_default # type: ignore[arg-type]
+ )._set_parent( # type:ignore[attr-defined]
+ existing
+ )
+ if autoincrement is not None:
+ existing.autoincrement = bool(autoincrement)
+
+ if comment is not False:
+ existing.comment = comment
+
+ def _setup_dependencies_for_add_column(
+ self,
+ colname: str,
+ insert_before: Optional[str],
+ insert_after: Optional[str],
+ ) -> None:
+ index_cols = self.existing_ordering
+ col_indexes = {name: i for i, name in enumerate(index_cols)}
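+
+        # Bookkeeping sketch (column names are illustrative): on a table
+        # ordered ("a", "b"), add_column(..., insert_after="a") records the
+        # pairs ("a", "<newcol>") and ("<newcol>", "b") below; the
+        # topological sort in _adjust_self_columns_for_partial_reordering
+        # later resolves these pairs into ("a", "<newcol>", "b").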
+
+ if not self.partial_reordering:
+ if insert_after:
+ if not insert_before:
+ if insert_after in col_indexes:
+ # insert after an existing column
+ idx = col_indexes[insert_after] + 1
+ if idx < len(index_cols):
+ insert_before = index_cols[idx]
+ else:
+ # insert after a column that is also new
+ insert_before = dict(self.add_col_ordering)[
+ insert_after
+ ]
+ if insert_before:
+ if not insert_after:
+ if insert_before in col_indexes:
+ # insert before an existing column
+ idx = col_indexes[insert_before] - 1
+ if idx >= 0:
+ insert_after = index_cols[idx]
+ else:
+ # insert before a column that is also new
+ insert_after = {
+ b: a for a, b in self.add_col_ordering
+ }[insert_before]
+
+ if insert_before:
+ self.add_col_ordering += ((colname, insert_before),)
+ if insert_after:
+ self.add_col_ordering += ((insert_after, colname),)
+
+ if (
+ not self.partial_reordering
+ and not insert_before
+ and not insert_after
+ and col_indexes
+ ):
+ self.add_col_ordering += ((index_cols[-1], colname),)
+
+ def add_column(
+ self,
+ table_name: str,
+ column: Column[Any],
+ insert_before: Optional[str] = None,
+ insert_after: Optional[str] = None,
+ **kw,
+ ) -> None:
+ self._setup_dependencies_for_add_column(
+ column.name, insert_before, insert_after
+ )
+ # we copy the column because operations.add_column()
+ # gives us a Column that is part of a Table already.
+ self.columns[column.name] = _copy(column, schema=self.table.schema)
+ self.column_transfers[column.name] = {}
+
+ def drop_column(
+ self,
+ table_name: str,
+ column: Union[ColumnClause[Any], Column[Any]],
+ **kw,
+ ) -> None:
+ if column.name in self.table.primary_key.columns:
+ _remove_column_from_collection(
+ self.table.primary_key.columns, column
+ )
+ del self.columns[column.name]
+ del self.column_transfers[column.name]
+ self.existing_ordering.remove(column.name)
+
+ # pop named constraints for Boolean/Enum for rename
+ if (
+ "existing_type" in kw
+ and isinstance(kw["existing_type"], SchemaEventTarget)
+ and kw["existing_type"].name # type:ignore[attr-defined]
+ ):
+ self.named_constraints.pop(
+ kw["existing_type"].name, None # type:ignore[attr-defined]
+ )
+
+ def create_column_comment(self, column):
+ """the batch table creation function will issue create_column_comment
+ on the real "impl" as part of the create table process.
+
+ That is, the Column object will have the comment on it already,
+ so when it is received by add_column() it will be a normal part of
+ the CREATE TABLE and doesn't need an extra step here.
+
+ """
+
+ def create_table_comment(self, table):
+ """the batch table creation function will issue create_table_comment
+ on the real "impl" as part of the create table process.
+
+ """
+
+ def drop_table_comment(self, table):
+ """the batch table creation function will issue drop_table_comment
+ on the real "impl" as part of the create table process.
+
+ """
+
+ def add_constraint(self, const: Constraint) -> None:
+ if not constraint_name_defined(const.name):
+ raise ValueError("Constraint must have a name")
+ if isinstance(const, sql_schema.PrimaryKeyConstraint):
+ if self.table.primary_key in self.unnamed_constraints:
+ self.unnamed_constraints.remove(self.table.primary_key)
+
+ if constraint_name_string(const.name):
+ self.named_constraints[const.name] = const
+ else:
+ self.unnamed_constraints.append(const)
+
+ def drop_constraint(self, const: Constraint) -> None:
+ if not const.name:
+ raise ValueError("Constraint must have a name")
+ try:
+ if const.name in self.col_named_constraints:
+ col, const = self.col_named_constraints.pop(const.name)
+
+ for col_const in list(self.columns[col.name].constraints):
+ if col_const.name == const.name:
+ self.columns[col.name].constraints.remove(col_const)
+ elif constraint_name_string(const.name):
+ const = self.named_constraints.pop(const.name)
+ elif const in self.unnamed_constraints:
+ self.unnamed_constraints.remove(const)
+
+ except KeyError:
+ if _is_type_bound(const):
+ # type-bound constraints are only included in the new
+ # table via their type object in any case, so ignore the
+ # drop_constraint() that comes here via the
+ # Operations.implementation_for(alter_column)
+ return
+ raise ValueError("No such constraint: '%s'" % const.name)
+ else:
+ if isinstance(const, PrimaryKeyConstraint):
+ for col in const.columns:
+ self.columns[col.name].primary_key = False
+
+ def create_index(self, idx: Index) -> None:
+ self.new_indexes[idx.name] = idx # type: ignore[index]
+
+ def drop_index(self, idx: Index) -> None:
+ try:
+ del self.indexes[idx.name] # type: ignore[arg-type]
+ except KeyError:
+ raise ValueError("No such index: '%s'" % idx.name)
+
+ def rename_table(self, *arg, **kw):
+ raise NotImplementedError("TODO")
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/operations/ops.py b/Backend/venv/lib/python3.12/site-packages/alembic/operations/ops.py
new file mode 100644
index 00000000..711d7aba
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/alembic/operations/ops.py
@@ -0,0 +1,2764 @@
+from __future__ import annotations
+
+from abc import abstractmethod
+import re
+from typing import Any
+from typing import Callable
+from typing import cast
+from typing import FrozenSet
+from typing import Iterator
+from typing import List
+from typing import MutableMapping
+from typing import Optional
+from typing import Sequence
+from typing import Set
+from typing import Tuple
+from typing import Type
+from typing import TYPE_CHECKING
+from typing import Union
+
+from sqlalchemy.types import NULLTYPE
+
+from . import schemaobj
+from .base import BatchOperations
+from .base import Operations
+from .. import util
+from ..util import sqla_compat
+
+if TYPE_CHECKING:
+ from typing import Literal
+
+ from sqlalchemy.sql import Executable
+ from sqlalchemy.sql.elements import ColumnElement
+ from sqlalchemy.sql.elements import conv
+ from sqlalchemy.sql.elements import quoted_name
+ from sqlalchemy.sql.elements import TextClause
+ from sqlalchemy.sql.functions import Function
+ from sqlalchemy.sql.schema import CheckConstraint
+ from sqlalchemy.sql.schema import Column
+ from sqlalchemy.sql.schema import Computed
+ from sqlalchemy.sql.schema import Constraint
+ from sqlalchemy.sql.schema import ForeignKeyConstraint
+ from sqlalchemy.sql.schema import Identity
+ from sqlalchemy.sql.schema import Index
+ from sqlalchemy.sql.schema import MetaData
+ from sqlalchemy.sql.schema import PrimaryKeyConstraint
+ from sqlalchemy.sql.schema import SchemaItem
+ from sqlalchemy.sql.schema import Table
+ from sqlalchemy.sql.schema import UniqueConstraint
+ from sqlalchemy.sql.selectable import TableClause
+ from sqlalchemy.sql.type_api import TypeEngine
+
+ from ..autogenerate.rewriter import Rewriter
+ from ..runtime.migration import MigrationContext
+ from ..script.revision import _RevIdType
+
+
+class MigrateOperation:
+ """base class for migration command and organization objects.
+
+ This system is part of the operation extensibility API.
+
+ .. seealso::
+
+ :ref:`operation_objects`
+
+ :ref:`operation_plugins`
+
+ :ref:`customizing_revision`
+
+ """
+
+ @util.memoized_property
+ def info(self):
+ """A dictionary that may be used to store arbitrary information
+ along with this :class:`.MigrateOperation` object.
+
+ """
+ return {}
+
+ _mutations: FrozenSet[Rewriter] = frozenset()
+
+ def reverse(self) -> MigrateOperation:
+ raise NotImplementedError
+
+ def to_diff_tuple(self) -> Tuple[Any, ...]:
+ raise NotImplementedError
+
+
+class AddConstraintOp(MigrateOperation):
+ """Represent an add constraint operation."""
+
+ add_constraint_ops = util.Dispatcher()
+
+ @property
+ def constraint_type(self):
+ raise NotImplementedError()
+
+ @classmethod
+ def register_add_constraint(cls, type_: str) -> Callable:
+ def go(klass):
+ cls.add_constraint_ops.dispatch_for(type_)(klass.from_constraint)
+ return klass
+
+ return go
+
+ @classmethod
+ def from_constraint(cls, constraint: Constraint) -> AddConstraintOp:
+ return cls.add_constraint_ops.dispatch(constraint.__visit_name__)(
+ constraint
+ )
+
+ @abstractmethod
+ def to_constraint(
+ self, migration_context: Optional[MigrationContext] = None
+ ) -> Constraint:
+ pass
+
+ def reverse(self) -> DropConstraintOp:
+ return DropConstraintOp.from_constraint(self.to_constraint())
+
+ def to_diff_tuple(self) -> Tuple[str, Constraint]:
+ return ("add_constraint", self.to_constraint())
+
+
+@Operations.register_operation("drop_constraint")
+@BatchOperations.register_operation("drop_constraint", "batch_drop_constraint")
+class DropConstraintOp(MigrateOperation):
+ """Represent a drop constraint operation."""
+
+ def __init__(
+ self,
+ constraint_name: Optional[sqla_compat._ConstraintNameDefined],
+ table_name: str,
+ type_: Optional[str] = None,
+ *,
+ schema: Optional[str] = None,
+ _reverse: Optional[AddConstraintOp] = None,
+ ) -> None:
+ self.constraint_name = constraint_name
+ self.table_name = table_name
+ self.constraint_type = type_
+ self.schema = schema
+ self._reverse = _reverse
+
+ def reverse(self) -> AddConstraintOp:
+ return AddConstraintOp.from_constraint(self.to_constraint())
+
+ def to_diff_tuple(
+ self,
+ ) -> Tuple[str, SchemaItem]:
+ if self.constraint_type == "foreignkey":
+ return ("remove_fk", self.to_constraint())
+ else:
+ return ("remove_constraint", self.to_constraint())
+
+ @classmethod
+ def from_constraint(cls, constraint: Constraint) -> DropConstraintOp:
+ types = {
+ "unique_constraint": "unique",
+ "foreign_key_constraint": "foreignkey",
+ "primary_key_constraint": "primary",
+ "check_constraint": "check",
+ "column_check_constraint": "check",
+ "table_or_column_check_constraint": "check",
+ }
+
+ constraint_table = sqla_compat._table_for_constraint(constraint)
+ return cls(
+ sqla_compat.constraint_name_or_none(constraint.name),
+ constraint_table.name,
+ schema=constraint_table.schema,
+ type_=types.get(constraint.__visit_name__),
+ _reverse=AddConstraintOp.from_constraint(constraint),
+ )
+
+ def to_constraint(self) -> Constraint:
+ if self._reverse is not None:
+ constraint = self._reverse.to_constraint()
+ constraint.name = self.constraint_name
+ constraint_table = sqla_compat._table_for_constraint(constraint)
+ constraint_table.name = self.table_name
+ constraint_table.schema = self.schema
+
+ return constraint
+ else:
+ raise ValueError(
+ "constraint cannot be produced; "
+ "original constraint is not present"
+ )
+
+ @classmethod
+ def drop_constraint(
+ cls,
+ operations: Operations,
+ constraint_name: str,
+ table_name: str,
+ type_: Optional[str] = None,
+ *,
+ schema: Optional[str] = None,
+ ) -> None:
+ r"""Drop a constraint of the given name, typically via DROP CONSTRAINT.
+
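+        e.g. (constraint and table names are illustrative)::
+
+            op.drop_constraint("fk_user_address", "address", type_="foreignkey")
+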
+ :param constraint_name: name of the constraint.
+ :param table_name: table name.
+        :param type\_: optional, required on MySQL. Can be
+            'foreignkey', 'primary', 'unique', or 'check'.
+ :param schema: Optional schema name to operate within. To control
+ quoting of the schema outside of the default behavior, use
+ the SQLAlchemy construct
+ :class:`~sqlalchemy.sql.elements.quoted_name`.
+
+ """
+
+ op = cls(constraint_name, table_name, type_=type_, schema=schema)
+ return operations.invoke(op)
+
+ @classmethod
+ def batch_drop_constraint(
+ cls,
+ operations: BatchOperations,
+ constraint_name: str,
+ type_: Optional[str] = None,
+ ) -> None:
+ """Issue a "drop constraint" instruction using the
+ current batch migration context.
+
+ The batch form of this call omits the ``table_name`` and ``schema``
+ arguments from the call.
+
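+        e.g. (the constraint name is illustrative)::
+
+            with op.batch_alter_table("address") as batch_op:
+                batch_op.drop_constraint("fk_user_address", type_="foreignkey")
+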
+ .. seealso::
+
+ :meth:`.Operations.drop_constraint`
+
+ """
+ op = cls(
+ constraint_name,
+ operations.impl.table_name,
+ type_=type_,
+ schema=operations.impl.schema,
+ )
+ return operations.invoke(op)
+
+
+@Operations.register_operation("create_primary_key")
+@BatchOperations.register_operation(
+ "create_primary_key", "batch_create_primary_key"
+)
+@AddConstraintOp.register_add_constraint("primary_key_constraint")
+class CreatePrimaryKeyOp(AddConstraintOp):
+ """Represent a create primary key operation."""
+
+ constraint_type = "primarykey"
+
+ def __init__(
+ self,
+ constraint_name: Optional[sqla_compat._ConstraintNameDefined],
+ table_name: str,
+ columns: Sequence[str],
+ *,
+ schema: Optional[str] = None,
+ **kw: Any,
+ ) -> None:
+ self.constraint_name = constraint_name
+ self.table_name = table_name
+ self.columns = columns
+ self.schema = schema
+ self.kw = kw
+
+ @classmethod
+ def from_constraint(cls, constraint: Constraint) -> CreatePrimaryKeyOp:
+ constraint_table = sqla_compat._table_for_constraint(constraint)
+ pk_constraint = cast("PrimaryKeyConstraint", constraint)
+ return cls(
+ sqla_compat.constraint_name_or_none(pk_constraint.name),
+ constraint_table.name,
+ pk_constraint.columns.keys(),
+ schema=constraint_table.schema,
+ **pk_constraint.dialect_kwargs,
+ )
+
+ def to_constraint(
+ self, migration_context: Optional[MigrationContext] = None
+ ) -> PrimaryKeyConstraint:
+ schema_obj = schemaobj.SchemaObjects(migration_context)
+
+ return schema_obj.primary_key_constraint(
+ self.constraint_name,
+ self.table_name,
+ self.columns,
+ schema=self.schema,
+ **self.kw,
+ )
+
+ @classmethod
+ def create_primary_key(
+ cls,
+ operations: Operations,
+ constraint_name: Optional[str],
+ table_name: str,
+ columns: List[str],
+ *,
+ schema: Optional[str] = None,
+ ) -> None:
+ """Issue a "create primary key" instruction using the current
+ migration context.
+
+ e.g.::
+
+ from alembic import op
+
+ op.create_primary_key("pk_my_table", "my_table", ["id", "version"])
+
+ This internally generates a :class:`~sqlalchemy.schema.Table` object
+ containing the necessary columns, then generates a new
+ :class:`~sqlalchemy.schema.PrimaryKeyConstraint`
+ object which it then associates with the
+ :class:`~sqlalchemy.schema.Table`.
+ Any event listeners associated with this action will be fired
+ off normally. The :class:`~sqlalchemy.schema.AddConstraint`
+ construct is ultimately used to generate the ALTER statement.
+
+ :param constraint_name: Name of the primary key constraint. The name
+ is necessary so that an ALTER statement can be emitted. For setups
+ that use an automated naming scheme such as that described at
+ :ref:`sqla:constraint_naming_conventions`
+ ``name`` here can be ``None``, as the event listener will
+ apply the name to the constraint object when it is associated
+ with the table.
+ :param table_name: String name of the target table.
+ :param columns: a list of string column names to be applied to the
+ primary key constraint.
+ :param schema: Optional schema name to operate within. To control
+ quoting of the schema outside of the default behavior, use
+ the SQLAlchemy construct
+ :class:`~sqlalchemy.sql.elements.quoted_name`.
+
+ """
+ op = cls(constraint_name, table_name, columns, schema=schema)
+ return operations.invoke(op)
+
+ @classmethod
+ def batch_create_primary_key(
+ cls,
+ operations: BatchOperations,
+ constraint_name: str,
+ columns: List[str],
+ ) -> None:
+ """Issue a "create primary key" instruction using the
+ current batch migration context.
+
+ The batch form of this call omits the ``table_name`` and ``schema``
+ arguments from the call.
+
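+        e.g. (names are illustrative)::
+
+            with op.batch_alter_table("account") as batch_op:
+                batch_op.create_primary_key("pk_account", ["id"])
+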
+ .. seealso::
+
+ :meth:`.Operations.create_primary_key`
+
+ """
+ op = cls(
+ constraint_name,
+ operations.impl.table_name,
+ columns,
+ schema=operations.impl.schema,
+ )
+ return operations.invoke(op)
+
+
+@Operations.register_operation("create_unique_constraint")
+@BatchOperations.register_operation(
+ "create_unique_constraint", "batch_create_unique_constraint"
+)
+@AddConstraintOp.register_add_constraint("unique_constraint")
+class CreateUniqueConstraintOp(AddConstraintOp):
+ """Represent a create unique constraint operation."""
+
+ constraint_type = "unique"
+
+ def __init__(
+ self,
+ constraint_name: Optional[sqla_compat._ConstraintNameDefined],
+ table_name: str,
+ columns: Sequence[str],
+ *,
+ schema: Optional[str] = None,
+ **kw: Any,
+ ) -> None:
+ self.constraint_name = constraint_name
+ self.table_name = table_name
+ self.columns = columns
+ self.schema = schema
+ self.kw = kw
+
+ @classmethod
+ def from_constraint(
+ cls, constraint: Constraint
+ ) -> CreateUniqueConstraintOp:
+ constraint_table = sqla_compat._table_for_constraint(constraint)
+
+ uq_constraint = cast("UniqueConstraint", constraint)
+
+ kw: dict = {}
+ if uq_constraint.deferrable:
+ kw["deferrable"] = uq_constraint.deferrable
+ if uq_constraint.initially:
+ kw["initially"] = uq_constraint.initially
+ kw.update(uq_constraint.dialect_kwargs)
+ return cls(
+ sqla_compat.constraint_name_or_none(uq_constraint.name),
+ constraint_table.name,
+ [c.name for c in uq_constraint.columns],
+ schema=constraint_table.schema,
+ **kw,
+ )
+
+ def to_constraint(
+ self, migration_context: Optional[MigrationContext] = None
+ ) -> UniqueConstraint:
+ schema_obj = schemaobj.SchemaObjects(migration_context)
+ return schema_obj.unique_constraint(
+ self.constraint_name,
+ self.table_name,
+ self.columns,
+ schema=self.schema,
+ **self.kw,
+ )
+
+ @classmethod
+ def create_unique_constraint(
+ cls,
+ operations: Operations,
+ constraint_name: Optional[str],
+ table_name: str,
+ columns: Sequence[str],
+ *,
+ schema: Optional[str] = None,
+ **kw: Any,
+ ) -> Any:
+ """Issue a "create unique constraint" instruction using the
+ current migration context.
+
+ e.g.::
+
+ from alembic import op
+ op.create_unique_constraint("uq_user_name", "user", ["name"])
+
+ This internally generates a :class:`~sqlalchemy.schema.Table` object
+ containing the necessary columns, then generates a new
+ :class:`~sqlalchemy.schema.UniqueConstraint`
+ object which it then associates with the
+ :class:`~sqlalchemy.schema.Table`.
+ Any event listeners associated with this action will be fired
+ off normally. The :class:`~sqlalchemy.schema.AddConstraint`
+ construct is ultimately used to generate the ALTER statement.
+
+        :param constraint_name: Name of the unique constraint. The name is
+            necessary so that an ALTER statement can be emitted. For setups
+            that use an automated naming scheme such as that described at
+ :ref:`sqla:constraint_naming_conventions`,
+ ``name`` here can be ``None``, as the event listener will
+ apply the name to the constraint object when it is associated
+ with the table.
+ :param table_name: String name of the source table.
+ :param columns: a list of string column names in the
+ source table.
+ :param deferrable: optional bool. If set, emit DEFERRABLE or
+ NOT DEFERRABLE when issuing DDL for this constraint.
+ :param initially: optional string. If set, emit INITIALLY
+ when issuing DDL for this constraint.
+ :param schema: Optional schema name to operate within. To control
+ quoting of the schema outside of the default behavior, use
+ the SQLAlchemy construct
+ :class:`~sqlalchemy.sql.elements.quoted_name`.
+
+ """
+
+ op = cls(constraint_name, table_name, columns, schema=schema, **kw)
+ return operations.invoke(op)
+
+ @classmethod
+ def batch_create_unique_constraint(
+ cls,
+ operations: BatchOperations,
+ constraint_name: str,
+ columns: Sequence[str],
+ **kw: Any,
+ ) -> Any:
+ """Issue a "create unique constraint" instruction using the
+ current batch migration context.
+
+        The batch form of this call omits the ``table_name`` and ``schema``
+        arguments from the call.
+
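+        e.g. (names are illustrative)::
+
+            with op.batch_alter_table("user") as batch_op:
+                batch_op.create_unique_constraint("uq_user_name", ["name"])
+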
+ .. seealso::
+
+ :meth:`.Operations.create_unique_constraint`
+
+ """
+ kw["schema"] = operations.impl.schema
+ op = cls(constraint_name, operations.impl.table_name, columns, **kw)
+ return operations.invoke(op)
+
+
+@Operations.register_operation("create_foreign_key")
+@BatchOperations.register_operation(
+ "create_foreign_key", "batch_create_foreign_key"
+)
+@AddConstraintOp.register_add_constraint("foreign_key_constraint")
+class CreateForeignKeyOp(AddConstraintOp):
+ """Represent a create foreign key constraint operation."""
+
+ constraint_type = "foreignkey"
+
+ def __init__(
+ self,
+ constraint_name: Optional[sqla_compat._ConstraintNameDefined],
+ source_table: str,
+ referent_table: str,
+ local_cols: List[str],
+ remote_cols: List[str],
+ **kw: Any,
+ ) -> None:
+ self.constraint_name = constraint_name
+ self.source_table = source_table
+ self.referent_table = referent_table
+ self.local_cols = local_cols
+ self.remote_cols = remote_cols
+ self.kw = kw
+
+ def to_diff_tuple(self) -> Tuple[str, ForeignKeyConstraint]:
+ return ("add_fk", self.to_constraint())
+
+ @classmethod
+ def from_constraint(cls, constraint: Constraint) -> CreateForeignKeyOp:
+ fk_constraint = cast("ForeignKeyConstraint", constraint)
+ kw: dict = {}
+ if fk_constraint.onupdate:
+ kw["onupdate"] = fk_constraint.onupdate
+ if fk_constraint.ondelete:
+ kw["ondelete"] = fk_constraint.ondelete
+ if fk_constraint.initially:
+ kw["initially"] = fk_constraint.initially
+ if fk_constraint.deferrable:
+ kw["deferrable"] = fk_constraint.deferrable
+ if fk_constraint.use_alter:
+ kw["use_alter"] = fk_constraint.use_alter
+ if fk_constraint.match:
+ kw["match"] = fk_constraint.match
+
+ (
+ source_schema,
+ source_table,
+ source_columns,
+ target_schema,
+ target_table,
+ target_columns,
+ onupdate,
+ ondelete,
+ deferrable,
+ initially,
+ ) = sqla_compat._fk_spec(fk_constraint)
+
+ kw["source_schema"] = source_schema
+ kw["referent_schema"] = target_schema
+ kw.update(fk_constraint.dialect_kwargs)
+ return cls(
+ sqla_compat.constraint_name_or_none(fk_constraint.name),
+ source_table,
+ target_table,
+ source_columns,
+ target_columns,
+ **kw,
+ )
+
+ def to_constraint(
+ self, migration_context: Optional[MigrationContext] = None
+ ) -> ForeignKeyConstraint:
+ schema_obj = schemaobj.SchemaObjects(migration_context)
+ return schema_obj.foreign_key_constraint(
+ self.constraint_name,
+ self.source_table,
+ self.referent_table,
+ self.local_cols,
+ self.remote_cols,
+ **self.kw,
+ )
+
+ @classmethod
+ def create_foreign_key(
+ cls,
+ operations: Operations,
+ constraint_name: Optional[str],
+ source_table: str,
+ referent_table: str,
+ local_cols: List[str],
+ remote_cols: List[str],
+ *,
+ onupdate: Optional[str] = None,
+ ondelete: Optional[str] = None,
+ deferrable: Optional[bool] = None,
+ initially: Optional[str] = None,
+ match: Optional[str] = None,
+ source_schema: Optional[str] = None,
+ referent_schema: Optional[str] = None,
+ **dialect_kw: Any,
+ ) -> None:
+ """Issue a "create foreign key" instruction using the
+ current migration context.
+
+ e.g.::
+
+ from alembic import op
+
+ op.create_foreign_key(
+ "fk_user_address",
+ "address",
+ "user",
+ ["user_id"],
+ ["id"],
+ )
+
+ This internally generates a :class:`~sqlalchemy.schema.Table` object
+ containing the necessary columns, then generates a new
+ :class:`~sqlalchemy.schema.ForeignKeyConstraint`
+ object which it then associates with the
+ :class:`~sqlalchemy.schema.Table`.
+ Any event listeners associated with this action will be fired
+ off normally. The :class:`~sqlalchemy.schema.AddConstraint`
+ construct is ultimately used to generate the ALTER statement.
+
+ :param constraint_name: Name of the foreign key constraint. The name
+ is necessary so that an ALTER statement can be emitted. For setups
+ that use an automated naming scheme such as that described at
+ :ref:`sqla:constraint_naming_conventions`,
+ ``name`` here can be ``None``, as the event listener will
+ apply the name to the constraint object when it is associated
+ with the table.
+ :param source_table: String name of the source table.
+ :param referent_table: String name of the destination table.
+ :param local_cols: a list of string column names in the
+ source table.
+ :param remote_cols: a list of string column names in the
+ remote table.
+ :param onupdate: Optional string. If set, emit ON UPDATE when
+ issuing DDL for this constraint. Typical values include CASCADE,
+            SET NULL and RESTRICT.
+ :param ondelete: Optional string. If set, emit ON DELETE when
+ issuing DDL for this constraint. Typical values include CASCADE,
+            SET NULL and RESTRICT.
+ :param deferrable: optional bool. If set, emit DEFERRABLE or NOT
+ DEFERRABLE when issuing DDL for this constraint.
+ :param source_schema: Optional schema name of the source table.
+ :param referent_schema: Optional schema name of the destination table.
+
+ """
+
+ op = cls(
+ constraint_name,
+ source_table,
+ referent_table,
+ local_cols,
+ remote_cols,
+ onupdate=onupdate,
+ ondelete=ondelete,
+ deferrable=deferrable,
+ source_schema=source_schema,
+ referent_schema=referent_schema,
+ initially=initially,
+ match=match,
+ **dialect_kw,
+ )
+ return operations.invoke(op)
+
+ @classmethod
+ def batch_create_foreign_key(
+ cls,
+ operations: BatchOperations,
+ constraint_name: str,
+ referent_table: str,
+ local_cols: List[str],
+ remote_cols: List[str],
+ *,
+ referent_schema: Optional[str] = None,
+ onupdate: Optional[str] = None,
+ ondelete: Optional[str] = None,
+ deferrable: Optional[bool] = None,
+ initially: Optional[str] = None,
+ match: Optional[str] = None,
+ **dialect_kw: Any,
+ ) -> None:
+ """Issue a "create foreign key" instruction using the
+ current batch migration context.
+
+        The batch form of this call omits the ``source_table`` and
+        ``source_schema`` arguments from the call.
+
+ e.g.::
+
+ with batch_alter_table("address") as batch_op:
+ batch_op.create_foreign_key(
+ "fk_user_address",
+ "user",
+ ["user_id"],
+ ["id"],
+ )
+
+ .. seealso::
+
+ :meth:`.Operations.create_foreign_key`
+
+ """
+ op = cls(
+ constraint_name,
+ operations.impl.table_name,
+ referent_table,
+ local_cols,
+ remote_cols,
+ onupdate=onupdate,
+ ondelete=ondelete,
+ deferrable=deferrable,
+ source_schema=operations.impl.schema,
+ referent_schema=referent_schema,
+ initially=initially,
+ match=match,
+ **dialect_kw,
+ )
+ return operations.invoke(op)
+
+
+@Operations.register_operation("create_check_constraint")
+@BatchOperations.register_operation(
+ "create_check_constraint", "batch_create_check_constraint"
+)
+@AddConstraintOp.register_add_constraint("check_constraint")
+@AddConstraintOp.register_add_constraint("table_or_column_check_constraint")
+@AddConstraintOp.register_add_constraint("column_check_constraint")
+class CreateCheckConstraintOp(AddConstraintOp):
+ """Represent a create check constraint operation."""
+
+ constraint_type = "check"
+
+ def __init__(
+ self,
+ constraint_name: Optional[sqla_compat._ConstraintNameDefined],
+ table_name: str,
+ condition: Union[str, TextClause, ColumnElement[Any]],
+ *,
+ schema: Optional[str] = None,
+ **kw: Any,
+ ) -> None:
+ self.constraint_name = constraint_name
+ self.table_name = table_name
+ self.condition = condition
+ self.schema = schema
+ self.kw = kw
+
+ @classmethod
+ def from_constraint(
+ cls, constraint: Constraint
+ ) -> CreateCheckConstraintOp:
+ constraint_table = sqla_compat._table_for_constraint(constraint)
+
+ ck_constraint = cast("CheckConstraint", constraint)
+ return cls(
+ sqla_compat.constraint_name_or_none(ck_constraint.name),
+ constraint_table.name,
+ cast("ColumnElement[Any]", ck_constraint.sqltext),
+ schema=constraint_table.schema,
+ **ck_constraint.dialect_kwargs,
+ )
+
+ def to_constraint(
+ self, migration_context: Optional[MigrationContext] = None
+ ) -> CheckConstraint:
+ schema_obj = schemaobj.SchemaObjects(migration_context)
+ return schema_obj.check_constraint(
+ self.constraint_name,
+ self.table_name,
+ self.condition,
+ schema=self.schema,
+ **self.kw,
+ )
+
+ @classmethod
+ def create_check_constraint(
+ cls,
+ operations: Operations,
+ constraint_name: Optional[str],
+ table_name: str,
+ condition: Union[str, ColumnElement[bool], TextClause],
+ *,
+ schema: Optional[str] = None,
+ **kw: Any,
+ ) -> None:
+ """Issue a "create check constraint" instruction using the
+ current migration context.
+
+ e.g.::
+
+ from alembic import op
+ from sqlalchemy.sql import column, func
+
+ op.create_check_constraint(
+ "ck_user_name_len",
+ "user",
+ func.len(column("name")) > 5,
+ )
+
+ CHECK constraints are usually against a SQL expression, so ad-hoc
+ table metadata is usually needed. The function will convert the given
+ arguments into a :class:`sqlalchemy.schema.CheckConstraint` bound
+ to an anonymous table in order to emit the CREATE statement.
+
+        :param constraint_name: Name of the check constraint. The name is
+            necessary so that an ALTER statement can be emitted. For setups
+            that use an automated naming scheme such as that described at
+ :ref:`sqla:constraint_naming_conventions`,
+ ``name`` here can be ``None``, as the event listener will
+ apply the name to the constraint object when it is associated
+ with the table.
+ :param table_name: String name of the source table.
+ :param condition: SQL expression that's the condition of the
+ constraint. Can be a string or SQLAlchemy expression language
+ structure.
+ :param deferrable: optional bool. If set, emit DEFERRABLE or
+ NOT DEFERRABLE when issuing DDL for this constraint.
+ :param initially: optional string. If set, emit INITIALLY
+ when issuing DDL for this constraint.
+ :param schema: Optional schema name to operate within. To control
+ quoting of the schema outside of the default behavior, use
+ the SQLAlchemy construct
+ :class:`~sqlalchemy.sql.elements.quoted_name`.
+
+ """
+ op = cls(constraint_name, table_name, condition, schema=schema, **kw)
+ return operations.invoke(op)
+
+ @classmethod
+ def batch_create_check_constraint(
+ cls,
+ operations: BatchOperations,
+ constraint_name: str,
+ condition: Union[str, ColumnElement[bool], TextClause],
+ **kw: Any,
+ ) -> None:
+ """Issue a "create check constraint" instruction using the
+ current batch migration context.
+
+        The batch form of this call omits the ``table_name`` and ``schema``
+        arguments from the call.
+
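+        e.g. (names and condition are illustrative)::
+
+            with op.batch_alter_table("account") as batch_op:
+                batch_op.create_check_constraint("ck_positive", "amount > 0")
+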
+ .. seealso::
+
+ :meth:`.Operations.create_check_constraint`
+
+ """
+ op = cls(
+ constraint_name,
+ operations.impl.table_name,
+ condition,
+ schema=operations.impl.schema,
+ **kw,
+ )
+ return operations.invoke(op)
+
+
+@Operations.register_operation("create_index")
+@BatchOperations.register_operation("create_index", "batch_create_index")
+class CreateIndexOp(MigrateOperation):
+ """Represent a create index operation."""
+
+ def __init__(
+ self,
+ index_name: Optional[str],
+ table_name: str,
+ columns: Sequence[Union[str, TextClause, ColumnElement[Any]]],
+ *,
+ schema: Optional[str] = None,
+ unique: bool = False,
+ if_not_exists: Optional[bool] = None,
+ **kw: Any,
+ ) -> None:
+ self.index_name = index_name
+ self.table_name = table_name
+ self.columns = columns
+ self.schema = schema
+ self.unique = unique
+ self.if_not_exists = if_not_exists
+ self.kw = kw
+
+ def reverse(self) -> DropIndexOp:
+ return DropIndexOp.from_index(self.to_index())
+
+ def to_diff_tuple(self) -> Tuple[str, Index]:
+ return ("add_index", self.to_index())
+
+ @classmethod
+ def from_index(cls, index: Index) -> CreateIndexOp:
+ assert index.table is not None
+ return cls(
+ index.name, # type: ignore[arg-type]
+ index.table.name,
+ sqla_compat._get_index_expressions(index),
+ schema=index.table.schema,
+ unique=index.unique,
+ **index.kwargs,
+ )
+
+ def to_index(
+ self, migration_context: Optional[MigrationContext] = None
+ ) -> Index:
+ schema_obj = schemaobj.SchemaObjects(migration_context)
+
+ idx = schema_obj.index(
+ self.index_name,
+ self.table_name,
+ self.columns,
+ schema=self.schema,
+ unique=self.unique,
+ **self.kw,
+ )
+ return idx
+
+ @classmethod
+ def create_index(
+ cls,
+ operations: Operations,
+ index_name: Optional[str],
+ table_name: str,
+ columns: Sequence[Union[str, TextClause, Function[Any]]],
+ *,
+ schema: Optional[str] = None,
+ unique: bool = False,
+ if_not_exists: Optional[bool] = None,
+ **kw: Any,
+ ) -> None:
+ r"""Issue a "create index" instruction using the current
+ migration context.
+
+ e.g.::
+
+ from alembic import op
+
+ op.create_index("ik_test", "t1", ["foo", "bar"])
+
+ Functional indexes can be produced by using the
+ :func:`sqlalchemy.sql.expression.text` construct::
+
+ from alembic import op
+ from sqlalchemy import text
+
+ op.create_index("ik_test", "t1", [text("lower(foo)")])
+
+ :param index_name: name of the index.
+ :param table_name: name of the owning table.
+ :param columns: a list consisting of string column names and/or
+ :func:`~sqlalchemy.sql.expression.text` constructs.
+ :param schema: Optional schema name to operate within. To control
+ quoting of the schema outside of the default behavior, use
+ the SQLAlchemy construct
+ :class:`~sqlalchemy.sql.elements.quoted_name`.
+ :param unique: If True, create a unique index.
+
+ :param quote: Force quoting of this column's name on or off,
+ corresponding to ``True`` or ``False``. When left at its default
+ of ``None``, the column identifier will be quoted according to
+ whether the name is case sensitive (identifiers with at least one
+ upper case character are treated as case sensitive), or if it's a
+ reserved word. This flag is only needed to force quoting of a
+ reserved word which is not known by the SQLAlchemy dialect.
+
+ :param if_not_exists: If True, adds IF NOT EXISTS operator when
+ creating the new index.
+
+ .. versionadded:: 1.12.0
+
+ :param \**kw: Additional keyword arguments not mentioned above are
+ dialect specific, and passed in the form
+            ``<dialectname>_<argname>``.
+ See the documentation regarding an individual dialect at
+ :ref:`dialect_toplevel` for detail on documented arguments.
+
+ """
+ op = cls(
+ index_name,
+ table_name,
+ columns,
+ schema=schema,
+ unique=unique,
+ if_not_exists=if_not_exists,
+ **kw,
+ )
+ return operations.invoke(op)
+
+ @classmethod
+ def batch_create_index(
+ cls,
+ operations: BatchOperations,
+ index_name: str,
+ columns: List[str],
+ **kw: Any,
+ ) -> None:
+ """Issue a "create index" instruction using the
+ current batch migration context.
+
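+        e.g. (index and column names are illustrative)::
+
+            with op.batch_alter_table("t1") as batch_op:
+                batch_op.create_index("ik_test", ["foo", "bar"])
+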
+ .. seealso::
+
+ :meth:`.Operations.create_index`
+
+ """
+
+ op = cls(
+ index_name,
+ operations.impl.table_name,
+ columns,
+ schema=operations.impl.schema,
+ **kw,
+ )
+ return operations.invoke(op)
+
+
+@Operations.register_operation("drop_index")
+@BatchOperations.register_operation("drop_index", "batch_drop_index")
+class DropIndexOp(MigrateOperation):
+ """Represent a drop index operation."""
+
+ def __init__(
+ self,
+ index_name: Union[quoted_name, str, conv],
+ table_name: Optional[str] = None,
+ *,
+ schema: Optional[str] = None,
+ if_exists: Optional[bool] = None,
+ _reverse: Optional[CreateIndexOp] = None,
+ **kw: Any,
+ ) -> None:
+ self.index_name = index_name
+ self.table_name = table_name
+ self.schema = schema
+ self.if_exists = if_exists
+ self._reverse = _reverse
+ self.kw = kw
+
+ def to_diff_tuple(self) -> Tuple[str, Index]:
+ return ("remove_index", self.to_index())
+
+ def reverse(self) -> CreateIndexOp:
+ return CreateIndexOp.from_index(self.to_index())
+
+ @classmethod
+ def from_index(cls, index: Index) -> DropIndexOp:
+ assert index.table is not None
+ return cls(
+ index.name, # type: ignore[arg-type]
+ table_name=index.table.name,
+ schema=index.table.schema,
+ _reverse=CreateIndexOp.from_index(index),
+ **index.kwargs,
+ )
+
+ def to_index(
+ self, migration_context: Optional[MigrationContext] = None
+ ) -> Index:
+ schema_obj = schemaobj.SchemaObjects(migration_context)
+
+ # need a dummy column name here since SQLAlchemy
+        # 0.7.6 and later raises on Index with no columns
+ return schema_obj.index(
+ self.index_name,
+ self.table_name,
+ self._reverse.columns if self._reverse else ["x"],
+ schema=self.schema,
+ **self.kw,
+ )
+
+ @classmethod
+ def drop_index(
+ cls,
+ operations: Operations,
+ index_name: str,
+ table_name: Optional[str] = None,
+ *,
+ schema: Optional[str] = None,
+ if_exists: Optional[bool] = None,
+ **kw: Any,
+ ) -> None:
+ r"""Issue a "drop index" instruction using the current
+ migration context.
+
+ e.g.::
+
+ drop_index("accounts")
+
+ :param index_name: name of the index.
+ :param table_name: name of the owning table. Some
+ backends such as Microsoft SQL Server require this.
+ :param schema: Optional schema name to operate within. To control
+ quoting of the schema outside of the default behavior, use
+ the SQLAlchemy construct
+ :class:`~sqlalchemy.sql.elements.quoted_name`.
+
+ :param if_exists: If True, adds IF EXISTS operator when
+ dropping the index.
+
+ .. versionadded:: 1.12.0
+
+ :param \**kw: Additional keyword arguments not mentioned above are
+ dialect specific, and passed in the form
+            ``<dialectname>_<argname>``.
+ See the documentation regarding an individual dialect at
+ :ref:`dialect_toplevel` for detail on documented arguments.
+
+ """
+ op = cls(
+ index_name,
+ table_name=table_name,
+ schema=schema,
+ if_exists=if_exists,
+ **kw,
+ )
+ return operations.invoke(op)
+
+ @classmethod
+ def batch_drop_index(
+ cls, operations: BatchOperations, index_name: str, **kw: Any
+ ) -> None:
+ """Issue a "drop index" instruction using the
+ current batch migration context.
+
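+        e.g. (the index name is illustrative)::
+
+            with op.batch_alter_table("t1") as batch_op:
+                batch_op.drop_index("ik_test")
+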
+ .. seealso::
+
+ :meth:`.Operations.drop_index`
+
+ """
+
+ op = cls(
+ index_name,
+ table_name=operations.impl.table_name,
+ schema=operations.impl.schema,
+ **kw,
+ )
+ return operations.invoke(op)
+
+
+@Operations.register_operation("create_table")
+class CreateTableOp(MigrateOperation):
+ """Represent a create table operation."""
+
+ def __init__(
+ self,
+ table_name: str,
+ columns: Sequence[SchemaItem],
+ *,
+ schema: Optional[str] = None,
+ _namespace_metadata: Optional[MetaData] = None,
+ _constraints_included: bool = False,
+ **kw: Any,
+ ) -> None:
+ self.table_name = table_name
+ self.columns = columns
+ self.schema = schema
+ self.info = kw.pop("info", {})
+ self.comment = kw.pop("comment", None)
+ self.prefixes = kw.pop("prefixes", None)
+ self.kw = kw
+ self._namespace_metadata = _namespace_metadata
+ self._constraints_included = _constraints_included
+
+ def reverse(self) -> DropTableOp:
+ return DropTableOp.from_table(
+ self.to_table(), _namespace_metadata=self._namespace_metadata
+ )
+
+ def to_diff_tuple(self) -> Tuple[str, Table]:
+ return ("add_table", self.to_table())
+
+ @classmethod
+ def from_table(
+ cls, table: Table, *, _namespace_metadata: Optional[MetaData] = None
+ ) -> CreateTableOp:
+ if _namespace_metadata is None:
+ _namespace_metadata = table.metadata
+
+ return cls(
+ table.name,
+ list(table.c) + list(table.constraints), # type:ignore[arg-type]
+ schema=table.schema,
+ _namespace_metadata=_namespace_metadata,
+ # given a Table() object, this Table will contain full Index()
+ # and UniqueConstraint objects already constructed in response to
+ # each unique=True / index=True flag on a Column. Carry this
+ # state along so that when we re-convert back into a Table, we
+ # skip unique=True/index=True so that these constraints are
+ # not doubled up. see #844 #848
+ _constraints_included=True,
+ comment=table.comment,
+ info=dict(table.info),
+ prefixes=list(table._prefixes),
+ **table.kwargs,
+ )
+
+ def to_table(
+ self, migration_context: Optional[MigrationContext] = None
+ ) -> Table:
+ schema_obj = schemaobj.SchemaObjects(migration_context)
+
+ return schema_obj.table(
+ self.table_name,
+ *self.columns,
+ schema=self.schema,
+ prefixes=list(self.prefixes) if self.prefixes else [],
+ comment=self.comment,
+ info=self.info.copy() if self.info else {},
+ _constraints_included=self._constraints_included,
+ **self.kw,
+ )
+
+ @classmethod
+ def create_table(
+ cls,
+ operations: Operations,
+ table_name: str,
+ *columns: SchemaItem,
+ **kw: Any,
+ ) -> Table:
+ r"""Issue a "create table" instruction using the current migration
+ context.
+
+ This directive receives an argument list similar to that of the
+ traditional :class:`sqlalchemy.schema.Table` construct, but without the
+ metadata::
+
+ from sqlalchemy import INTEGER, VARCHAR, NVARCHAR, Column
+ from alembic import op
+
+ op.create_table(
+ "account",
+ Column("id", INTEGER, primary_key=True),
+ Column("name", VARCHAR(50), nullable=False),
+ Column("description", NVARCHAR(200)),
+ Column("timestamp", TIMESTAMP, server_default=func.now()),
+ )
+
+ Note that :meth:`.create_table` accepts
+ :class:`~sqlalchemy.schema.Column`
+ constructs directly from the SQLAlchemy library. In particular,
+ default values to be created on the database side are
+ specified using the ``server_default`` parameter, and not
+ ``default`` which only specifies Python-side defaults::
+
+ from alembic import op
+ from sqlalchemy import Column, TIMESTAMP, func
+
+ # specify "DEFAULT NOW" along with the "timestamp" column
+ op.create_table(
+ "account",
+ Column("id", INTEGER, primary_key=True),
+ Column("timestamp", TIMESTAMP, server_default=func.now()),
+ )
+
+ The function also returns a newly created
+ :class:`~sqlalchemy.schema.Table` object, corresponding to the table
+ specification given, which is suitable for
+ immediate SQL operations, in particular
+ :meth:`.Operations.bulk_insert`::
+
+ from sqlalchemy import INTEGER, VARCHAR, NVARCHAR, Column
+ from alembic import op
+
+ account_table = op.create_table(
+ "account",
+ Column("id", INTEGER, primary_key=True),
+ Column("name", VARCHAR(50), nullable=False),
+ Column("description", NVARCHAR(200)),
+ Column("timestamp", TIMESTAMP, server_default=func.now()),
+ )
+
+ op.bulk_insert(
+ account_table,
+ [
+ {"name": "A1", "description": "account 1"},
+ {"name": "A2", "description": "account 2"},
+ ],
+ )
+
+ :param table_name: Name of the table
+        :param \*columns: collection of :class:`~sqlalchemy.schema.Column`
+            objects within the table, as well as optional
+            :class:`~sqlalchemy.schema.Constraint` objects and
+            :class:`~sqlalchemy.schema.Index` objects.
+ :param schema: Optional schema name to operate within. To control
+ quoting of the schema outside of the default behavior, use
+ the SQLAlchemy construct
+ :class:`~sqlalchemy.sql.elements.quoted_name`.
+ :param \**kw: Other keyword arguments are passed to the underlying
+ :class:`sqlalchemy.schema.Table` object created for the command.
+
+ :return: the :class:`~sqlalchemy.schema.Table` object corresponding
+ to the parameters given.
+
+ """
+ op = cls(table_name, columns, **kw)
+ return operations.invoke(op)
+
+
+@Operations.register_operation("drop_table")
+class DropTableOp(MigrateOperation):
+ """Represent a drop table operation."""
+
+ def __init__(
+ self,
+ table_name: str,
+ *,
+ schema: Optional[str] = None,
+ table_kw: Optional[MutableMapping[Any, Any]] = None,
+ _reverse: Optional[CreateTableOp] = None,
+ ) -> None:
+ self.table_name = table_name
+ self.schema = schema
+ self.table_kw = table_kw or {}
+ self.comment = self.table_kw.pop("comment", None)
+ self.info = self.table_kw.pop("info", None)
+ self.prefixes = self.table_kw.pop("prefixes", None)
+ self._reverse = _reverse
+
+ def to_diff_tuple(self) -> Tuple[str, Table]:
+ return ("remove_table", self.to_table())
+
+ def reverse(self) -> CreateTableOp:
+ return CreateTableOp.from_table(self.to_table())
+
+ @classmethod
+ def from_table(
+ cls, table: Table, *, _namespace_metadata: Optional[MetaData] = None
+ ) -> DropTableOp:
+ return cls(
+ table.name,
+ schema=table.schema,
+ table_kw={
+ "comment": table.comment,
+ "info": dict(table.info),
+ "prefixes": list(table._prefixes),
+ **table.kwargs,
+ },
+ _reverse=CreateTableOp.from_table(
+ table, _namespace_metadata=_namespace_metadata
+ ),
+ )
+
+ def to_table(
+ self, migration_context: Optional[MigrationContext] = None
+ ) -> Table:
+ if self._reverse:
+ cols_and_constraints = self._reverse.columns
+ else:
+ cols_and_constraints = []
+
+ schema_obj = schemaobj.SchemaObjects(migration_context)
+ t = schema_obj.table(
+ self.table_name,
+ *cols_and_constraints,
+ comment=self.comment,
+ info=self.info.copy() if self.info else {},
+ prefixes=list(self.prefixes) if self.prefixes else [],
+ schema=self.schema,
+ _constraints_included=self._reverse._constraints_included
+ if self._reverse
+ else False,
+ **self.table_kw,
+ )
+ return t
+
+ @classmethod
+ def drop_table(
+ cls,
+ operations: Operations,
+ table_name: str,
+ *,
+ schema: Optional[str] = None,
+ **kw: Any,
+ ) -> None:
+ r"""Issue a "drop table" instruction using the current
+ migration context.
+
+ e.g.::
+
+ drop_table("accounts")
+
+ :param table_name: Name of the table
+ :param schema: Optional schema name to operate within. To control
+ quoting of the schema outside of the default behavior, use
+ the SQLAlchemy construct
+ :class:`~sqlalchemy.sql.elements.quoted_name`.
+ :param \**kw: Other keyword arguments are passed to the underlying
+ :class:`sqlalchemy.schema.Table` object created for the command.
+
+ """
+ op = cls(table_name, schema=schema, table_kw=kw)
+ operations.invoke(op)
+
+
+class AlterTableOp(MigrateOperation):
+ """Represent an alter table operation."""
+
+ def __init__(
+ self,
+ table_name: str,
+ *,
+ schema: Optional[str] = None,
+ ) -> None:
+ self.table_name = table_name
+ self.schema = schema
+
+
+@Operations.register_operation("rename_table")
+class RenameTableOp(AlterTableOp):
+ """Represent a rename table operation."""
+
+ def __init__(
+ self,
+ old_table_name: str,
+ new_table_name: str,
+ *,
+ schema: Optional[str] = None,
+ ) -> None:
+ super().__init__(old_table_name, schema=schema)
+ self.new_table_name = new_table_name
+
+ @classmethod
+ def rename_table(
+ cls,
+ operations: Operations,
+ old_table_name: str,
+ new_table_name: str,
+ *,
+ schema: Optional[str] = None,
+ ) -> None:
+ """Emit an ALTER TABLE to rename a table.
+
+ :param old_table_name: old name.
+ :param new_table_name: new name.
+ :param schema: Optional schema name to operate within. To control
+ quoting of the schema outside of the default behavior, use
+ the SQLAlchemy construct
+ :class:`~sqlalchemy.sql.elements.quoted_name`.
+
+ """
+ op = cls(old_table_name, new_table_name, schema=schema)
+ return operations.invoke(op)
+
+
+@Operations.register_operation("create_table_comment")
+@BatchOperations.register_operation(
+ "create_table_comment", "batch_create_table_comment"
+)
+class CreateTableCommentOp(AlterTableOp):
+ """Represent a COMMENT ON `table` operation."""
+
+ def __init__(
+ self,
+ table_name: str,
+ comment: Optional[str],
+ *,
+ schema: Optional[str] = None,
+ existing_comment: Optional[str] = None,
+ ) -> None:
+ self.table_name = table_name
+ self.comment = comment
+ self.existing_comment = existing_comment
+ self.schema = schema
+
+ @classmethod
+ def create_table_comment(
+ cls,
+ operations: Operations,
+ table_name: str,
+ comment: Optional[str],
+ *,
+ existing_comment: Optional[str] = None,
+ schema: Optional[str] = None,
+ ) -> None:
+ """Emit a COMMENT ON operation to set the comment for a table.
+
+ :param table_name: string name of the target table.
+ :param comment: string value of the comment being registered against
+ the specified table.
+ :param existing_comment: String value of a comment
+ already registered on the specified table, used within autogenerate
+ so that the operation is reversible, but not required for direct
+ use.
+
+ .. seealso::
+
+ :meth:`.Operations.drop_table_comment`
+
+ :paramref:`.Operations.alter_column.comment`
+
+ """
+
+ op = cls(
+ table_name,
+ comment,
+ existing_comment=existing_comment,
+ schema=schema,
+ )
+ return operations.invoke(op)
+
+ @classmethod
+ def batch_create_table_comment(
+ cls,
+ operations: BatchOperations,
+ comment: Optional[str],
+ *,
+ existing_comment: Optional[str] = None,
+ ) -> None:
+ """Emit a COMMENT ON operation to set the comment for a table
+ using the current batch migration context.
+
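+        e.g., as an illustrative sketch (the batch context and names here
+        are hypothetical)::
+
+            with op.batch_alter_table("account") as batch_op:
+                batch_op.create_table_comment(
+                    "holds customer account records"
+                )
+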
+ :param comment: string value of the comment being registered against
+ the specified table.
+ :param existing_comment: String value of a comment
+ already registered on the specified table, used within autogenerate
+ so that the operation is reversible, but not required for direct
+ use.
+
+ """
+
+ op = cls(
+ operations.impl.table_name,
+ comment,
+ existing_comment=existing_comment,
+ schema=operations.impl.schema,
+ )
+ return operations.invoke(op)
+
+ def reverse(self):
+ """Reverses the COMMENT ON operation against a table."""
+ if self.existing_comment is None:
+ return DropTableCommentOp(
+ self.table_name,
+ existing_comment=self.comment,
+ schema=self.schema,
+ )
+ else:
+ return CreateTableCommentOp(
+ self.table_name,
+ self.existing_comment,
+ existing_comment=self.comment,
+ schema=self.schema,
+ )
+
+ def to_table(self, migration_context=None):
+ schema_obj = schemaobj.SchemaObjects(migration_context)
+
+ return schema_obj.table(
+ self.table_name, schema=self.schema, comment=self.comment
+ )
+
+ def to_diff_tuple(self):
+ return ("add_table_comment", self.to_table(), self.existing_comment)
+
+
+@Operations.register_operation("drop_table_comment")
+@BatchOperations.register_operation(
+ "drop_table_comment", "batch_drop_table_comment"
+)
+class DropTableCommentOp(AlterTableOp):
+ """Represent an operation to remove the comment from a table."""
+
+ def __init__(
+ self,
+ table_name: str,
+ *,
+ schema: Optional[str] = None,
+ existing_comment: Optional[str] = None,
+ ) -> None:
+ self.table_name = table_name
+ self.existing_comment = existing_comment
+ self.schema = schema
+
+ @classmethod
+ def drop_table_comment(
+ cls,
+ operations: Operations,
+ table_name: str,
+ *,
+ existing_comment: Optional[str] = None,
+ schema: Optional[str] = None,
+ ) -> None:
+ """Issue a "drop table comment" operation to
+ remove an existing comment set on a table.
+
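+        e.g., as an illustrative sketch (the table name and existing
+        comment here are hypothetical)::
+
+            op.drop_table_comment(
+                "account", existing_comment="holds customer account records"
+            )
+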
+ :param table_name: string name of the target table.
+ :param existing_comment: An optional string value of a comment already
+ registered on the specified table.
+
+ .. seealso::
+
+ :meth:`.Operations.create_table_comment`
+
+ :paramref:`.Operations.alter_column.comment`
+
+ """
+
+ op = cls(table_name, existing_comment=existing_comment, schema=schema)
+ return operations.invoke(op)
+
+ @classmethod
+ def batch_drop_table_comment(
+ cls,
+ operations: BatchOperations,
+ *,
+ existing_comment: Optional[str] = None,
+ ) -> None:
+ """Issue a "drop table comment" operation to
+ remove an existing comment set on a table using the current
+ batch operations context.
+
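+        e.g., as an illustrative sketch (the batch context and comment
+        here are hypothetical)::
+
+            with op.batch_alter_table("account") as batch_op:
+                batch_op.drop_table_comment(
+                    existing_comment="holds customer account records"
+                )
+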
+ :param existing_comment: An optional string value of a comment already
+ registered on the specified table.
+
+ """
+
+ op = cls(
+ operations.impl.table_name,
+ existing_comment=existing_comment,
+ schema=operations.impl.schema,
+ )
+ return operations.invoke(op)
+
+ def reverse(self):
+ """Reverses the COMMENT ON operation against a table."""
+ return CreateTableCommentOp(
+ self.table_name, self.existing_comment, schema=self.schema
+ )
+
+ def to_table(self, migration_context=None):
+ schema_obj = schemaobj.SchemaObjects(migration_context)
+
+ return schema_obj.table(self.table_name, schema=self.schema)
+
+ def to_diff_tuple(self):
+ return ("remove_table_comment", self.to_table())
+
+
+@Operations.register_operation("alter_column")
+@BatchOperations.register_operation("alter_column", "batch_alter_column")
+class AlterColumnOp(AlterTableOp):
+ """Represent an alter column operation."""
+
+ def __init__(
+ self,
+ table_name: str,
+ column_name: str,
+ *,
+ schema: Optional[str] = None,
+ existing_type: Optional[Any] = None,
+ existing_server_default: Any = False,
+ existing_nullable: Optional[bool] = None,
+ existing_comment: Optional[str] = None,
+ modify_nullable: Optional[bool] = None,
+ modify_comment: Optional[Union[str, Literal[False]]] = False,
+ modify_server_default: Any = False,
+ modify_name: Optional[str] = None,
+ modify_type: Optional[Any] = None,
+ **kw: Any,
+ ) -> None:
+ super().__init__(table_name, schema=schema)
+ self.column_name = column_name
+ self.existing_type = existing_type
+ self.existing_server_default = existing_server_default
+ self.existing_nullable = existing_nullable
+ self.existing_comment = existing_comment
+ self.modify_nullable = modify_nullable
+ self.modify_comment = modify_comment
+ self.modify_server_default = modify_server_default
+ self.modify_name = modify_name
+ self.modify_type = modify_type
+ self.kw = kw
+
+ def to_diff_tuple(self) -> Any:
+ col_diff = []
+ schema, tname, cname = self.schema, self.table_name, self.column_name
+
+ if self.modify_type is not None:
+ col_diff.append(
+ (
+ "modify_type",
+ schema,
+ tname,
+ cname,
+ {
+ "existing_nullable": self.existing_nullable,
+ "existing_server_default": (
+ self.existing_server_default
+ ),
+ "existing_comment": self.existing_comment,
+ },
+ self.existing_type,
+ self.modify_type,
+ )
+ )
+
+ if self.modify_nullable is not None:
+ col_diff.append(
+ (
+ "modify_nullable",
+ schema,
+ tname,
+ cname,
+ {
+ "existing_type": self.existing_type,
+ "existing_server_default": (
+ self.existing_server_default
+ ),
+ "existing_comment": self.existing_comment,
+ },
+ self.existing_nullable,
+ self.modify_nullable,
+ )
+ )
+
+ if self.modify_server_default is not False:
+ col_diff.append(
+ (
+ "modify_default",
+ schema,
+ tname,
+ cname,
+ {
+ "existing_nullable": self.existing_nullable,
+ "existing_type": self.existing_type,
+ "existing_comment": self.existing_comment,
+ },
+ self.existing_server_default,
+ self.modify_server_default,
+ )
+ )
+
+ if self.modify_comment is not False:
+ col_diff.append(
+ (
+ "modify_comment",
+ schema,
+ tname,
+ cname,
+ {
+ "existing_nullable": self.existing_nullable,
+ "existing_type": self.existing_type,
+ "existing_server_default": (
+ self.existing_server_default
+ ),
+ },
+ self.existing_comment,
+ self.modify_comment,
+ )
+ )
+
+ return col_diff
+
+ def has_changes(self) -> bool:
+ hc1 = (
+ self.modify_nullable is not None
+ or self.modify_server_default is not False
+ or self.modify_type is not None
+ or self.modify_comment is not False
+ )
+ if hc1:
+ return True
+ for kw in self.kw:
+ if kw.startswith("modify_"):
+ return True
+ else:
+ return False
+
+ def reverse(self) -> AlterColumnOp:
+ kw = self.kw.copy()
+ kw["existing_type"] = self.existing_type
+ kw["existing_nullable"] = self.existing_nullable
+ kw["existing_server_default"] = self.existing_server_default
+ kw["existing_comment"] = self.existing_comment
+ if self.modify_type is not None:
+ kw["modify_type"] = self.modify_type
+ if self.modify_nullable is not None:
+ kw["modify_nullable"] = self.modify_nullable
+ if self.modify_server_default is not False:
+ kw["modify_server_default"] = self.modify_server_default
+ if self.modify_comment is not False:
+ kw["modify_comment"] = self.modify_comment
+
+ # TODO: make this a little simpler
+ all_keys = {
+ m.group(1)
+ for m in [re.match(r"^(?:existing_|modify_)(.+)$", k) for k in kw]
+ if m
+ }
+
+ for k in all_keys:
+ if "modify_%s" % k in kw:
+ swap = kw["existing_%s" % k]
+ kw["existing_%s" % k] = kw["modify_%s" % k]
+ kw["modify_%s" % k] = swap
+
+ return self.__class__(
+ self.table_name, self.column_name, schema=self.schema, **kw
+ )
+
+ @classmethod
+ def alter_column(
+ cls,
+ operations: Operations,
+ table_name: str,
+ column_name: str,
+ *,
+ nullable: Optional[bool] = None,
+ comment: Optional[Union[str, Literal[False]]] = False,
+ server_default: Any = False,
+ new_column_name: Optional[str] = None,
+ type_: Optional[Union[TypeEngine, Type[TypeEngine]]] = None,
+ existing_type: Optional[Union[TypeEngine, Type[TypeEngine]]] = None,
+ existing_server_default: Optional[
+ Union[str, bool, Identity, Computed]
+ ] = False,
+ existing_nullable: Optional[bool] = None,
+ existing_comment: Optional[str] = None,
+ schema: Optional[str] = None,
+ **kw: Any,
+ ) -> None:
+ r"""Issue an "alter column" instruction using the
+ current migration context.
+
+ Generally, only that aspect of the column which
+ is being changed, i.e. name, type, nullability,
+ default, needs to be specified. Multiple changes
+ can also be specified at once and the backend should
+ "do the right thing", emitting each change either
+ separately or together as the backend allows.
+
+ MySQL has special requirements here, since MySQL
+ cannot ALTER a column without a full specification.
+ When producing MySQL-compatible migration files,
+ it is recommended that the ``existing_type``,
+ ``existing_server_default``, and ``existing_nullable``
+ parameters be present, if not being altered.
+
+ Type changes which are against the SQLAlchemy
+ "schema" types :class:`~sqlalchemy.types.Boolean`
+ and :class:`~sqlalchemy.types.Enum` may also
+ add or drop constraints which accompany those
+ types on backends that don't support them natively.
+ The ``existing_type`` argument is
+ used in this case to identify and remove a previous
+ constraint that was bound to the type object.
+
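+        e.g., as an illustrative sketch (the table, column and type names
+        here are hypothetical)::
+
+            from alembic import op
+            import sqlalchemy as sa
+
+            # widen a hypothetical "name" column and allow NULLs
+            op.alter_column(
+                "account",
+                "name",
+                existing_type=sa.VARCHAR(50),
+                type_=sa.Text(),
+                existing_nullable=False,
+                nullable=True,
+            )
+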
+ :param table_name: string name of the target table.
+ :param column_name: string name of the target column,
+ as it exists before the operation begins.
+ :param nullable: Optional; specify ``True`` or ``False``
+ to alter the column's nullability.
+ :param server_default: Optional; specify a string
+ SQL expression, :func:`~sqlalchemy.sql.expression.text`,
+ or :class:`~sqlalchemy.schema.DefaultClause` to indicate
+ an alteration to the column's default value.
+ Set to ``None`` to have the default removed.
+ :param comment: optional string text of a new comment to add to the
+ column.
+ :param new_column_name: Optional; specify a string name here to
+ indicate the new name within a column rename operation.
+ :param type\_: Optional; a :class:`~sqlalchemy.types.TypeEngine`
+ type object to specify a change to the column's type.
+ For SQLAlchemy types that also indicate a constraint (i.e.
+ :class:`~sqlalchemy.types.Boolean`, :class:`~sqlalchemy.types.Enum`),
+ the constraint is also generated.
+ :param autoincrement: set the ``AUTO_INCREMENT`` flag of the column;
+ currently understood by the MySQL dialect.
+ :param existing_type: Optional; a
+ :class:`~sqlalchemy.types.TypeEngine`
+ type object to specify the previous type. This
+ is required for all MySQL column alter operations that
+ don't otherwise specify a new type, as well as for
+ when nullability is being changed on a SQL Server
+ column. It is also used if the type is a so-called
+ SQLAlchemy "schema" type which may define a constraint (i.e.
+ :class:`~sqlalchemy.types.Boolean`,
+ :class:`~sqlalchemy.types.Enum`),
+ so that the constraint can be dropped.
+ :param existing_server_default: Optional; The existing
+ default value of the column. Required on MySQL if
+ an existing default is not being changed; else MySQL
+ removes the default.
+ :param existing_nullable: Optional; the existing nullability
+ of the column. Required on MySQL if the existing nullability
+ is not being changed; else MySQL sets this to NULL.
+ :param existing_autoincrement: Optional; the existing autoincrement
+ of the column. Used for MySQL's system of altering a column
+ that specifies ``AUTO_INCREMENT``.
+ :param existing_comment: string text of the existing comment on the
+ column to be maintained. Required on MySQL if the existing comment
+ on the column is not being changed.
+ :param schema: Optional schema name to operate within. To control
+ quoting of the schema outside of the default behavior, use
+ the SQLAlchemy construct
+ :class:`~sqlalchemy.sql.elements.quoted_name`.
+ :param postgresql_using: String argument which will indicate a
+          SQL expression to render within the PostgreSQL-specific USING clause
+          of ALTER COLUMN. This string is taken directly as raw SQL which
+ must explicitly include any necessary quoting or escaping of tokens
+ within the expression.
+
+ """
+
+ alt = cls(
+ table_name,
+ column_name,
+ schema=schema,
+ existing_type=existing_type,
+ existing_server_default=existing_server_default,
+ existing_nullable=existing_nullable,
+ existing_comment=existing_comment,
+ modify_name=new_column_name,
+ modify_type=type_,
+ modify_server_default=server_default,
+ modify_nullable=nullable,
+ modify_comment=comment,
+ **kw,
+ )
+
+ return operations.invoke(alt)
+
+ @classmethod
+ def batch_alter_column(
+ cls,
+ operations: BatchOperations,
+ column_name: str,
+ *,
+ nullable: Optional[bool] = None,
+ comment: Optional[Union[str, Literal[False]]] = False,
+ server_default: Any = False,
+ new_column_name: Optional[str] = None,
+ type_: Optional[Union[TypeEngine, Type[TypeEngine]]] = None,
+ existing_type: Optional[Union[TypeEngine, Type[TypeEngine]]] = None,
+ existing_server_default: Optional[
+ Union[str, bool, Identity, Computed]
+ ] = False,
+ existing_nullable: Optional[bool] = None,
+ existing_comment: Optional[str] = None,
+ insert_before: Optional[str] = None,
+ insert_after: Optional[str] = None,
+ **kw: Any,
+ ) -> None:
+ """Issue an "alter column" instruction using the current
+ batch migration context.
+
+ Parameters are the same as that of :meth:`.Operations.alter_column`,
+ as well as the following option(s):
+
+ :param insert_before: String name of an existing column which this
+ column should be placed before, when creating the new table.
+
+ :param insert_after: String name of an existing column which this
+ column should be placed after, when creating the new table. If
+ both :paramref:`.BatchOperations.alter_column.insert_before`
+ and :paramref:`.BatchOperations.alter_column.insert_after` are
+ omitted, the column is inserted after the last existing column
+ in the table.
+
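+        e.g., as an illustrative sketch (the batch context, names and types
+        here are hypothetical)::
+
+            import sqlalchemy as sa
+
+            with op.batch_alter_table("account") as batch_op:
+                batch_op.alter_column(
+                    "name",
+                    existing_type=sa.VARCHAR(50),
+                    nullable=True,
+                    insert_before="description",
+                )
+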
+ .. seealso::
+
+ :meth:`.Operations.alter_column`
+
+ """
+ alt = cls(
+ operations.impl.table_name,
+ column_name,
+ schema=operations.impl.schema,
+ existing_type=existing_type,
+ existing_server_default=existing_server_default,
+ existing_nullable=existing_nullable,
+ existing_comment=existing_comment,
+ modify_name=new_column_name,
+ modify_type=type_,
+ modify_server_default=server_default,
+ modify_nullable=nullable,
+ modify_comment=comment,
+ insert_before=insert_before,
+ insert_after=insert_after,
+ **kw,
+ )
+
+ return operations.invoke(alt)
+
+
+@Operations.register_operation("add_column")
+@BatchOperations.register_operation("add_column", "batch_add_column")
+class AddColumnOp(AlterTableOp):
+ """Represent an add column operation."""
+
+ def __init__(
+ self,
+ table_name: str,
+ column: Column[Any],
+ *,
+ schema: Optional[str] = None,
+ **kw: Any,
+ ) -> None:
+ super().__init__(table_name, schema=schema)
+ self.column = column
+ self.kw = kw
+
+ def reverse(self) -> DropColumnOp:
+ return DropColumnOp.from_column_and_tablename(
+ self.schema, self.table_name, self.column
+ )
+
+ def to_diff_tuple(
+ self,
+ ) -> Tuple[str, Optional[str], str, Column[Any]]:
+ return ("add_column", self.schema, self.table_name, self.column)
+
+ def to_column(self) -> Column:
+ return self.column
+
+ @classmethod
+ def from_column(cls, col: Column) -> AddColumnOp:
+ return cls(col.table.name, col, schema=col.table.schema)
+
+ @classmethod
+ def from_column_and_tablename(
+ cls,
+ schema: Optional[str],
+ tname: str,
+ col: Column[Any],
+ ) -> AddColumnOp:
+ return cls(tname, col, schema=schema)
+
+ @classmethod
+ def add_column(
+ cls,
+ operations: Operations,
+ table_name: str,
+ column: Column[Any],
+ *,
+ schema: Optional[str] = None,
+ ) -> None:
+ """Issue an "add column" instruction using the current
+ migration context.
+
+ e.g.::
+
+ from alembic import op
+ from sqlalchemy import Column, String
+
+ op.add_column("organization", Column("name", String()))
+
+ The :meth:`.Operations.add_column` method typically corresponds
+ to the SQL command "ALTER TABLE... ADD COLUMN". Within the scope
+ of this command, the column's name, datatype, nullability,
+ and optional server-generated defaults may be indicated.
+
+ .. note::
+
+ With the exception of NOT NULL constraints or single-column FOREIGN
+ KEY constraints, other kinds of constraints such as PRIMARY KEY,
+ UNIQUE or CHECK constraints **cannot** be generated using this
+ method; for these constraints, refer to operations such as
+ :meth:`.Operations.create_primary_key` and
+ :meth:`.Operations.create_check_constraint`. In particular, the
+ following :class:`~sqlalchemy.schema.Column` parameters are
+ **ignored**:
+
+ * :paramref:`~sqlalchemy.schema.Column.primary_key` - SQL databases
+ typically do not support an ALTER operation that can add
+ individual columns one at a time to an existing primary key
+ constraint, therefore it's less ambiguous to use the
+ :meth:`.Operations.create_primary_key` method, which assumes no
+ existing primary key constraint is present.
+ * :paramref:`~sqlalchemy.schema.Column.unique` - use the
+ :meth:`.Operations.create_unique_constraint` method
+ * :paramref:`~sqlalchemy.schema.Column.index` - use the
+ :meth:`.Operations.create_index` method
+
+
+ The provided :class:`~sqlalchemy.schema.Column` object may include a
+ :class:`~sqlalchemy.schema.ForeignKey` constraint directive,
+ referencing a remote table name. For this specific type of constraint,
+ Alembic will automatically emit a second ALTER statement in order to
+ add the single-column FOREIGN KEY constraint separately::
+
+ from alembic import op
+ from sqlalchemy import Column, INTEGER, ForeignKey
+
+ op.add_column(
+ "organization",
+ Column("account_id", INTEGER, ForeignKey("accounts.id")),
+ )
+
+ The column argument passed to :meth:`.Operations.add_column` is a
+ :class:`~sqlalchemy.schema.Column` construct, used in the same way it's
+ used in SQLAlchemy. In particular, values or functions to be indicated
+ as producing the column's default value on the database side are
+ specified using the ``server_default`` parameter, and not ``default``
+ which only specifies Python-side defaults::
+
+ from alembic import op
+ from sqlalchemy import Column, TIMESTAMP, func
+
+ # specify "DEFAULT NOW" along with the column add
+ op.add_column(
+ "account",
+ Column("timestamp", TIMESTAMP, server_default=func.now()),
+ )
+
+ :param table_name: String name of the parent table.
+ :param column: a :class:`sqlalchemy.schema.Column` object
+ representing the new column.
+ :param schema: Optional schema name to operate within. To control
+ quoting of the schema outside of the default behavior, use
+ the SQLAlchemy construct
+ :class:`~sqlalchemy.sql.elements.quoted_name`.
+
+ """
+
+ op = cls(table_name, column, schema=schema)
+ return operations.invoke(op)
+
+ @classmethod
+ def batch_add_column(
+ cls,
+ operations: BatchOperations,
+ column: Column[Any],
+ *,
+ insert_before: Optional[str] = None,
+ insert_after: Optional[str] = None,
+ ) -> None:
+ """Issue an "add column" instruction using the current
+ batch migration context.
+
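+        e.g., as an illustrative sketch (the batch context and names here
+        are hypothetical)::
+
+            from sqlalchemy import Column, String
+
+            with op.batch_alter_table("organization") as batch_op:
+                batch_op.add_column(
+                    Column("name", String()), insert_after="id"
+                )
+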
+ .. seealso::
+
+ :meth:`.Operations.add_column`
+
+ """
+
+ kw = {}
+ if insert_before:
+ kw["insert_before"] = insert_before
+ if insert_after:
+ kw["insert_after"] = insert_after
+
+ op = cls(
+ operations.impl.table_name,
+ column,
+ schema=operations.impl.schema,
+ **kw,
+ )
+ return operations.invoke(op)
+
+
+@Operations.register_operation("drop_column")
+@BatchOperations.register_operation("drop_column", "batch_drop_column")
+class DropColumnOp(AlterTableOp):
+ """Represent a drop column operation."""
+
+ def __init__(
+ self,
+ table_name: str,
+ column_name: str,
+ *,
+ schema: Optional[str] = None,
+ _reverse: Optional[AddColumnOp] = None,
+ **kw: Any,
+ ) -> None:
+ super().__init__(table_name, schema=schema)
+ self.column_name = column_name
+ self.kw = kw
+ self._reverse = _reverse
+
+ def to_diff_tuple(
+ self,
+ ) -> Tuple[str, Optional[str], str, Column[Any]]:
+ return (
+ "remove_column",
+ self.schema,
+ self.table_name,
+ self.to_column(),
+ )
+
+ def reverse(self) -> AddColumnOp:
+ if self._reverse is None:
+ raise ValueError(
+ "operation is not reversible; "
+ "original column is not present"
+ )
+
+ return AddColumnOp.from_column_and_tablename(
+ self.schema, self.table_name, self._reverse.column
+ )
+
+ @classmethod
+ def from_column_and_tablename(
+ cls,
+ schema: Optional[str],
+ tname: str,
+ col: Column[Any],
+ ) -> DropColumnOp:
+ return cls(
+ tname,
+ col.name,
+ schema=schema,
+ _reverse=AddColumnOp.from_column_and_tablename(schema, tname, col),
+ )
+
+ def to_column(
+ self, migration_context: Optional[MigrationContext] = None
+ ) -> Column:
+ if self._reverse is not None:
+ return self._reverse.column
+ schema_obj = schemaobj.SchemaObjects(migration_context)
+ return schema_obj.column(self.column_name, NULLTYPE)
+
+ @classmethod
+ def drop_column(
+ cls,
+ operations: Operations,
+ table_name: str,
+ column_name: str,
+ *,
+ schema: Optional[str] = None,
+ **kw: Any,
+ ) -> None:
+ """Issue a "drop column" instruction using the current
+ migration context.
+
+ e.g.::
+
+ drop_column("organization", "account_id")
+
+ :param table_name: name of table
+ :param column_name: name of column
+ :param schema: Optional schema name to operate within. To control
+ quoting of the schema outside of the default behavior, use
+ the SQLAlchemy construct
+ :class:`~sqlalchemy.sql.elements.quoted_name`.
+ :param mssql_drop_check: Optional boolean. When ``True``, on
+ Microsoft SQL Server only, first
+ drop the CHECK constraint on the column using a
+ SQL-script-compatible
+ block that selects into a @variable from sys.check_constraints,
+ then exec's a separate DROP CONSTRAINT for that constraint.
+ :param mssql_drop_default: Optional boolean. When ``True``, on
+ Microsoft SQL Server only, first
+ drop the DEFAULT constraint on the column using a
+ SQL-script-compatible
+ block that selects into a @variable from sys.default_constraints,
+ then exec's a separate DROP CONSTRAINT for that default.
+ :param mssql_drop_foreign_key: Optional boolean. When ``True``, on
+ Microsoft SQL Server only, first
+ drop a single FOREIGN KEY constraint on the column using a
+ SQL-script-compatible
+ block that selects into a @variable from
+ sys.foreign_keys/sys.foreign_key_columns,
+        then exec's a separate DROP CONSTRAINT for that foreign key. At
+        the moment this only works if the column has exactly one FOREIGN
+        KEY constraint referring to it.
+
+ """
+
+ op = cls(table_name, column_name, schema=schema, **kw)
+ return operations.invoke(op)
+
+ @classmethod
+ def batch_drop_column(
+ cls, operations: BatchOperations, column_name: str, **kw: Any
+ ) -> None:
+ """Issue a "drop column" instruction using the current
+ batch migration context.
+
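+        e.g., as an illustrative sketch (the batch context and names here
+        are hypothetical)::
+
+            with op.batch_alter_table("organization") as batch_op:
+                batch_op.drop_column("account_id")
+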
+ .. seealso::
+
+ :meth:`.Operations.drop_column`
+
+ """
+ op = cls(
+ operations.impl.table_name,
+ column_name,
+ schema=operations.impl.schema,
+ **kw,
+ )
+ return operations.invoke(op)
+
+
+@Operations.register_operation("bulk_insert")
+class BulkInsertOp(MigrateOperation):
+ """Represent a bulk insert operation."""
+
+ def __init__(
+ self,
+ table: Union[Table, TableClause],
+ rows: List[dict],
+ *,
+ multiinsert: bool = True,
+ ) -> None:
+ self.table = table
+ self.rows = rows
+ self.multiinsert = multiinsert
+
+ @classmethod
+ def bulk_insert(
+ cls,
+ operations: Operations,
+ table: Union[Table, TableClause],
+ rows: List[dict],
+ *,
+ multiinsert: bool = True,
+ ) -> None:
+ """Issue a "bulk insert" operation using the current
+ migration context.
+
+ This provides a means of representing an INSERT of multiple rows
+ which works equally well in the context of executing on a live
+ connection as well as that of generating a SQL script. In the
+ case of a SQL script, the values are rendered inline into the
+ statement.
+
+ e.g.::
+
+ from alembic import op
+ from datetime import date
+ from sqlalchemy.sql import table, column
+ from sqlalchemy import String, Integer, Date
+
+ # Create an ad-hoc table to use for the insert statement.
+ accounts_table = table(
+ "account",
+ column("id", Integer),
+ column("name", String),
+ column("create_date", Date),
+ )
+
+ op.bulk_insert(
+ accounts_table,
+ [
+ {
+ "id": 1,
+ "name": "John Smith",
+ "create_date": date(2010, 10, 5),
+ },
+ {
+ "id": 2,
+ "name": "Ed Williams",
+ "create_date": date(2007, 5, 27),
+ },
+ {
+ "id": 3,
+ "name": "Wendy Jones",
+ "create_date": date(2008, 8, 15),
+ },
+ ],
+ )
+
+ When using --sql mode, some datatypes may not render inline
+ automatically, such as dates and other special types. When this
+ issue is present, :meth:`.Operations.inline_literal` may be used::
+
+ op.bulk_insert(
+ accounts_table,
+ [
+ {
+ "id": 1,
+ "name": "John Smith",
+ "create_date": op.inline_literal("2010-10-05"),
+ },
+ {
+ "id": 2,
+ "name": "Ed Williams",
+ "create_date": op.inline_literal("2007-05-27"),
+ },
+ {
+ "id": 3,
+ "name": "Wendy Jones",
+ "create_date": op.inline_literal("2008-08-15"),
+ },
+ ],
+ multiinsert=False,
+ )
+
+ When using :meth:`.Operations.inline_literal` in conjunction with
+ :meth:`.Operations.bulk_insert`, in order for the statement to work
+ in "online" (e.g. non --sql) mode, the
+ :paramref:`~.Operations.bulk_insert.multiinsert`
+ flag should be set to ``False``, which will have the effect of
+ individual INSERT statements being emitted to the database, each
+ with a distinct VALUES clause, so that the "inline" values can
+ still be rendered, rather than attempting to pass the values
+ as bound parameters.
+
+ :param table: a table object which represents the target of the INSERT.
+
+ :param rows: a list of dictionaries indicating rows.
+
+ :param multiinsert: when at its default of True and --sql mode is not
+ enabled, the INSERT statement will be executed using
+ "executemany()" style, where all elements in the list of
+ dictionaries are passed as bound parameters in a single
+ list. Setting this to False results in individual INSERT
+ statements being emitted per parameter set, and is needed
+ in those cases where non-literal values are present in the
+ parameter sets.
+
+ """
+
+ op = cls(table, rows, multiinsert=multiinsert)
+ operations.invoke(op)
+
+
+@Operations.register_operation("execute")
+@BatchOperations.register_operation("execute", "batch_execute")
+class ExecuteSQLOp(MigrateOperation):
+ """Represent an execute SQL operation."""
+
+ def __init__(
+ self,
+ sqltext: Union[Executable, str],
+ *,
+ execution_options: Optional[dict[str, Any]] = None,
+ ) -> None:
+ self.sqltext = sqltext
+ self.execution_options = execution_options
+
+ @classmethod
+ def execute(
+ cls,
+ operations: Operations,
+ sqltext: Union[Executable, str],
+ *,
+ execution_options: Optional[dict[str, Any]] = None,
+ ) -> None:
+ r"""Execute the given SQL using the current migration context.
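+        e.g., as an illustrative sketch (the table names here are
+        hypothetical)::
+
+            op.rename_table("account", "account_v2")
+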
+
+ The given SQL can be a plain string, e.g.::
+
+ op.execute("INSERT INTO table (foo) VALUES ('some value')")
+
+ Or it can be any kind of Core SQL Expression construct, such as
+ below where we use an update construct::
+
+ from sqlalchemy.sql import table, column
+ from sqlalchemy import String
+ from alembic import op
+
+ account = table("account", column("name", String))
+ op.execute(
+ account.update()
+ .where(account.c.name == op.inline_literal("account 1"))
+ .values({"name": op.inline_literal("account 2")})
+ )
+
+ Above, we made use of the SQLAlchemy
+ :func:`sqlalchemy.sql.expression.table` and
+ :func:`sqlalchemy.sql.expression.column` constructs to make a brief,
+ ad-hoc table construct just for our UPDATE statement. A full
+ :class:`~sqlalchemy.schema.Table` construct of course works perfectly
+ fine as well, though note it's a recommended practice to at least
+ ensure the definition of a table is self-contained within the migration
+ script, rather than imported from a module that may break compatibility
+ with older migrations.
+
+ In a SQL script context, the statement is emitted directly to the
+ output stream. There is *no* return result, however, as this
+ function is oriented towards generating a change script
+ that can run in "offline" mode. Additionally, parameterized
+ statements are discouraged here, as they *will not work* in offline
+ mode. Above, we use :meth:`.inline_literal` where parameters are
+ to be used.
+
+ For full interaction with a connected database where parameters can
+ also be used normally, use the "bind" available from the context::
+
+ from alembic import op
+
+ connection = op.get_bind()
+
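+        e.g., as an illustrative sketch (the table name and comment text
+        here are hypothetical)::
+
+            op.create_table_comment(
+                "account",
+                "holds customer account records",
+            )
+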
+ connection.execute(
+ account.update()
+ .where(account.c.name == "account 1")
+ .values({"name": "account 2"})
+ )
+
+ Additionally, when passing the statement as a plain string, it is first
+ coerced into a :func:`sqlalchemy.sql.expression.text` construct
+ before being passed along. In the less likely case that the
+ literal SQL string contains a colon, it must be escaped with a
+ backslash, as::
+
+ op.execute(r"INSERT INTO table (foo) VALUES ('\:colon_value')")
+
+
+ :param sqltext: Any legal SQLAlchemy expression, including:
+
+ * a string
+ * a :func:`sqlalchemy.sql.expression.text` construct.
+ * a :func:`sqlalchemy.sql.expression.insert` construct.
+ * a :func:`sqlalchemy.sql.expression.update` construct.
+ * a :func:`sqlalchemy.sql.expression.delete` construct.
+ * Any "executable" described in SQLAlchemy Core documentation,
+ noting that no result set is returned.
+
+ .. note:: when passing a plain string, the statement is coerced into
+ a :func:`sqlalchemy.sql.expression.text` construct. This construct
+ considers symbols with colons, e.g. ``:foo`` to be bound parameters.
+ To avoid this, ensure that colon symbols are escaped, e.g.
+ ``\:foo``.
+
+ :param execution_options: Optional dictionary of
+ execution options, will be passed to
+ :meth:`sqlalchemy.engine.Connection.execution_options`.
+ """
+ op = cls(sqltext, execution_options=execution_options)
+ return operations.invoke(op)
+
+ @classmethod
+ def batch_execute(
+ cls,
+ operations: Operations,
+ sqltext: Union[Executable, str],
+ *,
+ execution_options: Optional[dict[str, Any]] = None,
+ ) -> None:
+ """Execute the given SQL using the current migration context.
+
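+        e.g., as an illustrative sketch (the batch context and statement
+        here are hypothetical)::
+
+            with op.batch_alter_table("account") as batch_op:
+                batch_op.execute(
+                    "UPDATE account SET name = 'unknown' WHERE name IS NULL"
+                )
+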
+ .. seealso::
+
+ :meth:`.Operations.execute`
+
+ """
+ return cls.execute(
+ operations, sqltext, execution_options=execution_options
+ )
+
+ def to_diff_tuple(self) -> Tuple[str, Union[Executable, str]]:
+ return ("execute", self.sqltext)
+
+
+class OpContainer(MigrateOperation):
+ """Represent a sequence of operations operation."""
+
+ def __init__(self, ops: Sequence[MigrateOperation] = ()) -> None:
+ self.ops = list(ops)
+
+ def is_empty(self) -> bool:
+ return not self.ops
+
+ def as_diffs(self) -> Any:
+ return list(OpContainer._ops_as_diffs(self))
+
+ @classmethod
+ def _ops_as_diffs(
+ cls, migrations: OpContainer
+ ) -> Iterator[Tuple[Any, ...]]:
+ for op in migrations.ops:
+ if hasattr(op, "ops"):
+ yield from cls._ops_as_diffs(cast("OpContainer", op))
+ else:
+ yield op.to_diff_tuple()
+
+
+class ModifyTableOps(OpContainer):
+ """Contains a sequence of operations that all apply to a single Table."""
+
+ def __init__(
+ self,
+ table_name: str,
+ ops: Sequence[MigrateOperation],
+ *,
+ schema: Optional[str] = None,
+ ) -> None:
+ super().__init__(ops)
+ self.table_name = table_name
+ self.schema = schema
+
+ def reverse(self) -> ModifyTableOps:
+ return ModifyTableOps(
+ self.table_name,
+ ops=list(reversed([op.reverse() for op in self.ops])),
+ schema=self.schema,
+ )
+
+
+class UpgradeOps(OpContainer):
+ """contains a sequence of operations that would apply to the
+ 'upgrade' stream of a script.
+
+ .. seealso::
+
+ :ref:`customizing_revision`
+
+ """
+
+ def __init__(
+ self,
+ ops: Sequence[MigrateOperation] = (),
+ upgrade_token: str = "upgrades",
+ ) -> None:
+ super().__init__(ops=ops)
+ self.upgrade_token = upgrade_token
+
+ def reverse_into(self, downgrade_ops: DowngradeOps) -> DowngradeOps:
+ downgrade_ops.ops[:] = list( # type:ignore[index]
+ reversed([op.reverse() for op in self.ops])
+ )
+ return downgrade_ops
+
+ def reverse(self) -> DowngradeOps:
+ return self.reverse_into(DowngradeOps(ops=[]))
+
+
+class DowngradeOps(OpContainer):
+ """contains a sequence of operations that would apply to the
+ 'downgrade' stream of a script.
+
+ .. seealso::
+
+ :ref:`customizing_revision`
+
+ """
+
+ def __init__(
+ self,
+ ops: Sequence[MigrateOperation] = (),
+ downgrade_token: str = "downgrades",
+ ) -> None:
+ super().__init__(ops=ops)
+ self.downgrade_token = downgrade_token
+
+ def reverse(self):
+ return UpgradeOps(
+ ops=list(reversed([op.reverse() for op in self.ops]))
+ )
+
+
+class MigrationScript(MigrateOperation):
+ """represents a migration script.
+
+ E.g. when autogenerate encounters this object, this corresponds to the
+ production of an actual script file.
+
+ A normal :class:`.MigrationScript` object would contain a single
+ :class:`.UpgradeOps` and a single :class:`.DowngradeOps` directive.
+ These are accessible via the ``.upgrade_ops`` and ``.downgrade_ops``
+ attributes.
+
+ In the case of an autogenerate operation that runs multiple times,
+ such as the multiple database example in the "multidb" template,
+ the ``.upgrade_ops`` and ``.downgrade_ops`` attributes are disabled,
+ and instead these objects should be accessed via the ``.upgrade_ops_list``
+ and ``.downgrade_ops_list`` list-based attributes. These latter
+ attributes are always available at the very least as single-element lists.
+
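+    e.g., as an illustrative sketch of a hypothetical
+    ``process_revision_directives`` hook (all names here are
+    illustrative)::
+
+        def process_revision_directives(context, revision, directives):
+            script = directives[0]
+
+            # a single autogenerate pass: one UpgradeOps / DowngradeOps
+            upgrade_ops = script.upgrade_ops
+
+            # multiple passes (e.g. the "multidb" template): the
+            # list-based accessors must be used instead
+            all_upgrade_ops = script.upgrade_ops_list
+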
+ .. seealso::
+
+ :ref:`customizing_revision`
+
+ """
+
+ _needs_render: Optional[bool]
+
+ def __init__(
+ self,
+ rev_id: Optional[str],
+ upgrade_ops: UpgradeOps,
+ downgrade_ops: DowngradeOps,
+ *,
+ message: Optional[str] = None,
+ imports: Set[str] = set(),
+ head: Optional[str] = None,
+ splice: Optional[bool] = None,
+ branch_label: Optional[_RevIdType] = None,
+ version_path: Optional[str] = None,
+ depends_on: Optional[_RevIdType] = None,
+ ) -> None:
+ self.rev_id = rev_id
+ self.message = message
+ self.imports = imports
+ self.head = head
+ self.splice = splice
+ self.branch_label = branch_label
+ self.version_path = version_path
+ self.depends_on = depends_on
+ self.upgrade_ops = upgrade_ops
+ self.downgrade_ops = downgrade_ops
+
+ @property
+ def upgrade_ops(self):
+ """An instance of :class:`.UpgradeOps`.
+
+ .. seealso::
+
+ :attr:`.MigrationScript.upgrade_ops_list`
+ """
+ if len(self._upgrade_ops) > 1:
+ raise ValueError(
+ "This MigrationScript instance has a multiple-entry "
+ "list for UpgradeOps; please use the "
+ "upgrade_ops_list attribute."
+ )
+ elif not self._upgrade_ops:
+ return None
+ else:
+ return self._upgrade_ops[0]
+
+ @upgrade_ops.setter
+ def upgrade_ops(self, upgrade_ops):
+ self._upgrade_ops = util.to_list(upgrade_ops)
+ for elem in self._upgrade_ops:
+ assert isinstance(elem, UpgradeOps)
+
+ @property
+ def downgrade_ops(self):
+ """An instance of :class:`.DowngradeOps`.
+
+ .. seealso::
+
+ :attr:`.MigrationScript.downgrade_ops_list`
+ """
+ if len(self._downgrade_ops) > 1:
+ raise ValueError(
+ "This MigrationScript instance has a multiple-entry "
+ "list for DowngradeOps; please use the "
+ "downgrade_ops_list attribute."
+ )
+ elif not self._downgrade_ops:
+ return None
+ else:
+ return self._downgrade_ops[0]
+
+ @downgrade_ops.setter
+ def downgrade_ops(self, downgrade_ops):
+ self._downgrade_ops = util.to_list(downgrade_ops)
+ for elem in self._downgrade_ops:
+ assert isinstance(elem, DowngradeOps)
+
+ @property
+ def upgrade_ops_list(self) -> List[UpgradeOps]:
+ """A list of :class:`.UpgradeOps` instances.
+
+ This is used in place of the :attr:`.MigrationScript.upgrade_ops`
+ attribute when dealing with a revision operation that does
+ multiple autogenerate passes.
+
+ """
+ return self._upgrade_ops
+
+ @property
+ def downgrade_ops_list(self) -> List[DowngradeOps]:
+ """A list of :class:`.DowngradeOps` instances.
+
+ This is used in place of the :attr:`.MigrationScript.downgrade_ops`
+ attribute when dealing with a revision operation that does
+ multiple autogenerate passes.
+
+ """
+ return self._downgrade_ops
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/operations/schemaobj.py b/Backend/venv/lib/python3.12/site-packages/alembic/operations/schemaobj.py
new file mode 100644
index 00000000..799f1139
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/alembic/operations/schemaobj.py
@@ -0,0 +1,287 @@
+from __future__ import annotations
+
+from typing import Any
+from typing import Dict
+from typing import List
+from typing import Optional
+from typing import Sequence
+from typing import Tuple
+from typing import TYPE_CHECKING
+from typing import Union
+
+from sqlalchemy import schema as sa_schema
+from sqlalchemy.sql.schema import Column
+from sqlalchemy.sql.schema import Constraint
+from sqlalchemy.sql.schema import Index
+from sqlalchemy.types import Integer
+from sqlalchemy.types import NULLTYPE
+
+from .. import util
+from ..util import sqla_compat
+
+if TYPE_CHECKING:
+ from sqlalchemy.sql.elements import ColumnElement
+ from sqlalchemy.sql.elements import TextClause
+ from sqlalchemy.sql.schema import CheckConstraint
+ from sqlalchemy.sql.schema import ForeignKey
+ from sqlalchemy.sql.schema import ForeignKeyConstraint
+ from sqlalchemy.sql.schema import MetaData
+ from sqlalchemy.sql.schema import PrimaryKeyConstraint
+ from sqlalchemy.sql.schema import Table
+ from sqlalchemy.sql.schema import UniqueConstraint
+ from sqlalchemy.sql.type_api import TypeEngine
+
+ from ..runtime.migration import MigrationContext
+
+
+class SchemaObjects:
+ def __init__(
+ self, migration_context: Optional[MigrationContext] = None
+ ) -> None:
+ self.migration_context = migration_context
+
+ def primary_key_constraint(
+ self,
+ name: Optional[sqla_compat._ConstraintNameDefined],
+ table_name: str,
+ cols: Sequence[str],
+ schema: Optional[str] = None,
+ **dialect_kw,
+ ) -> PrimaryKeyConstraint:
+ m = self.metadata()
+ columns = [sa_schema.Column(n, NULLTYPE) for n in cols]
+ t = sa_schema.Table(table_name, m, *columns, schema=schema)
+ # SQLAlchemy primary key constraint name arg is wrongly typed on
+ # the SQLAlchemy side through 2.0.5 at least
+ p = sa_schema.PrimaryKeyConstraint(
+ *[t.c[n] for n in cols], name=name, **dialect_kw # type: ignore
+ )
+ return p
+
+ def foreign_key_constraint(
+ self,
+ name: Optional[sqla_compat._ConstraintNameDefined],
+ source: str,
+ referent: str,
+ local_cols: List[str],
+ remote_cols: List[str],
+ onupdate: Optional[str] = None,
+ ondelete: Optional[str] = None,
+ deferrable: Optional[bool] = None,
+ source_schema: Optional[str] = None,
+ referent_schema: Optional[str] = None,
+ initially: Optional[str] = None,
+ match: Optional[str] = None,
+ **dialect_kw,
+ ) -> ForeignKeyConstraint:
+ m = self.metadata()
+ if source == referent and source_schema == referent_schema:
+ t1_cols = local_cols + remote_cols
+ else:
+ t1_cols = local_cols
+ sa_schema.Table(
+ referent,
+ m,
+ *[sa_schema.Column(n, NULLTYPE) for n in remote_cols],
+ schema=referent_schema,
+ )
+
+ t1 = sa_schema.Table(
+ source,
+ m,
+ *[
+ sa_schema.Column(n, NULLTYPE)
+ for n in util.unique_list(t1_cols)
+ ],
+ schema=source_schema,
+ )
+
+ tname = (
+ "%s.%s" % (referent_schema, referent)
+ if referent_schema
+ else referent
+ )
+
+ dialect_kw["match"] = match
+
+ f = sa_schema.ForeignKeyConstraint(
+ local_cols,
+ ["%s.%s" % (tname, n) for n in remote_cols],
+ name=name,
+ onupdate=onupdate,
+ ondelete=ondelete,
+ deferrable=deferrable,
+ initially=initially,
+ **dialect_kw,
+ )
+ t1.append_constraint(f)
+
+ return f
+
+ def unique_constraint(
+ self,
+ name: Optional[sqla_compat._ConstraintNameDefined],
+ source: str,
+ local_cols: Sequence[str],
+ schema: Optional[str] = None,
+ **kw,
+ ) -> UniqueConstraint:
+ t = sa_schema.Table(
+ source,
+ self.metadata(),
+ *[sa_schema.Column(n, NULLTYPE) for n in local_cols],
+ schema=schema,
+ )
+ kw["name"] = name
+ uq = sa_schema.UniqueConstraint(*[t.c[n] for n in local_cols], **kw)
+ # TODO: need event tests to ensure the event
+ # is fired off here
+ t.append_constraint(uq)
+ return uq
+
+ def check_constraint(
+ self,
+ name: Optional[sqla_compat._ConstraintNameDefined],
+ source: str,
+ condition: Union[str, TextClause, ColumnElement[Any]],
+ schema: Optional[str] = None,
+ **kw,
+    ) -> CheckConstraint:
+ t = sa_schema.Table(
+ source,
+ self.metadata(),
+ sa_schema.Column("x", Integer),
+ schema=schema,
+ )
+ ck = sa_schema.CheckConstraint(condition, name=name, **kw)
+ t.append_constraint(ck)
+ return ck
+
+ def generic_constraint(
+ self,
+ name: Optional[sqla_compat._ConstraintNameDefined],
+ table_name: str,
+ type_: Optional[str],
+ schema: Optional[str] = None,
+ **kw,
+ ) -> Any:
+ t = self.table(table_name, schema=schema)
+ types: Dict[Optional[str], Any] = {
+ "foreignkey": lambda name: sa_schema.ForeignKeyConstraint(
+ [], [], name=name
+ ),
+ "primary": sa_schema.PrimaryKeyConstraint,
+ "unique": sa_schema.UniqueConstraint,
+ "check": lambda name: sa_schema.CheckConstraint("", name=name),
+ None: sa_schema.Constraint,
+ }
+ try:
+ const = types[type_]
+ except KeyError as ke:
+ raise TypeError(
+ "'type' can be one of %s"
+ % ", ".join(sorted(repr(x) for x in types))
+ ) from ke
+ else:
+ const = const(name=name)
+ t.append_constraint(const)
+ return const
+
+ def metadata(self) -> MetaData:
+ kw = {}
+ if (
+ self.migration_context is not None
+ and "target_metadata" in self.migration_context.opts
+ ):
+ mt = self.migration_context.opts["target_metadata"]
+ if hasattr(mt, "naming_convention"):
+ kw["naming_convention"] = mt.naming_convention
+ return sa_schema.MetaData(**kw)
+
+ def table(self, name: str, *columns, **kw) -> Table:
+ m = self.metadata()
+
+ cols = [
+ sqla_compat._copy(c) if c.table is not None else c
+ for c in columns
+ if isinstance(c, Column)
+ ]
+ # these flags have already added their UniqueConstraint /
+ # Index objects to the table, so flip them off here.
+ # SQLAlchemy tometadata() avoids this instead by preserving the
+ # flags and skipping the constraints that have _type_bound on them,
+ # but for a migration we'd rather list out the constraints
+ # explicitly.
+ _constraints_included = kw.pop("_constraints_included", False)
+ if _constraints_included:
+ for c in cols:
+ c.unique = c.index = False
+
+ t = sa_schema.Table(name, m, *cols, **kw)
+
+ constraints = [
+ sqla_compat._copy(elem, target_table=t)
+ if getattr(elem, "parent", None) is not t
+ and getattr(elem, "parent", None) is not None
+ else elem
+ for elem in columns
+ if isinstance(elem, (Constraint, Index))
+ ]
+
+ for const in constraints:
+ t.append_constraint(const)
+
+ for f in t.foreign_keys:
+ self._ensure_table_for_fk(m, f)
+ return t
+
+ def column(self, name: str, type_: TypeEngine, **kw) -> Column:
+ return sa_schema.Column(name, type_, **kw)
+
+ def index(
+ self,
+ name: Optional[str],
+ tablename: Optional[str],
+ columns: Sequence[Union[str, TextClause, ColumnElement[Any]]],
+ schema: Optional[str] = None,
+ **kw,
+ ) -> Index:
+ t = sa_schema.Table(
+ tablename or "no_table",
+ self.metadata(),
+ schema=schema,
+ )
+ kw["_table"] = t
+ idx = sa_schema.Index(
+ name,
+ *[util.sqla_compat._textual_index_column(t, n) for n in columns],
+ **kw,
+ )
+ return idx
+
+ def _parse_table_key(self, table_key: str) -> Tuple[Optional[str], str]:
+ if "." in table_key:
+ tokens = table_key.split(".")
+ sname: Optional[str] = ".".join(tokens[0:-1])
+ tname = tokens[-1]
+ else:
+ tname = table_key
+ sname = None
+ return (sname, tname)
+
+ def _ensure_table_for_fk(self, metadata: MetaData, fk: ForeignKey) -> None:
+ """create a placeholder Table object for the referent of a
+ ForeignKey.
+
+ """
+ if isinstance(fk._colspec, str): # type:ignore[attr-defined]
+ table_key, cname = fk._colspec.rsplit( # type:ignore[attr-defined]
+ ".", 1
+ )
+ sname, tname = self._parse_table_key(table_key)
+ if table_key not in metadata.tables:
+ rel_t = sa_schema.Table(tname, metadata, schema=sname)
+ else:
+ rel_t = metadata.tables[table_key]
+ if cname not in rel_t.c:
+ rel_t.append_column(sa_schema.Column(cname, NULLTYPE))
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/operations/toimpl.py b/Backend/venv/lib/python3.12/site-packages/alembic/operations/toimpl.py
new file mode 100644
index 00000000..ba974b62
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/alembic/operations/toimpl.py
@@ -0,0 +1,223 @@
+from typing import TYPE_CHECKING
+
+from sqlalchemy import schema as sa_schema
+
+from . import ops
+from .base import Operations
+from ..util.sqla_compat import _copy
+from ..util.sqla_compat import sqla_2
+
+if TYPE_CHECKING:
+ from sqlalchemy.sql.schema import Table
+
+
+@Operations.implementation_for(ops.AlterColumnOp)
+def alter_column(
+ operations: "Operations", operation: "ops.AlterColumnOp"
+) -> None:
+ compiler = operations.impl.dialect.statement_compiler(
+ operations.impl.dialect, None
+ )
+
+ existing_type = operation.existing_type
+ existing_nullable = operation.existing_nullable
+ existing_server_default = operation.existing_server_default
+ type_ = operation.modify_type
+ column_name = operation.column_name
+ table_name = operation.table_name
+ schema = operation.schema
+ server_default = operation.modify_server_default
+ new_column_name = operation.modify_name
+ nullable = operation.modify_nullable
+ comment = operation.modify_comment
+ existing_comment = operation.existing_comment
+
+ def _count_constraint(constraint):
+ return not isinstance(constraint, sa_schema.PrimaryKeyConstraint) and (
+ not constraint._create_rule or constraint._create_rule(compiler)
+ )
+
+ if existing_type and type_:
+ t = operations.schema_obj.table(
+ table_name,
+ sa_schema.Column(column_name, existing_type),
+ schema=schema,
+ )
+ for constraint in t.constraints:
+ if _count_constraint(constraint):
+ operations.impl.drop_constraint(constraint)
+
+ operations.impl.alter_column(
+ table_name,
+ column_name,
+ nullable=nullable,
+ server_default=server_default,
+ name=new_column_name,
+ type_=type_,
+ schema=schema,
+ existing_type=existing_type,
+ existing_server_default=existing_server_default,
+ existing_nullable=existing_nullable,
+ comment=comment,
+ existing_comment=existing_comment,
+ **operation.kw,
+ )
+
+ if type_:
+ t = operations.schema_obj.table(
+ table_name,
+ operations.schema_obj.column(column_name, type_),
+ schema=schema,
+ )
+ for constraint in t.constraints:
+ if _count_constraint(constraint):
+ operations.impl.add_constraint(constraint)
+
+
+@Operations.implementation_for(ops.DropTableOp)
+def drop_table(operations: "Operations", operation: "ops.DropTableOp") -> None:
+ operations.impl.drop_table(
+ operation.to_table(operations.migration_context)
+ )
+
+
+@Operations.implementation_for(ops.DropColumnOp)
+def drop_column(
+ operations: "Operations", operation: "ops.DropColumnOp"
+) -> None:
+ column = operation.to_column(operations.migration_context)
+ operations.impl.drop_column(
+ operation.table_name, column, schema=operation.schema, **operation.kw
+ )
+
+
+@Operations.implementation_for(ops.CreateIndexOp)
+def create_index(
+ operations: "Operations", operation: "ops.CreateIndexOp"
+) -> None:
+ idx = operation.to_index(operations.migration_context)
+ kw = {}
+ if operation.if_not_exists is not None:
+ if not sqla_2:
+ raise NotImplementedError("SQLAlchemy 2.0+ required")
+
+ kw["if_not_exists"] = operation.if_not_exists
+ operations.impl.create_index(idx, **kw)
+
+
+@Operations.implementation_for(ops.DropIndexOp)
+def drop_index(operations: "Operations", operation: "ops.DropIndexOp") -> None:
+ kw = {}
+ if operation.if_exists is not None:
+ if not sqla_2:
+ raise NotImplementedError("SQLAlchemy 2.0+ required")
+
+ kw["if_exists"] = operation.if_exists
+
+ operations.impl.drop_index(
+ operation.to_index(operations.migration_context),
+ **kw,
+ )
+
+
+@Operations.implementation_for(ops.CreateTableOp)
+def create_table(
+ operations: "Operations", operation: "ops.CreateTableOp"
+) -> "Table":
+ table = operation.to_table(operations.migration_context)
+ operations.impl.create_table(table)
+ return table
+
+
+@Operations.implementation_for(ops.RenameTableOp)
+def rename_table(
+ operations: "Operations", operation: "ops.RenameTableOp"
+) -> None:
+ operations.impl.rename_table(
+ operation.table_name, operation.new_table_name, schema=operation.schema
+ )
+
+
+@Operations.implementation_for(ops.CreateTableCommentOp)
+def create_table_comment(
+ operations: "Operations", operation: "ops.CreateTableCommentOp"
+) -> None:
+ table = operation.to_table(operations.migration_context)
+ operations.impl.create_table_comment(table)
+
+
+@Operations.implementation_for(ops.DropTableCommentOp)
+def drop_table_comment(
+ operations: "Operations", operation: "ops.DropTableCommentOp"
+) -> None:
+ table = operation.to_table(operations.migration_context)
+ operations.impl.drop_table_comment(table)
+
+
+@Operations.implementation_for(ops.AddColumnOp)
+def add_column(operations: "Operations", operation: "ops.AddColumnOp") -> None:
+ table_name = operation.table_name
+ column = operation.column
+ schema = operation.schema
+ kw = operation.kw
+
+ if column.table is not None:
+ column = _copy(column)
+
+ t = operations.schema_obj.table(table_name, column, schema=schema)
+ operations.impl.add_column(table_name, column, schema=schema, **kw)
+
+ for constraint in t.constraints:
+ if not isinstance(constraint, sa_schema.PrimaryKeyConstraint):
+ operations.impl.add_constraint(constraint)
+ for index in t.indexes:
+ operations.impl.create_index(index)
+
+ with_comment = (
+ operations.impl.dialect.supports_comments
+ and not operations.impl.dialect.inline_comments
+ )
+ comment = column.comment
+ if comment and with_comment:
+ operations.impl.create_column_comment(column)
+
+
+@Operations.implementation_for(ops.AddConstraintOp)
+def create_constraint(
+ operations: "Operations", operation: "ops.AddConstraintOp"
+) -> None:
+ operations.impl.add_constraint(
+ operation.to_constraint(operations.migration_context)
+ )
+
+
+@Operations.implementation_for(ops.DropConstraintOp)
+def drop_constraint(
+ operations: "Operations", operation: "ops.DropConstraintOp"
+) -> None:
+ operations.impl.drop_constraint(
+ operations.schema_obj.generic_constraint(
+ operation.constraint_name,
+ operation.table_name,
+ operation.constraint_type,
+ schema=operation.schema,
+ )
+ )
+
+
+@Operations.implementation_for(ops.BulkInsertOp)
+def bulk_insert(
+ operations: "Operations", operation: "ops.BulkInsertOp"
+) -> None:
+ operations.impl.bulk_insert( # type: ignore[union-attr]
+ operation.table, operation.rows, multiinsert=operation.multiinsert
+ )
+
+
+@Operations.implementation_for(ops.ExecuteSQLOp)
+def execute_sql(
+ operations: "Operations", operation: "ops.ExecuteSQLOp"
+) -> None:
+ operations.migration_context.impl.execute(
+ operation.sqltext, execution_options=operation.execution_options
+ )
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/py.typed b/Backend/venv/lib/python3.12/site-packages/alembic/py.typed
new file mode 100644
index 00000000..e69de29b
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/runtime/__init__.py b/Backend/venv/lib/python3.12/site-packages/alembic/runtime/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/runtime/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/alembic/runtime/__pycache__/__init__.cpython-312.pyc
new file mode 100644
index 00000000..2a20d3a1
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/alembic/runtime/__pycache__/__init__.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/runtime/__pycache__/environment.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/alembic/runtime/__pycache__/environment.cpython-312.pyc
new file mode 100644
index 00000000..05e77747
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/alembic/runtime/__pycache__/environment.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/runtime/__pycache__/migration.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/alembic/runtime/__pycache__/migration.cpython-312.pyc
new file mode 100644
index 00000000..e050d35b
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/alembic/runtime/__pycache__/migration.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/runtime/environment.py b/Backend/venv/lib/python3.12/site-packages/alembic/runtime/environment.py
new file mode 100644
index 00000000..7640f563
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/alembic/runtime/environment.py
@@ -0,0 +1,1043 @@
+from __future__ import annotations
+
+from typing import Any
+from typing import Callable
+from typing import Collection
+from typing import ContextManager
+from typing import Dict
+from typing import List
+from typing import Mapping
+from typing import MutableMapping
+from typing import Optional
+from typing import overload
+from typing import TextIO
+from typing import Tuple
+from typing import TYPE_CHECKING
+from typing import Union
+
+from sqlalchemy.sql.schema import Column
+from sqlalchemy.sql.schema import FetchedValue
+from typing_extensions import Literal
+
+from .migration import _ProxyTransaction
+from .migration import MigrationContext
+from .. import util
+from ..operations import Operations
+from ..script.revision import _GetRevArg
+
+if TYPE_CHECKING:
+ from sqlalchemy.engine import URL
+ from sqlalchemy.engine.base import Connection
+ from sqlalchemy.sql import Executable
+ from sqlalchemy.sql.schema import MetaData
+ from sqlalchemy.sql.schema import SchemaItem
+ from sqlalchemy.sql.type_api import TypeEngine
+
+ from .migration import MigrationInfo
+ from ..autogenerate.api import AutogenContext
+ from ..config import Config
+ from ..ddl import DefaultImpl
+ from ..operations.ops import MigrationScript
+ from ..script.base import ScriptDirectory
+
+_RevNumber = Optional[Union[str, Tuple[str, ...]]]
+
+ProcessRevisionDirectiveFn = Callable[
+ [MigrationContext, _GetRevArg, List["MigrationScript"]], None
+]
+
+RenderItemFn = Callable[
+ [str, Any, "AutogenContext"], Union[str, Literal[False]]
+]
+
+NameFilterType = Literal[
+ "schema",
+ "table",
+ "column",
+ "index",
+ "unique_constraint",
+ "foreign_key_constraint",
+]
+NameFilterParentNames = MutableMapping[
+ Literal["schema_name", "table_name", "schema_qualified_table_name"],
+ Optional[str],
+]
+IncludeNameFn = Callable[
+ [Optional[str], NameFilterType, NameFilterParentNames], bool
+]
+
+IncludeObjectFn = Callable[
+ [
+ "SchemaItem",
+ Optional[str],
+ NameFilterType,
+ bool,
+ Optional["SchemaItem"],
+ ],
+ bool,
+]
+
+OnVersionApplyFn = Callable[
+ [MigrationContext, "MigrationInfo", Collection[Any], Mapping[str, Any]],
+ None,
+]
+
+CompareServerDefault = Callable[
+ [
+ MigrationContext,
+ "Column[Any]",
+ "Column[Any]",
+ Optional[str],
+ Optional[FetchedValue],
+ Optional[str],
+ ],
+ Optional[bool],
+]
+
+CompareType = Callable[
+ [
+ MigrationContext,
+ "Column[Any]",
+ "Column[Any]",
+ "TypeEngine[Any]",
+ "TypeEngine[Any]",
+ ],
+ Optional[bool],
+]
+
+
+class EnvironmentContext(util.ModuleClsProxy):
+
+ """A configurational facade made available in an ``env.py`` script.
+
+ The :class:`.EnvironmentContext` acts as a *facade* to the more
+ nuts-and-bolts objects of :class:`.MigrationContext` as well as certain
+ aspects of :class:`.Config`,
+ within the context of the ``env.py`` script that is invoked by
+ most Alembic commands.
+
+ :class:`.EnvironmentContext` is normally instantiated
+ when a command in :mod:`alembic.command` is run. It then makes
+ itself available in the ``alembic.context`` module for the scope
+ of the command. From within an ``env.py`` script, the current
+ :class:`.EnvironmentContext` is available by importing this module.
+
+ :class:`.EnvironmentContext` also supports programmatic usage.
+ At this level, it acts as a Python context manager, that is, is
+ intended to be used using the
+ ``with:`` statement. A typical use of :class:`.EnvironmentContext`::
+
+ from alembic.config import Config
+ from alembic.script import ScriptDirectory
+
+ config = Config()
+ config.set_main_option("script_location", "myapp:migrations")
+ script = ScriptDirectory.from_config(config)
+
+
+ def my_function(rev, context):
+ '''do something with revision "rev", which
+ will be the current database revision,
+ and "context", which is the MigrationContext
+ that the env.py will create'''
+
+
+ with EnvironmentContext(
+ config,
+ script,
+ fn=my_function,
+ as_sql=False,
+ starting_rev="base",
+ destination_rev="head",
+ tag="sometag",
+ ):
+ script.run_env()
+
+ The above script will invoke the ``env.py`` script
+ within the migration environment. If and when ``env.py``
+ calls :meth:`.MigrationContext.run_migrations`, the
+ ``my_function()`` function above will be called
+ by the :class:`.MigrationContext`, given the context
+ itself as well as the current revision in the database.
+
+ .. note::
+
+ For most API usages other than full blown
+ invocation of migration scripts, the :class:`.MigrationContext`
+ and :class:`.ScriptDirectory` objects can be created and
+ used directly. The :class:`.EnvironmentContext` object
+ is *only* needed when you need to actually invoke the
+ ``env.py`` module present in the migration environment.
+
+ """
+
+ _migration_context: Optional[MigrationContext] = None
+
+ config: Config = None # type:ignore[assignment]
+ """An instance of :class:`.Config` representing the
+ configuration file contents as well as other variables
+ set programmatically within it."""
+
+ script: ScriptDirectory = None # type:ignore[assignment]
+ """An instance of :class:`.ScriptDirectory` which provides
+ programmatic access to version files within the ``versions/``
+ directory.
+
+ """
+
+ def __init__(
+ self, config: Config, script: ScriptDirectory, **kw: Any
+ ) -> None:
+ r"""Construct a new :class:`.EnvironmentContext`.
+
+ :param config: a :class:`.Config` instance.
+ :param script: a :class:`.ScriptDirectory` instance.
+ :param \**kw: keyword options that will be ultimately
+ passed along to the :class:`.MigrationContext` when
+ :meth:`.EnvironmentContext.configure` is called.
+
+ """
+ self.config = config
+ self.script = script
+ self.context_opts = kw
+
+ def __enter__(self) -> EnvironmentContext:
+ """Establish a context which provides a
+ :class:`.EnvironmentContext` object to
+ env.py scripts.
+
+ The :class:`.EnvironmentContext` will
+ be made available as ``from alembic import context``.
+
+ """
+ self._install_proxy()
+ return self
+
+ def __exit__(self, *arg: Any, **kw: Any) -> None:
+ self._remove_proxy()
+
+ def is_offline_mode(self) -> bool:
+ """Return True if the current migrations environment
+ is running in "offline mode".
+
+ This is ``True`` or ``False`` depending
+ on the ``--sql`` flag passed.
+
+ This function does not require that the :class:`.MigrationContext`
+ has been configured.
+
+ """
+ return self.context_opts.get("as_sql", False)
+
+ def is_transactional_ddl(self):
+ """Return True if the context is configured to expect a
+ transactional DDL capable backend.
+
+ This defaults to the type of database in use, and
+ can be overridden by the ``transactional_ddl`` argument
+        to :meth:`.configure`.
+
+ This function requires that a :class:`.MigrationContext`
+ has first been made available via :meth:`.configure`.
+
+ """
+ return self.get_context().impl.transactional_ddl
+
+ def requires_connection(self) -> bool:
+ return not self.is_offline_mode()
+
+ def get_head_revision(self) -> _RevNumber:
+ """Return the hex identifier of the 'head' script revision.
+
+ If the script directory has multiple heads, this
+ method raises a :class:`.CommandError`;
+ :meth:`.EnvironmentContext.get_head_revisions` should be preferred.
+
+ This function does not require that the :class:`.MigrationContext`
+ has been configured.
+
+ .. seealso:: :meth:`.EnvironmentContext.get_head_revisions`
+
+ """
+ return self.script.as_revision_number("head")
+
+ def get_head_revisions(self) -> _RevNumber:
+ """Return the hex identifier of the 'heads' script revision(s).
+
+ This returns a tuple containing the version number of all
+ heads in the script directory.
+
+ This function does not require that the :class:`.MigrationContext`
+ has been configured.
+
+ """
+ return self.script.as_revision_number("heads")
+
+ def get_starting_revision_argument(self) -> _RevNumber:
+ """Return the 'starting revision' argument,
+ if the revision was passed using ``start:end``.
+
+ This is only meaningful in "offline" mode.
+ Returns ``None`` if no value is available
+ or was configured.
+
+ This function does not require that the :class:`.MigrationContext`
+ has been configured.
+
+ """
+ if self._migration_context is not None:
+ return self.script.as_revision_number(
+ self.get_context()._start_from_rev
+ )
+ elif "starting_rev" in self.context_opts:
+ return self.script.as_revision_number(
+ self.context_opts["starting_rev"]
+ )
+ else:
+ # this should raise only in the case that a command
+ # is being run where the "starting rev" is never applicable;
+ # this is to catch scripts which rely upon this in
+ # non-sql mode or similar
+ raise util.CommandError(
+ "No starting revision argument is available."
+ )
+
+ def get_revision_argument(self) -> _RevNumber:
+ """Get the 'destination' revision argument.
+
+ This is typically the argument passed to the
+ ``upgrade`` or ``downgrade`` command.
+
+ If it was specified as ``head``, the actual
+ version number is returned; if specified
+ as ``base``, ``None`` is returned.
+
+ This function does not require that the :class:`.MigrationContext`
+ has been configured.
+
+ """
+ return self.script.as_revision_number(
+ self.context_opts["destination_rev"]
+ )
+
+ def get_tag_argument(self) -> Optional[str]:
+ """Return the value passed for the ``--tag`` argument, if any.
+
+ The ``--tag`` argument is not used directly by Alembic,
+ but is available for custom ``env.py`` configurations that
+ wish to use it; particularly for offline generation scripts
+ that wish to generate tagged filenames.
+
+ This function does not require that the :class:`.MigrationContext`
+ has been configured.
+
+ .. seealso::
+
+ :meth:`.EnvironmentContext.get_x_argument` - a newer and more
+ open ended system of extending ``env.py`` scripts via the command
+ line.
+
+ """
+ return self.context_opts.get("tag", None)
+
+ @overload
+ def get_x_argument(self, as_dictionary: Literal[False]) -> List[str]:
+ ...
+
+ @overload
+ def get_x_argument(self, as_dictionary: Literal[True]) -> Dict[str, str]:
+ ...
+
+ @overload
+ def get_x_argument(
+ self, as_dictionary: bool = ...
+ ) -> Union[List[str], Dict[str, str]]:
+ ...
+
+ def get_x_argument(
+ self, as_dictionary: bool = False
+ ) -> Union[List[str], Dict[str, str]]:
+ """Return the value(s) passed for the ``-x`` argument, if any.
+
+ The ``-x`` argument is an open ended flag that allows any user-defined
+ value or values to be passed on the command line, then available
+ here for consumption by a custom ``env.py`` script.
+
+ The return value is a list, returned directly from the ``argparse``
+ structure. If ``as_dictionary=True`` is passed, the ``x`` arguments
+ are parsed using ``key=value`` format into a dictionary that is
+ then returned.
+
+ For example, to support passing a database URL on the command line,
+ the standard ``env.py`` script can be modified like this::
+
+ cmd_line_url = context.get_x_argument(
+ as_dictionary=True).get('dbname')
+ if cmd_line_url:
+ engine = create_engine(cmd_line_url)
+ else:
+ engine = engine_from_config(
+ config.get_section(config.config_ini_section),
+ prefix='sqlalchemy.',
+ poolclass=pool.NullPool)
+
+ This then takes effect by running the ``alembic`` script as::
+
+ alembic -x dbname=postgresql://user:pass@host/dbname upgrade head
+
+ This function does not require that the :class:`.MigrationContext`
+ has been configured.
+
+ .. seealso::
+
+ :meth:`.EnvironmentContext.get_tag_argument`
+
+ :attr:`.Config.cmd_opts`
+
+ """
+ if self.config.cmd_opts is not None:
+ value = self.config.cmd_opts.x or []
+ else:
+ value = []
+ if as_dictionary:
+ value = dict(arg.split("=", 1) for arg in value)
+ return value
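+
+    # For example, ``alembic -x tenant=acme -x dry_run=1 upgrade head``
+    # yields ``["tenant=acme", "dry_run=1"]`` here, or
+    # ``{"tenant": "acme", "dry_run": "1"}`` with ``as_dictionary=True``;
+    # note that values always arrive as strings.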
+
+ def configure(
+ self,
+ connection: Optional[Connection] = None,
+ url: Optional[Union[str, URL]] = None,
+ dialect_name: Optional[str] = None,
+ dialect_opts: Optional[Dict[str, Any]] = None,
+ transactional_ddl: Optional[bool] = None,
+ transaction_per_migration: bool = False,
+ output_buffer: Optional[TextIO] = None,
+ starting_rev: Optional[str] = None,
+ tag: Optional[str] = None,
+ template_args: Optional[Dict[str, Any]] = None,
+ render_as_batch: bool = False,
+ target_metadata: Optional[MetaData] = None,
+ include_name: Optional[IncludeNameFn] = None,
+ include_object: Optional[IncludeObjectFn] = None,
+ include_schemas: bool = False,
+ process_revision_directives: Optional[
+ ProcessRevisionDirectiveFn
+ ] = None,
+ compare_type: Union[bool, CompareType] = True,
+ compare_server_default: Union[bool, CompareServerDefault] = False,
+ render_item: Optional[RenderItemFn] = None,
+ literal_binds: bool = False,
+ upgrade_token: str = "upgrades",
+ downgrade_token: str = "downgrades",
+ alembic_module_prefix: str = "op.",
+ sqlalchemy_module_prefix: str = "sa.",
+ user_module_prefix: Optional[str] = None,
+ on_version_apply: Optional[OnVersionApplyFn] = None,
+ **kw: Any,
+ ) -> None:
+ """Configure a :class:`.MigrationContext` within this
+ :class:`.EnvironmentContext` which will provide database
+ connectivity and other configuration to a series of
+ migration scripts.
+
+ Many methods on :class:`.EnvironmentContext` require that
+ this method has been called in order to function, as they
+ ultimately need to have database access or at least access
+ to the dialect in use. Those which do are documented as such.
+
+ The important thing needed by :meth:`.configure` is a
+ means to determine what kind of database dialect is in use.
+ An actual connection to that database is needed only if
+ the :class:`.MigrationContext` is to be used in
+ "online" mode.
+
+ If the :meth:`.is_offline_mode` function returns ``True``,
+ then no connection is needed here. Otherwise, the
+ ``connection`` parameter should be present as an
+ instance of :class:`sqlalchemy.engine.Connection`.
+
+ This function is typically called from the ``env.py``
+ script within a migration environment. It can be called
+ multiple times for an invocation. The most recent
+ :class:`~sqlalchemy.engine.Connection`
+ for which it was called is the one that will be operated upon
+ by the next call to :meth:`.run_migrations`.
+
+ General parameters:
+
+ :param connection: a :class:`~sqlalchemy.engine.Connection`
+ to use
+ for SQL execution in "online" mode. When present, is also
+ used to determine the type of dialect in use.
+ :param url: a string database url, or a
+ :class:`sqlalchemy.engine.url.URL` object.
+ The type of dialect to be used will be derived from this if
+ ``connection`` is not passed.
+ :param dialect_name: string name of a dialect, such as
+ "postgresql", "mssql", etc.
+ The type of dialect to be used will be derived from this if
+ ``connection`` and ``url`` are not passed.
+ :param dialect_opts: dictionary of options to be passed to dialect
+ constructor.
+ :param transactional_ddl: Force the usage of "transactional"
+ DDL on or off;
+ this otherwise defaults to whether or not the dialect in
+ use supports it.
+        :param transaction_per_migration: if True, nest each migration script
+          in its own transaction, rather than running the full series of
+          migrations within a single transaction.
+ :param output_buffer: a file-like object that will be used
+ for textual output
+ when the ``--sql`` option is used to generate SQL scripts.
+ Defaults to
+ ``sys.stdout`` if not passed here and also not present on
+ the :class:`.Config`
+ object. The value here overrides that of the :class:`.Config`
+ object.
+ :param output_encoding: when using ``--sql`` to generate SQL
+ scripts, apply this encoding to the string output.
+ :param literal_binds: when using ``--sql`` to generate SQL
+ scripts, pass through the ``literal_binds`` flag to the compiler
+ so that any literal values that would ordinarily be bound
+ parameters are converted to plain strings.
+
+ .. warning:: Dialects can typically only handle simple datatypes
+ like strings and numbers for auto-literal generation. Datatypes
+ like dates, intervals, and others may still require manual
+ formatting, typically using :meth:`.Operations.inline_literal`.
+
+ .. note:: the ``literal_binds`` flag is ignored on SQLAlchemy
+ versions prior to 0.8 where this feature is not supported.
+
+ .. seealso::
+
+ :meth:`.Operations.inline_literal`
+
+ :param starting_rev: Override the "starting revision" argument
+ when using ``--sql`` mode.
+ :param tag: a string tag for usage by custom ``env.py`` scripts.
+ Set via the ``--tag`` option, can be overridden here.
+ :param template_args: dictionary of template arguments which
+ will be added to the template argument environment when
+ running the "revision" command. Note that the script environment
+          is only run within the "revision" command if the ``--autogenerate``
+          option is used, or if the option ``revision_environment=true``
+          is present in the ``alembic.ini`` file.
+
+ :param version_table: The name of the Alembic version table.
+ The default is ``'alembic_version'``.
+ :param version_table_schema: Optional schema to place version
+ table within.
+ :param version_table_pk: boolean, whether the Alembic version table
+ should use a primary key constraint for the "value" column; this
+ only takes effect when the table is first created.
+ Defaults to True; setting to False should not be necessary and is
+ here for backwards compatibility reasons.
+ :param on_version_apply: a callable or collection of callables to be
+ run for each migration step.
+ The callables will be run in the order they are given, once for
+ each migration step, after the respective operation has been
+ applied but before its transaction is finalized.
+ Each callable accepts no positional arguments and the following
+ keyword arguments:
+
+ * ``ctx``: the :class:`.MigrationContext` running the migration,
+ * ``step``: a :class:`.MigrationInfo` representing the
+ step currently being applied,
+ * ``heads``: a collection of version strings representing the
+ current heads,
+ * ``run_args``: the ``**kwargs`` passed to :meth:`.run_migrations`.
+
+ Parameters specific to the autogenerate feature, when
+ ``alembic revision`` is run with the ``--autogenerate`` feature:
+
+ :param target_metadata: a :class:`sqlalchemy.schema.MetaData`
+ object, or a sequence of :class:`~sqlalchemy.schema.MetaData`
+ objects, that will be consulted during autogeneration.
+ The tables present in each :class:`~sqlalchemy.schema.MetaData`
+ will be compared against
+ what is locally available on the target
+ :class:`~sqlalchemy.engine.Connection`
+ to produce candidate upgrade/downgrade operations.
+ :param compare_type: Indicates type comparison behavior during
+ an autogenerate
+ operation. Defaults to ``True`` turning on type comparison, which
+ has good accuracy on most backends. See :ref:`compare_types`
+ for an example as well as information on other type
+          comparison options. Set to ``False`` to disable type
+          comparison. A callable can also be passed to provide custom type
+ comparison, see :ref:`compare_types` for additional details.
+
+ .. versionchanged:: 1.12.0 The default value of
+ :paramref:`.EnvironmentContext.configure.compare_type` has been
+ changed to ``True``.
+
+ .. seealso::
+
+ :ref:`compare_types`
+
+ :paramref:`.EnvironmentContext.configure.compare_server_default`
+
+ :param compare_server_default: Indicates server default comparison
+ behavior during
+ an autogenerate operation. Defaults to ``False`` which disables
+ server default
+ comparison. Set to ``True`` to turn on server default comparison,
+ which has
+ varied accuracy depending on backend.
+
+        To customize server default comparison behavior, a callable may
+        be specified which can filter server default comparisons during
+        an autogenerate operation. The format of this callable is::
+
+ def my_compare_server_default(context, inspected_column,
+ metadata_column, inspected_default, metadata_default,
+ rendered_metadata_default):
+ # return True if the defaults are different,
+ # False if not, or None to allow the default implementation
+ # to compare these defaults
+ return None
+
+ context.configure(
+ # ...
+ compare_server_default = my_compare_server_default
+ )
+
+ ``inspected_column`` is a dictionary structure as returned by
+ :meth:`sqlalchemy.engine.reflection.Inspector.get_columns`, whereas
+ ``metadata_column`` is a :class:`sqlalchemy.schema.Column` from
+ the local model environment.
+
+        A return value of ``None`` indicates that the default server default
+        comparison should proceed. Note that some backends such as PostgreSQL
+        actually execute the two defaults on the database side to compare
+        for equivalence.
+
+ .. seealso::
+
+ :paramref:`.EnvironmentContext.configure.compare_type`
+
+ :param include_name: A callable function which is given
+ the chance to return ``True`` or ``False`` for any database reflected
+ object based on its name, including database schema names when
+ the :paramref:`.EnvironmentContext.configure.include_schemas` flag
+ is set to ``True``.
+
+ The function accepts the following positional arguments:
+
+ * ``name``: the name of the object, such as schema name or table name.
+ Will be ``None`` when indicating the default schema name of the
+ database connection.
+ * ``type``: a string describing the type of object; currently
+ ``"schema"``, ``"table"``, ``"column"``, ``"index"``,
+ ``"unique_constraint"``, or ``"foreign_key_constraint"``
+ * ``parent_names``: a dictionary of "parent" object names, that are
+ relative to the name being given. Keys in this dictionary may
+ include: ``"schema_name"``, ``"table_name"`` or
+ ``"schema_qualified_table_name"``.
+
+ E.g.::
+
+ def include_name(name, type_, parent_names):
+ if type_ == "schema":
+ return name in ["schema_one", "schema_two"]
+ else:
+ return True
+
+ context.configure(
+ # ...
+ include_schemas = True,
+ include_name = include_name
+ )
+
+ .. seealso::
+
+ :ref:`autogenerate_include_hooks`
+
+ :paramref:`.EnvironmentContext.configure.include_object`
+
+ :paramref:`.EnvironmentContext.configure.include_schemas`
+
+
+ :param include_object: A callable function which is given
+ the chance to return ``True`` or ``False`` for any object,
+ indicating if the given object should be considered in the
+ autogenerate sweep.
+
+ The function accepts the following positional arguments:
+
+ * ``object``: a :class:`~sqlalchemy.schema.SchemaItem` object such
+ as a :class:`~sqlalchemy.schema.Table`,
+ :class:`~sqlalchemy.schema.Column`,
+ :class:`~sqlalchemy.schema.Index`
+ :class:`~sqlalchemy.schema.UniqueConstraint`,
+ or :class:`~sqlalchemy.schema.ForeignKeyConstraint` object
+ * ``name``: the name of the object. This is typically available
+ via ``object.name``.
+ * ``type``: a string describing the type of object; currently
+ ``"table"``, ``"column"``, ``"index"``, ``"unique_constraint"``,
+ or ``"foreign_key_constraint"``
+ * ``reflected``: ``True`` if the given object was produced based on
+ table reflection, ``False`` if it's from a local :class:`.MetaData`
+ object.
+ * ``compare_to``: the object being compared against, if available,
+ else ``None``.
+
+ E.g.::
+
+ def include_object(object, name, type_, reflected, compare_to):
+ if (type_ == "column" and
+ not reflected and
+ object.info.get("skip_autogenerate", False)):
+ return False
+ else:
+ return True
+
+ context.configure(
+ # ...
+ include_object = include_object
+ )
+
+ For the use case of omitting specific schemas from a target database
+ when :paramref:`.EnvironmentContext.configure.include_schemas` is
+ set to ``True``, the :attr:`~sqlalchemy.schema.Table.schema`
+ attribute can be checked for each :class:`~sqlalchemy.schema.Table`
+ object passed to the hook, however it is much more efficient
+ to filter on schemas before reflection of objects takes place
+ using the :paramref:`.EnvironmentContext.configure.include_name`
+ hook.
+
+ .. seealso::
+
+ :ref:`autogenerate_include_hooks`
+
+ :paramref:`.EnvironmentContext.configure.include_name`
+
+ :paramref:`.EnvironmentContext.configure.include_schemas`
+
+ :param render_as_batch: if True, commands which alter elements
+ within a table will be placed under a ``with batch_alter_table():``
+ directive, so that batch migrations will take place.
+
+ .. seealso::
+
+ :ref:`batch_migrations`
+
+ :param include_schemas: If True, autogenerate will scan across
+ all schemas located by the SQLAlchemy
+ :meth:`~sqlalchemy.engine.reflection.Inspector.get_schema_names`
+ method, and include all differences in tables found across all
+ those schemas. When using this option, you may want to also
+ use the :paramref:`.EnvironmentContext.configure.include_name`
+ parameter to specify a callable which
+ can filter the tables/schemas that get included.
+
+ .. seealso::
+
+ :ref:`autogenerate_include_hooks`
+
+ :paramref:`.EnvironmentContext.configure.include_name`
+
+ :paramref:`.EnvironmentContext.configure.include_object`
+
+ :param render_item: Callable that can be used to override how
+ any schema item, i.e. column, constraint, type,
+ etc., is rendered for autogenerate. The callable receives a
+ string describing the type of object, the object, and
+ the autogen context. If it returns False, the
+ default rendering method will be used. If it returns None,
+ the item will not be rendered in the context of a Table
+        construct, that is, it can be used to skip columns or constraints
+ within op.create_table()::
+
+ def my_render_column(type_, col, autogen_context):
+ if type_ == "column" and isinstance(col, MySpecialCol):
+ return repr(col)
+ else:
+ return False
+
+ context.configure(
+ # ...
+ render_item = my_render_column
+ )
+
+ Available values for the type string include: ``"column"``,
+ ``"primary_key"``, ``"foreign_key"``, ``"unique"``, ``"check"``,
+ ``"type"``, ``"server_default"``.
+
+ .. seealso::
+
+ :ref:`autogen_render_types`
+
+ :param upgrade_token: When autogenerate completes, the text of the
+ candidate upgrade operations will be present in this template
+ variable when ``script.py.mako`` is rendered. Defaults to
+ ``upgrades``.
+ :param downgrade_token: When autogenerate completes, the text of the
+ candidate downgrade operations will be present in this
+ template variable when ``script.py.mako`` is rendered. Defaults to
+ ``downgrades``.
+
+ :param alembic_module_prefix: When autogenerate refers to Alembic
+ :mod:`alembic.operations` constructs, this prefix will be used
+ (i.e. ``op.create_table``) Defaults to "``op.``".
+ Can be ``None`` to indicate no prefix.
+
+ :param sqlalchemy_module_prefix: When autogenerate refers to
+ SQLAlchemy
+ :class:`~sqlalchemy.schema.Column` or type classes, this prefix
+ will be used
+ (i.e. ``sa.Column("somename", sa.Integer)``) Defaults to "``sa.``".
+ Can be ``None`` to indicate no prefix.
+ Note that when dialect-specific types are rendered, autogenerate
+ will render them using the dialect module name, i.e. ``mssql.BIT()``,
+ ``postgresql.UUID()``.
+
+ :param user_module_prefix: When autogenerate refers to a SQLAlchemy
+ type (e.g. :class:`.TypeEngine`) where the module name is not
+ under the ``sqlalchemy`` namespace, this prefix will be used
+ within autogenerate. If left at its default of
+ ``None``, the ``__module__`` attribute of the type is used to
+ render the import module. It's a good practice to set this
+ and to have all custom types be available from a fixed module space,
+ in order to future-proof migration files against reorganizations
+ in modules.
+
+ .. seealso::
+
+ :ref:`autogen_module_prefix`
+
+ :param process_revision_directives: a callable function that will
+ be passed a structure representing the end result of an autogenerate
+ or plain "revision" operation, which can be manipulated to affect
+ how the ``alembic revision`` command ultimately outputs new
+ revision scripts. The structure of the callable is::
+
+ def process_revision_directives(context, revision, directives):
+ pass
+
+ The ``directives`` parameter is a Python list containing
+ a single :class:`.MigrationScript` directive, which represents
+ the revision file to be generated. This list as well as its
+ contents may be freely modified to produce any set of commands.
+ The section :ref:`customizing_revision` shows an example of
+ doing this. The ``context`` parameter is the
+ :class:`.MigrationContext` in use,
+ and ``revision`` is a tuple of revision identifiers representing the
+ current revision of the database.
+
+ The callable is invoked at all times when the ``--autogenerate``
+ option is passed to ``alembic revision``. If ``--autogenerate``
+ is not passed, the callable is invoked only if the
+ ``revision_environment`` variable is set to True in the Alembic
+ configuration, in which case the given ``directives`` collection
+ will contain empty :class:`.UpgradeOps` and :class:`.DowngradeOps`
+ collections for ``.upgrade_ops`` and ``.downgrade_ops``. The
+ ``--autogenerate`` option itself can be inferred by inspecting
+ ``context.config.cmd_opts.autogenerate``.
+
+ The callable function may optionally be an instance of
+ a :class:`.Rewriter` object. This is a helper object that
+ assists in the production of autogenerate-stream rewriter functions.
+
+ .. seealso::
+
+ :ref:`customizing_revision`
+
+ :ref:`autogen_rewriter`
+
+ :paramref:`.command.revision.process_revision_directives`
+
+ Parameters specific to individual backends:
+
+ :param mssql_batch_separator: The "batch separator" which will
+ be placed between each statement when generating offline SQL Server
+ migrations. Defaults to ``GO``. Note this is in addition to the
+ customary semicolon ``;`` at the end of each statement; SQL Server
+ considers the "batch separator" to denote the end of an
+ individual statement execution, and cannot group certain
+ dependent operations in one step.
+ :param oracle_batch_separator: The "batch separator" which will
+ be placed between each statement when generating offline
+          Oracle migrations. Defaults to ``/``. Unlike most other backends,
+          Oracle doesn't place a semicolon between statements.
+
+ """
+ opts = self.context_opts
+ if transactional_ddl is not None:
+ opts["transactional_ddl"] = transactional_ddl
+ if output_buffer is not None:
+ opts["output_buffer"] = output_buffer
+ elif self.config.output_buffer is not None:
+ opts["output_buffer"] = self.config.output_buffer
+ if starting_rev:
+ opts["starting_rev"] = starting_rev
+ if tag:
+ opts["tag"] = tag
+ if template_args and "template_args" in opts:
+ opts["template_args"].update(template_args)
+ opts["transaction_per_migration"] = transaction_per_migration
+ opts["target_metadata"] = target_metadata
+ opts["include_name"] = include_name
+ opts["include_object"] = include_object
+ opts["include_schemas"] = include_schemas
+ opts["render_as_batch"] = render_as_batch
+ opts["upgrade_token"] = upgrade_token
+ opts["downgrade_token"] = downgrade_token
+ opts["sqlalchemy_module_prefix"] = sqlalchemy_module_prefix
+ opts["alembic_module_prefix"] = alembic_module_prefix
+ opts["user_module_prefix"] = user_module_prefix
+ opts["literal_binds"] = literal_binds
+ opts["process_revision_directives"] = process_revision_directives
+ opts["on_version_apply"] = util.to_tuple(on_version_apply, default=())
+
+ if render_item is not None:
+ opts["render_item"] = render_item
+ opts["compare_type"] = compare_type
+ if compare_server_default is not None:
+ opts["compare_server_default"] = compare_server_default
+ opts["script"] = self.script
+
+ opts.update(kw)
+
+ self._migration_context = MigrationContext.configure(
+ connection=connection,
+ url=url,
+ dialect_name=dialect_name,
+ environment_context=self,
+ dialect_opts=dialect_opts,
+ opts=opts,
+ )
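+
+    # A minimal "online" configuration as it might appear in ``env.py``;
+    # ``connectable`` and ``target_metadata`` are assumed to be defined by
+    # the surrounding script::
+    #
+    #     with connectable.connect() as connection:
+    #         context.configure(
+    #             connection=connection,
+    #             target_metadata=target_metadata,
+    #             render_as_batch=True,  # e.g. for SQLite's limited ALTER
+    #         )
+    #         with context.begin_transaction():
+    #             context.run_migrations()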
+
+ def run_migrations(self, **kw: Any) -> None:
+ """Run migrations as determined by the current command line
+ configuration
+ as well as versioning information present (or not) in the current
+ database connection (if one is present).
+
+ The function accepts optional ``**kw`` arguments. If these are
+ passed, they are sent directly to the ``upgrade()`` and
+ ``downgrade()``
+ functions within each target revision file. By modifying the
+ ``script.py.mako`` file so that the ``upgrade()`` and ``downgrade()``
+ functions accept arguments, parameters can be passed here so that
+ contextual information, usually information to identify a particular
+ database in use, can be passed from a custom ``env.py`` script
+ to the migration functions.
+
+ This function requires that a :class:`.MigrationContext` has
+ first been made available via :meth:`.configure`.
+
+ """
+ assert self._migration_context is not None
+ with Operations.context(self._migration_context):
+ self.get_context().run_migrations(**kw)
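+
+    # Sketch of the keyword pass-through described above: if ``env.py``
+    # calls ``context.run_migrations(engine_name="engine1")`` and the
+    # ``script.py.mako`` template renders ``def upgrade(engine_name):``,
+    # then each revision's ``upgrade()`` receives ``engine_name="engine1"``.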
+
+ def execute(
+ self,
+ sql: Union[Executable, str],
+ execution_options: Optional[dict] = None,
+ ) -> None:
+ """Execute the given SQL using the current change context.
+
+ The behavior of :meth:`.execute` is the same
+ as that of :meth:`.Operations.execute`. Please see that
+ function's documentation for full detail including
+ caveats and limitations.
+
+ This function requires that a :class:`.MigrationContext` has
+ first been made available via :meth:`.configure`.
+
+ """
+ self.get_context().execute(sql, execution_options=execution_options)
+
+ def static_output(self, text: str) -> None:
+ """Emit text directly to the "offline" SQL stream.
+
+ Typically this is for emitting comments that
+ start with --. The statement is not treated
+ as a SQL execution, no ; or batch separator
+ is added, etc.
+
+ """
+ self.get_context().impl.static_output(text)
+
+ def begin_transaction(
+ self,
+ ) -> Union[_ProxyTransaction, ContextManager[None]]:
+ """Return a context manager that will
+ enclose an operation within a "transaction",
+ as defined by the environment's offline
+ and transactional DDL settings.
+
+ e.g.::
+
+ with context.begin_transaction():
+ context.run_migrations()
+
+ :meth:`.begin_transaction` is intended to
+ "do the right thing" regardless of
+ calling context:
+
+ * If :meth:`.is_transactional_ddl` is ``False``,
+ returns a "do nothing" context manager
+ which otherwise produces no transactional
+ state or directives.
+ * If :meth:`.is_offline_mode` is ``True``,
+ returns a context manager that will
+ invoke the :meth:`.DefaultImpl.emit_begin`
+ and :meth:`.DefaultImpl.emit_commit`
+ methods, which will produce the string
+ directives ``BEGIN`` and ``COMMIT`` on
+ the output stream, as rendered by the
+ target backend (e.g. SQL Server would
+ emit ``BEGIN TRANSACTION``).
+ * Otherwise, calls :meth:`sqlalchemy.engine.Connection.begin`
+ on the current online connection, which
+ returns a :class:`sqlalchemy.engine.Transaction`
+ object. This object demarcates a real
+ transaction and is itself a context manager,
+ which will roll back if an exception
+ is raised.
+
+ Note that a custom ``env.py`` script which
+ has more specific transactional needs can of course
+ manipulate the :class:`~sqlalchemy.engine.Connection`
+ directly to produce transactional state in "online"
+ mode.
+
+ """
+
+ return self.get_context().begin_transaction()
+
+ def get_context(self) -> MigrationContext:
+ """Return the current :class:`.MigrationContext` object.
+
+ If :meth:`.EnvironmentContext.configure` has not been
+ called yet, raises an exception.
+
+ """
+
+ if self._migration_context is None:
+ raise Exception("No context has been configured yet.")
+ return self._migration_context
+
+ def get_bind(self) -> Connection:
+ """Return the current 'bind'.
+
+ In "online" mode, this is the
+ :class:`sqlalchemy.engine.Connection` currently being used
+ to emit SQL to the database.
+
+ This function requires that a :class:`.MigrationContext`
+ has first been made available via :meth:`.configure`.
+
+ """
+ return self.get_context().bind # type: ignore[return-value]
+
+ def get_impl(self) -> DefaultImpl:
+ return self.get_context().impl
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/runtime/migration.py b/Backend/venv/lib/python3.12/site-packages/alembic/runtime/migration.py
new file mode 100644
index 00000000..24e3d644
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/alembic/runtime/migration.py
@@ -0,0 +1,1380 @@
+from __future__ import annotations
+
+from contextlib import contextmanager
+from contextlib import nullcontext
+import logging
+import sys
+from typing import Any
+from typing import Callable
+from typing import cast
+from typing import Collection
+from typing import ContextManager
+from typing import Dict
+from typing import Iterable
+from typing import Iterator
+from typing import List
+from typing import Optional
+from typing import Set
+from typing import Tuple
+from typing import TYPE_CHECKING
+from typing import Union
+
+from sqlalchemy import Column
+from sqlalchemy import literal_column
+from sqlalchemy import MetaData
+from sqlalchemy import PrimaryKeyConstraint
+from sqlalchemy import String
+from sqlalchemy import Table
+from sqlalchemy.engine import Engine
+from sqlalchemy.engine import url as sqla_url
+from sqlalchemy.engine.strategies import MockEngineStrategy
+
+from .. import ddl
+from .. import util
+from ..util import sqla_compat
+from ..util.compat import EncodedIO
+
+if TYPE_CHECKING:
+ from sqlalchemy.engine import Dialect
+ from sqlalchemy.engine import URL
+ from sqlalchemy.engine.base import Connection
+ from sqlalchemy.engine.base import Transaction
+ from sqlalchemy.engine.mock import MockConnection
+ from sqlalchemy.sql import Executable
+
+ from .environment import EnvironmentContext
+ from ..config import Config
+ from ..script.base import Script
+ from ..script.base import ScriptDirectory
+ from ..script.revision import _RevisionOrBase
+ from ..script.revision import Revision
+ from ..script.revision import RevisionMap
+
+log = logging.getLogger(__name__)
+
+
+class _ProxyTransaction:
+ def __init__(self, migration_context: MigrationContext) -> None:
+ self.migration_context = migration_context
+
+ @property
+ def _proxied_transaction(self) -> Optional[Transaction]:
+ return self.migration_context._transaction
+
+ def rollback(self) -> None:
+ t = self._proxied_transaction
+ assert t is not None
+ t.rollback()
+ self.migration_context._transaction = None
+
+ def commit(self) -> None:
+ t = self._proxied_transaction
+ assert t is not None
+ t.commit()
+ self.migration_context._transaction = None
+
+ def __enter__(self) -> _ProxyTransaction:
+ return self
+
+ def __exit__(self, type_: Any, value: Any, traceback: Any) -> None:
+ if self._proxied_transaction is not None:
+ self._proxied_transaction.__exit__(type_, value, traceback)
+ self.migration_context._transaction = None
+
+
+class MigrationContext:
+
+ """Represent the database state made available to a migration
+ script.
+
+ :class:`.MigrationContext` is the front end to an actual
+ database connection, or alternatively a string output
+ stream given a particular database dialect,
+ from an Alembic perspective.
+
+ When inside the ``env.py`` script, the :class:`.MigrationContext`
+ is available via the
+ :meth:`.EnvironmentContext.get_context` method,
+ which is available at ``alembic.context``::
+
+ # from within env.py script
+ from alembic import context
+
+ migration_context = context.get_context()
+
+ For usage outside of an ``env.py`` script, such as for
+ utility routines that want to check the current version
+ in the database, the :meth:`.MigrationContext.configure`
+    method may be used to create new :class:`.MigrationContext` objects.
+ For example, to get at the current revision in the
+ database using :meth:`.MigrationContext.get_current_revision`::
+
+ # in any application, outside of an env.py script
+ from alembic.migration import MigrationContext
+ from sqlalchemy import create_engine
+
+        engine = create_engine("postgresql://user:pass@localhost/mydatabase")
+ conn = engine.connect()
+
+ context = MigrationContext.configure(conn)
+ current_rev = context.get_current_revision()
+
+ The above context can also be used to produce
+ Alembic migration operations with an :class:`.Operations`
+ instance::
+
+ # in any application, outside of the normal Alembic environment
+ from alembic.operations import Operations
+
+ op = Operations(context)
+ op.alter_column("mytable", "somecolumn", nullable=True)
+
+ """
+
+ def __init__(
+ self,
+ dialect: Dialect,
+ connection: Optional[Connection],
+ opts: Dict[str, Any],
+ environment_context: Optional[EnvironmentContext] = None,
+ ) -> None:
+ self.environment_context = environment_context
+ self.opts = opts
+ self.dialect = dialect
+ self.script: Optional[ScriptDirectory] = opts.get("script")
+ as_sql: bool = opts.get("as_sql", False)
+ transactional_ddl = opts.get("transactional_ddl")
+ self._transaction_per_migration = opts.get(
+ "transaction_per_migration", False
+ )
+ self.on_version_apply_callbacks = opts.get("on_version_apply", ())
+ self._transaction: Optional[Transaction] = None
+
+ if as_sql:
+ self.connection = cast(
+ Optional["Connection"], self._stdout_connection(connection)
+ )
+ assert self.connection is not None
+ self._in_external_transaction = False
+ else:
+ self.connection = connection
+ self._in_external_transaction = (
+ sqla_compat._get_connection_in_transaction(connection)
+ )
+
+ self._migrations_fn: Optional[
+ Callable[..., Iterable[RevisionStep]]
+ ] = opts.get("fn")
+ self.as_sql = as_sql
+
+ self.purge = opts.get("purge", False)
+
+ if "output_encoding" in opts:
+ self.output_buffer = EncodedIO(
+ opts.get("output_buffer")
+ or sys.stdout, # type:ignore[arg-type]
+ opts["output_encoding"],
+ )
+ else:
+ self.output_buffer = opts.get("output_buffer", sys.stdout)
+
+ self._user_compare_type = opts.get("compare_type", True)
+ self._user_compare_server_default = opts.get(
+ "compare_server_default", False
+ )
+ self.version_table = version_table = opts.get(
+ "version_table", "alembic_version"
+ )
+ self.version_table_schema = version_table_schema = opts.get(
+ "version_table_schema", None
+ )
+ self._version = Table(
+ version_table,
+ MetaData(),
+ Column("version_num", String(32), nullable=False),
+ schema=version_table_schema,
+ )
+ if opts.get("version_table_pk", True):
+ self._version.append_constraint(
+ PrimaryKeyConstraint(
+ "version_num", name="%s_pkc" % version_table
+ )
+ )
+
+ self._start_from_rev: Optional[str] = opts.get("starting_rev")
+ self.impl = ddl.DefaultImpl.get_by_dialect(dialect)(
+ dialect,
+ self.connection,
+ self.as_sql,
+ transactional_ddl,
+ self.output_buffer,
+ opts,
+ )
+ log.info("Context impl %s.", self.impl.__class__.__name__)
+ if self.as_sql:
+ log.info("Generating static SQL")
+ log.info(
+ "Will assume %s DDL.",
+ "transactional"
+ if self.impl.transactional_ddl
+ else "non-transactional",
+ )
+
+ @classmethod
+ def configure(
+ cls,
+ connection: Optional[Connection] = None,
+ url: Optional[Union[str, URL]] = None,
+ dialect_name: Optional[str] = None,
+ dialect: Optional[Dialect] = None,
+ environment_context: Optional[EnvironmentContext] = None,
+ dialect_opts: Optional[Dict[str, str]] = None,
+ opts: Optional[Any] = None,
+ ) -> MigrationContext:
+ """Create a new :class:`.MigrationContext`.
+
+ This is a factory method usually called
+ by :meth:`.EnvironmentContext.configure`.
+
+ :param connection: a :class:`~sqlalchemy.engine.Connection`
+ to use for SQL execution in "online" mode. When present,
+ is also used to determine the type of dialect in use.
+ :param url: a string database url, or a
+ :class:`sqlalchemy.engine.url.URL` object.
+ The type of dialect to be used will be derived from this if
+ ``connection`` is not passed.
+ :param dialect_name: string name of a dialect, such as
+ "postgresql", "mssql", etc. The type of dialect to be used will be
+ derived from this if ``connection`` and ``url`` are not passed.
+ :param opts: dictionary of options. Most other options
+ accepted by :meth:`.EnvironmentContext.configure` are passed via
+ this dictionary.
+
+ """
+ if opts is None:
+ opts = {}
+ if dialect_opts is None:
+ dialect_opts = {}
+
+ if connection:
+ if isinstance(connection, Engine):
+ raise util.CommandError(
+ "'connection' argument to configure() is expected "
+ "to be a sqlalchemy.engine.Connection instance, "
+ "got %r" % connection,
+ )
+
+ dialect = connection.dialect
+ elif url:
+ url_obj = sqla_url.make_url(url)
+ dialect = url_obj.get_dialect()(**dialect_opts)
+ elif dialect_name:
+ url_obj = sqla_url.make_url("%s://" % dialect_name)
+ dialect = url_obj.get_dialect()(**dialect_opts)
+ elif not dialect:
+ raise Exception("Connection, url, or dialect_name is required.")
+ assert dialect is not None
+ return MigrationContext(dialect, connection, opts, environment_context)
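+
+    # A sketch of standalone use, e.g. a utility that inspects the version
+    # table outside of an Alembic environment (URL illustrative)::
+    #
+    #     from sqlalchemy import create_engine
+    #
+    #     engine = create_engine("postgresql://user:pass@localhost/mydb")
+    #     with engine.connect() as conn:
+    #         context = MigrationContext.configure(conn)
+    #         heads = context.get_current_heads()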
+
+ @contextmanager
+ def autocommit_block(self) -> Iterator[None]:
+ """Enter an "autocommit" block, for databases that support AUTOCOMMIT
+ isolation levels.
+
+ This special directive is intended to support the occasional database
+ DDL or system operation that specifically has to be run outside of
+ any kind of transaction block. The PostgreSQL database platform
+ is the most common target for this style of operation, as many
+ of its DDL operations must be run outside of transaction blocks, even
+ though the database overall supports transactional DDL.
+
+ The method is used as a context manager within a migration script, by
+ calling on :meth:`.Operations.get_context` to retrieve the
+ :class:`.MigrationContext`, then invoking
+ :meth:`.MigrationContext.autocommit_block` using the ``with:``
+ statement::
+
+ def upgrade():
+ with op.get_context().autocommit_block():
+ op.execute("ALTER TYPE mood ADD VALUE 'soso'")
+
+ Above, a PostgreSQL "ALTER TYPE..ADD VALUE" directive is emitted,
+ which must be run outside of a transaction block at the database level.
+ The :meth:`.MigrationContext.autocommit_block` method makes use of the
+ SQLAlchemy ``AUTOCOMMIT`` isolation level setting, which against the
+        psycopg2 DBAPI corresponds to the ``connection.autocommit`` setting,
+ to ensure that the database driver is not inside of a DBAPI level
+ transaction block.
+
+ .. warning::
+
+ As is necessary, **the database transaction preceding the block is
+ unconditionally committed**. This means that the run of migrations
+ preceding the operation will be committed, before the overall
+ migration operation is complete.
+
+            It is recommended that when an application includes migrations
+            with "autocommit" blocks,
+            :paramref:`.EnvironmentContext.configure.transaction_per_migration`
+            be used
+ so that the calling environment is tuned to expect short per-file
+ migrations whether or not one of them has an autocommit block.
+
+
+ """
+ _in_connection_transaction = self._in_connection_transaction()
+
+ if self.impl.transactional_ddl and self.as_sql:
+ self.impl.emit_commit()
+
+ elif _in_connection_transaction:
+ assert self._transaction is not None
+
+ self._transaction.commit()
+ self._transaction = None
+
+ if not self.as_sql:
+ assert self.connection is not None
+ current_level = self.connection.get_isolation_level()
+ base_connection = self.connection
+
+ # in 1.3 and 1.4 non-future mode, the connection gets switched
+ # out. we can use the base connection with the new mode
+ # except that it will not know it's in "autocommit" and will
+ # emit deprecation warnings when an autocommit action takes
+ # place.
+ self.connection = (
+ self.impl.connection
+ ) = base_connection.execution_options(isolation_level="AUTOCOMMIT")
+
+ # sqlalchemy future mode will "autobegin" in any case, so take
+ # control of that "transaction" here
+ fake_trans: Optional[Transaction] = self.connection.begin()
+ else:
+ fake_trans = None
+ try:
+ yield
+ finally:
+ if not self.as_sql:
+ assert self.connection is not None
+ if fake_trans is not None:
+ fake_trans.commit()
+ self.connection.execution_options(
+ isolation_level=current_level
+ )
+ self.connection = self.impl.connection = base_connection
+
+ if self.impl.transactional_ddl and self.as_sql:
+ self.impl.emit_begin()
+
+ elif _in_connection_transaction:
+ assert self.connection is not None
+ self._transaction = self.connection.begin()
+
+ def begin_transaction(
+ self, _per_migration: bool = False
+ ) -> Union[_ProxyTransaction, ContextManager[None]]:
+ """Begin a logical transaction for migration operations.
+
+ This method is used within an ``env.py`` script to demarcate where
+ the outer "transaction" for a series of migrations begins. Example::
+
+ def run_migrations_online():
+ connectable = create_engine(...)
+
+ with connectable.connect() as connection:
+ context.configure(
+ connection=connection, target_metadata=target_metadata
+ )
+
+ with context.begin_transaction():
+ context.run_migrations()
+
+ Above, :meth:`.MigrationContext.begin_transaction` is used to demarcate
+ where the outer logical transaction occurs around the
+ :meth:`.MigrationContext.run_migrations` operation.
+
+ A "Logical" transaction means that the operation may or may not
+ correspond to a real database transaction. If the target database
+ supports transactional DDL (or
+ :paramref:`.EnvironmentContext.configure.transactional_ddl` is true),
+ the :paramref:`.EnvironmentContext.configure.transaction_per_migration`
+ flag is not set, and the migration is against a real database
+ connection (as opposed to using "offline" ``--sql`` mode), a real
+ transaction will be started. If ``--sql`` mode is in effect, the
+ operation would instead correspond to a string such as "BEGIN" being
+ emitted to the string output.
+
+ The returned object is a Python context manager that should only be
+ used in the context of a ``with:`` statement as indicated above.
+ The object has no other guaranteed API features present.
+
+ .. seealso::
+
+ :meth:`.MigrationContext.autocommit_block`
+
+ """
+
+ if self._in_external_transaction:
+ return nullcontext()
+
+ if self.impl.transactional_ddl:
+ transaction_now = _per_migration == self._transaction_per_migration
+ else:
+ transaction_now = _per_migration is True
+
+ if not transaction_now:
+ return nullcontext()
+
+ elif not self.impl.transactional_ddl:
+ assert _per_migration
+
+ if self.as_sql:
+ return nullcontext()
+ else:
+ # track our own notion of a "transaction block", which must be
+ # committed when complete. Don't rely upon whether or not the
+ # SQLAlchemy connection reports as "in transaction"; this
+ # because SQLAlchemy future connection features autobegin
+ # behavior, so it may already be in a transaction from our
+ # emitting of queries like "has_version_table", etc. While we
+ # could track these operations as well, that leaves open the
+ # possibility of new operations or other things happening in
+ # the user environment that still may be triggering
+ # "autobegin".
+
+ in_transaction = self._transaction is not None
+
+ if in_transaction:
+ return nullcontext()
+ else:
+ assert self.connection is not None
+ self._transaction = (
+ sqla_compat._safe_begin_connection_transaction(
+ self.connection
+ )
+ )
+ return _ProxyTransaction(self)
+ elif self.as_sql:
+
+ @contextmanager
+ def begin_commit():
+ self.impl.emit_begin()
+ yield
+ self.impl.emit_commit()
+
+ return begin_commit()
+ else:
+ assert self.connection is not None
+ self._transaction = sqla_compat._safe_begin_connection_transaction(
+ self.connection
+ )
+ return _ProxyTransaction(self)
+
+ def get_current_revision(self) -> Optional[str]:
+ """Return the current revision, usually that which is present
+ in the ``alembic_version`` table in the database.
+
+        This method is intended to be used only for a migration stream that
+        does not contain unmerged branches in the target database;
+        if there are multiple branches present, an exception is raised.
+        The :meth:`.MigrationContext.get_current_heads` method should be
+        preferred over this method going forward, in order to be compatible with
+ branch migration support.
+
+ If this :class:`.MigrationContext` was configured in "offline"
+ mode, that is with ``as_sql=True``, the ``starting_rev``
+ parameter is returned instead, if any.
+
+ """
+ heads = self.get_current_heads()
+ if len(heads) == 0:
+ return None
+ elif len(heads) > 1:
+ raise util.CommandError(
+ "Version table '%s' has more than one head present; "
+ "please use get_current_heads()" % self.version_table
+ )
+ else:
+ return heads[0]
+
+ def get_current_heads(self) -> Tuple[str, ...]:
+ """Return a tuple of the current 'head versions' that are represented
+ in the target database.
+
+ For a migration stream without branches, this will be a single
+ value, synonymous with that of
+ :meth:`.MigrationContext.get_current_revision`. However when multiple
+ unmerged branches exist within the target database, the returned tuple
+ will contain a value for each head.
+
+ If this :class:`.MigrationContext` was configured in "offline"
+ mode, that is with ``as_sql=True``, the ``starting_rev``
+ parameter is returned in a one-length tuple.
+
+ If no version table is present, or if there are no revisions
+ present, an empty tuple is returned.
+
+ """
+ if self.as_sql:
+ start_from_rev: Any = self._start_from_rev
+ if start_from_rev == "base":
+ start_from_rev = None
+ elif start_from_rev is not None and self.script:
+ start_from_rev = [
+ cast("Script", self.script.get_revision(sfr)).revision
+ for sfr in util.to_list(start_from_rev)
+ if sfr not in (None, "base")
+ ]
+ return util.to_tuple(start_from_rev, default=())
+ else:
+ if self._start_from_rev:
+ raise util.CommandError(
+ "Can't specify current_rev to context "
+ "when using a database connection"
+ )
+ if not self._has_version_table():
+ return ()
+ assert self.connection is not None
+ return tuple(
+ row[0] for row in self.connection.execute(self._version.select())
+ )
+
+ def _ensure_version_table(self, purge: bool = False) -> None:
+ with sqla_compat._ensure_scope_for_ddl(self.connection):
+ assert self.connection is not None
+ self._version.create(self.connection, checkfirst=True)
+ if purge:
+ assert self.connection is not None
+ self.connection.execute(self._version.delete())
+
+ def _has_version_table(self) -> bool:
+ assert self.connection is not None
+ return sqla_compat._connectable_has_table(
+ self.connection, self.version_table, self.version_table_schema
+ )
+
+ def stamp(self, script_directory: ScriptDirectory, revision: str) -> None:
+ """Stamp the version table with a specific revision.
+
+ This method calculates those branches to which the given revision
+ can apply, and updates those branches as though they were migrated
+ towards that revision (either up or down). If no current branches
+ include the revision, it is added as a new branch head.
+
+ """
+ heads = self.get_current_heads()
+ if not self.as_sql and not heads:
+ self._ensure_version_table()
+ head_maintainer = HeadMaintainer(self, heads)
+ for step in script_directory._stamp_revs(revision, heads):
+ head_maintainer.update_to_step(step)
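+
+    # E.g., a sketch of stamping a database directly to "head", given a
+    # configured context and a script directory obtained via
+    # ``ScriptDirectory.from_config(config)``::
+    #
+    #     context.stamp(script_directory, "head")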
+
+ def run_migrations(self, **kw: Any) -> None:
+ r"""Run the migration scripts established for this
+ :class:`.MigrationContext`, if any.
+
+ The commands in :mod:`alembic.command` will set up a function
+ that is ultimately passed to the :class:`.MigrationContext`
+ as the ``fn`` argument. This function represents the "work"
+ that will be done when :meth:`.MigrationContext.run_migrations`
+ is called, typically from within the ``env.py`` script of the
+ migration environment. The "work function" then provides an iterable
+ of version callables and other version information which
+ in the case of the ``upgrade`` or ``downgrade`` commands are the
+ list of version scripts to invoke. Other commands yield nothing,
+ in the case that a command wants to run some other operation
+ against the database such as the ``current`` or ``stamp`` commands.
+
+ :param \**kw: keyword arguments here will be passed to each
+ migration callable, that is the ``upgrade()`` or ``downgrade()``
+ method within revision scripts.
+
+ """
+ self.impl.start_migrations()
+
+ heads: Tuple[str, ...]
+ if self.purge:
+ if self.as_sql:
+ raise util.CommandError("Can't use --purge with --sql mode")
+ self._ensure_version_table(purge=True)
+ heads = ()
+ else:
+ heads = self.get_current_heads()
+
+ dont_mutate = self.opts.get("dont_mutate", False)
+
+ if not self.as_sql and not heads and not dont_mutate:
+ self._ensure_version_table()
+
+ head_maintainer = HeadMaintainer(self, heads)
+
+ assert self._migrations_fn is not None
+ for step in self._migrations_fn(heads, self):
+ with self.begin_transaction(_per_migration=True):
+ if self.as_sql and not head_maintainer.heads:
+ # for offline mode, include a CREATE TABLE from
+ # the base
+ assert self.connection is not None
+ self._version.create(self.connection)
+ log.info("Running %s", step)
+ if self.as_sql:
+ self.impl.static_output(
+ "-- Running %s" % (step.short_log,)
+ )
+ step.migration_fn(**kw)
+
+ # previously, we wouldn't stamp per migration
+ # if we were in a transaction, however given the more
+ # complex model that involves any number of inserts
+ # and row-targeted updates and deletes, it's simpler for now
+ # just to run the operations on every version
+ head_maintainer.update_to_step(step)
+ for callback in self.on_version_apply_callbacks:
+ callback(
+ ctx=self,
+ step=step.info,
+ heads=set(head_maintainer.heads),
+ run_args=kw,
+ )
+
+ if self.as_sql and not head_maintainer.heads:
+ assert self.connection is not None
+ self._version.drop(self.connection)
+
+ def _in_connection_transaction(self) -> bool:
+ try:
+ meth = self.connection.in_transaction # type:ignore[union-attr]
+ except AttributeError:
+ return False
+ else:
+ return meth()
+
+ def execute(
+ self,
+ sql: Union[Executable, str],
+ execution_options: Optional[dict] = None,
+ ) -> None:
+ """Execute a SQL construct or string statement.
+
+ The underlying execution mechanics are used, that is
+ if this is "offline mode" the SQL is written to the
+ output buffer, otherwise the SQL is emitted on
+ the current SQLAlchemy connection.
+
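+ For example, given a configured :class:`.MigrationContext` named
+ ``migration_context`` (the table and statement are illustrative)::
+
+ migration_context.execute("UPDATE account SET active = 1")
+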
+ """
+ self.impl._exec(sql, execution_options)
+
+ def _stdout_connection(
+ self, connection: Optional[Connection]
+ ) -> MockConnection:
+ def dump(construct, *multiparams, **params):
+ self.impl._exec(construct)
+
+ return MockEngineStrategy.MockConnection(self.dialect, dump)
+
+ @property
+ def bind(self) -> Optional[Connection]:
+ """Return the current "bind".
+
+ In online mode, this is an instance of
+ :class:`sqlalchemy.engine.Connection`, and is suitable
+ for ad-hoc execution of any kind of usage described
+ in SQLAlchemy Core documentation as well as
+ for usage with the :meth:`sqlalchemy.schema.Table.create`
+ and :meth:`sqlalchemy.schema.MetaData.create_all` methods
+ of :class:`~sqlalchemy.schema.Table`,
+ :class:`~sqlalchemy.schema.MetaData`.
+
+ Note that when "standard output" mode is enabled,
+ this bind will be a "mock" connection handler that cannot
+ return results and is only appropriate for a very limited
+ subset of commands.
+
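+ A sketch of ad-hoc use, assuming a configured
+ :class:`.MigrationContext` named ``migration_context`` (the table
+ definition is illustrative)::
+
+ from sqlalchemy import Column, Integer, MetaData, Table
+
+ metadata = MetaData()
+ scratch = Table("scratch", metadata, Column("id", Integer))
+ scratch.create(migration_context.bind)
+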
+ """
+ return self.connection
+
+ @property
+ def config(self) -> Optional[Config]:
+ """Return the :class:`.Config` used by the current environment,
+ if any."""
+
+ if self.environment_context:
+ return self.environment_context.config
+ else:
+ return None
+
+ def _compare_type(
+ self, inspector_column: Column[Any], metadata_column: Column
+ ) -> bool:
+ if self._user_compare_type is False:
+ return False
+
+ if callable(self._user_compare_type):
+ user_value = self._user_compare_type(
+ self,
+ inspector_column,
+ metadata_column,
+ inspector_column.type,
+ metadata_column.type,
+ )
+ if user_value is not None:
+ return user_value
+
+ return self.impl.compare_type(inspector_column, metadata_column)
+
+ def _compare_server_default(
+ self,
+ inspector_column: Column[Any],
+ metadata_column: Column[Any],
+ rendered_metadata_default: Optional[str],
+ rendered_column_default: Optional[str],
+ ) -> bool:
+ if self._user_compare_server_default is False:
+ return False
+
+ if callable(self._user_compare_server_default):
+ user_value = self._user_compare_server_default(
+ self,
+ inspector_column,
+ metadata_column,
+ rendered_column_default,
+ metadata_column.server_default,
+ rendered_metadata_default,
+ )
+ if user_value is not None:
+ return user_value
+
+ return self.impl.compare_server_default(
+ inspector_column,
+ metadata_column,
+ rendered_metadata_default,
+ rendered_column_default,
+ )
+
+
+class HeadMaintainer:
+ def __init__(self, context: MigrationContext, heads: Any) -> None:
+ self.context = context
+ self.heads = set(heads)
+
+ def _insert_version(self, version: str) -> None:
+ assert version not in self.heads
+ self.heads.add(version)
+
+ self.context.impl._exec(
+ self.context._version.insert().values(
+ version_num=literal_column("'%s'" % version)
+ )
+ )
+
+ def _delete_version(self, version: str) -> None:
+ self.heads.remove(version)
+
+ ret = self.context.impl._exec(
+ self.context._version.delete().where(
+ self.context._version.c.version_num
+ == literal_column("'%s'" % version)
+ )
+ )
+
+ if (
+ not self.context.as_sql
+ and self.context.dialect.supports_sane_rowcount
+ and ret is not None
+ and ret.rowcount != 1
+ ):
+ raise util.CommandError(
+ "Online migration expected to match one "
+ "row when deleting '%s' in '%s'; "
+ "%d found"
+ % (version, self.context.version_table, ret.rowcount)
+ )
+
+ def _update_version(self, from_: str, to_: str) -> None:
+ assert to_ not in self.heads
+ self.heads.remove(from_)
+ self.heads.add(to_)
+
+ ret = self.context.impl._exec(
+ self.context._version.update()
+ .values(version_num=literal_column("'%s'" % to_))
+ .where(
+ self.context._version.c.version_num
+ == literal_column("'%s'" % from_)
+ )
+ )
+
+ if (
+ not self.context.as_sql
+ and self.context.dialect.supports_sane_rowcount
+ and ret is not None
+ and ret.rowcount != 1
+ ):
+ raise util.CommandError(
+ "Online migration expected to match one "
+ "row when updating '%s' to '%s' in '%s'; "
+ "%d found"
+ % (from_, to_, self.context.version_table, ret.rowcount)
+ )
+
+ def update_to_step(self, step: Union[RevisionStep, StampStep]) -> None:
+ if step.should_delete_branch(self.heads):
+ vers = step.delete_version_num
+ log.debug("branch delete %s", vers)
+ self._delete_version(vers)
+ elif step.should_create_branch(self.heads):
+ vers = step.insert_version_num
+ log.debug("new branch insert %s", vers)
+ self._insert_version(vers)
+ elif step.should_merge_branches(self.heads):
+ # delete revs, update from rev, update to rev
+ (
+ delete_revs,
+ update_from_rev,
+ update_to_rev,
+ ) = step.merge_branch_idents(self.heads)
+ log.debug(
+ "merge, delete %s, update %s to %s",
+ delete_revs,
+ update_from_rev,
+ update_to_rev,
+ )
+ for delrev in delete_revs:
+ self._delete_version(delrev)
+ self._update_version(update_from_rev, update_to_rev)
+ elif step.should_unmerge_branches(self.heads):
+ (
+ update_from_rev,
+ update_to_rev,
+ insert_revs,
+ ) = step.unmerge_branch_idents(self.heads)
+ log.debug(
+ "unmerge, insert %s, update %s to %s",
+ insert_revs,
+ update_from_rev,
+ update_to_rev,
+ )
+ for insrev in insert_revs:
+ self._insert_version(insrev)
+ self._update_version(update_from_rev, update_to_rev)
+ else:
+ from_, to_ = step.update_version_num(self.heads)
+ log.debug("update %s to %s", from_, to_)
+ self._update_version(from_, to_)
+
+
+class MigrationInfo:
+ """Exposes information about a migration step to a callback listener.
+
+ The :class:`.MigrationInfo` object is available exclusively for the
+ benefit of the :paramref:`.EnvironmentContext.on_version_apply`
+ callback hook.
+
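+ For example, a listener passed as the ``on_version_apply``
+ argument to :meth:`.EnvironmentContext.configure` receives this
+ object via the ``step`` keyword (the body is illustrative)::
+
+ def report(ctx, step, heads, run_args):
+ direction = "upgrade" if step.is_upgrade else "downgrade"
+ print(direction, step.up_revision_id)
+
+ context.configure(
+ connection=connection, on_version_apply=report
+ )
+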
+ """
+
+ is_upgrade: bool
+ """True/False: indicates whether this operation ascends or descends the
+ version tree."""
+
+ is_stamp: bool
+ """True/False: indicates whether this operation is a stamp (i.e. whether
+ it results in any actual database operations)."""
+
+ up_revision_id: Optional[str]
+ """Version string corresponding to :attr:`.Revision.revision`.
+
+ In the case of a stamp operation, it is advised to use the
+ :attr:`.MigrationInfo.up_revision_ids` tuple as a stamp operation can
+ make a single movement from one or more branches down to a single
+ branchpoint, in which case there will be multiple "up" revisions.
+
+ .. seealso::
+
+ :attr:`.MigrationInfo.up_revision_ids`
+
+ """
+
+ up_revision_ids: Tuple[str, ...]
+ """Tuple of version strings corresponding to :attr:`.Revision.revision`.
+
+ In the majority of cases, this tuple will be a single value, synonymous
+ with the scalar value of :attr:`.MigrationInfo.up_revision_id`.
+ It can be multiple revision identifiers only in the case of an
+ ``alembic stamp`` operation which is moving downwards from multiple
+ branches down to their common branch point.
+
+ """
+
+ down_revision_ids: Tuple[str, ...]
+ """Tuple of strings representing the base revisions of this migration step.
+
+ If empty, this represents a root revision; otherwise, the first item
+ corresponds to :attr:`.Revision.down_revision`, and the rest are inferred
+ from dependencies.
+ """
+
+ revision_map: RevisionMap
+ """The revision map inside of which this operation occurs."""
+
+ def __init__(
+ self,
+ revision_map: RevisionMap,
+ is_upgrade: bool,
+ is_stamp: bool,
+ up_revisions: Union[str, Tuple[str, ...]],
+ down_revisions: Union[str, Tuple[str, ...]],
+ ) -> None:
+ self.revision_map = revision_map
+ self.is_upgrade = is_upgrade
+ self.is_stamp = is_stamp
+ self.up_revision_ids = util.to_tuple(up_revisions, default=())
+ if self.up_revision_ids:
+ self.up_revision_id = self.up_revision_ids[0]
+ else:
+ # this should never be the case with
+ # "upgrade", "downgrade", or "stamp" as we are always
+ # measuring movement in terms of at least one upgrade version
+ self.up_revision_id = None
+ self.down_revision_ids = util.to_tuple(down_revisions, default=())
+
+ @property
+ def is_migration(self) -> bool:
+ """True/False: indicates whether this operation is a migration.
+
+ At present this is true if and only if the migration is not a stamp.
+ If other operation types are added in the future, both this attribute
+ and :attr:`~.MigrationInfo.is_stamp` will be false.
+ """
+ return not self.is_stamp
+
+ @property
+ def source_revision_ids(self) -> Tuple[str, ...]:
+ """Active revisions before this migration step is applied."""
+ return (
+ self.down_revision_ids if self.is_upgrade else self.up_revision_ids
+ )
+
+ @property
+ def destination_revision_ids(self) -> Tuple[str, ...]:
+ """Active revisions after this migration step is applied."""
+ return (
+ self.up_revision_ids if self.is_upgrade else self.down_revision_ids
+ )
+
+ @property
+ def up_revision(self) -> Optional[Revision]:
+ """Get :attr:`~.MigrationInfo.up_revision_id` as
+ a :class:`.Revision`.
+
+ """
+ return self.revision_map.get_revision(self.up_revision_id)
+
+ @property
+ def up_revisions(self) -> Tuple[Optional[_RevisionOrBase], ...]:
+ """Get :attr:`~.MigrationInfo.up_revision_ids` as a
+ :class:`.Revision`."""
+ return self.revision_map.get_revisions(self.up_revision_ids)
+
+ @property
+ def down_revisions(self) -> Tuple[Optional[_RevisionOrBase], ...]:
+ """Get :attr:`~.MigrationInfo.down_revision_ids` as a tuple of
+ :class:`Revisions <.Revision>`."""
+ return self.revision_map.get_revisions(self.down_revision_ids)
+
+ @property
+ def source_revisions(self) -> Tuple[Optional[_RevisionOrBase], ...]:
+ """Get :attr:`~MigrationInfo.source_revision_ids` as a tuple of
+ :class:`Revisions <.Revision>`."""
+ return self.revision_map.get_revisions(self.source_revision_ids)
+
+ @property
+ def destination_revisions(self) -> Tuple[Optional[_RevisionOrBase], ...]:
+ """Get :attr:`~MigrationInfo.destination_revision_ids` as a tuple of
+ :class:`Revisions <.Revision>`."""
+ return self.revision_map.get_revisions(self.destination_revision_ids)
+
+
+class MigrationStep:
+ from_revisions_no_deps: Tuple[str, ...]
+ to_revisions_no_deps: Tuple[str, ...]
+ is_upgrade: bool
+ migration_fn: Any
+
+ @property
+ def name(self) -> str:
+ return self.migration_fn.__name__
+
+ @classmethod
+ def upgrade_from_script(
+ cls, revision_map: RevisionMap, script: Script
+ ) -> RevisionStep:
+ return RevisionStep(revision_map, script, True)
+
+ @classmethod
+ def downgrade_from_script(
+ cls, revision_map: RevisionMap, script: Script
+ ) -> RevisionStep:
+ return RevisionStep(revision_map, script, False)
+
+ @property
+ def is_downgrade(self) -> bool:
+ return not self.is_upgrade
+
+ @property
+ def short_log(self) -> str:
+ return "%s %s -> %s" % (
+ self.name,
+ util.format_as_comma(self.from_revisions_no_deps),
+ util.format_as_comma(self.to_revisions_no_deps),
+ )
+
+ def __str__(self):
+ if self.doc:
+ return "%s %s -> %s, %s" % (
+ self.name,
+ util.format_as_comma(self.from_revisions_no_deps),
+ util.format_as_comma(self.to_revisions_no_deps),
+ self.doc,
+ )
+ else:
+ return self.short_log
+
+
+class RevisionStep(MigrationStep):
+ def __init__(
+ self, revision_map: RevisionMap, revision: Script, is_upgrade: bool
+ ) -> None:
+ self.revision_map = revision_map
+ self.revision = revision
+ self.is_upgrade = is_upgrade
+ if is_upgrade:
+ self.migration_fn = (
+ revision.module.upgrade # type:ignore[attr-defined]
+ )
+ else:
+ self.migration_fn = (
+ revision.module.downgrade # type:ignore[attr-defined]
+ )
+
+ def __repr__(self):
+ return "RevisionStep(%r, is_upgrade=%r)" % (
+ self.revision.revision,
+ self.is_upgrade,
+ )
+
+ def __eq__(self, other: object) -> bool:
+ return (
+ isinstance(other, RevisionStep)
+ and other.revision == self.revision
+ and self.is_upgrade == other.is_upgrade
+ )
+
+ @property
+ def doc(self) -> str:
+ return self.revision.doc
+
+ @property
+ def from_revisions(self) -> Tuple[str, ...]:
+ if self.is_upgrade:
+ return self.revision._normalized_down_revisions
+ else:
+ return (self.revision.revision,)
+
+ @property
+ def from_revisions_no_deps( # type:ignore[override]
+ self,
+ ) -> Tuple[str, ...]:
+ if self.is_upgrade:
+ return self.revision._versioned_down_revisions
+ else:
+ return (self.revision.revision,)
+
+ @property
+ def to_revisions(self) -> Tuple[str, ...]:
+ if self.is_upgrade:
+ return (self.revision.revision,)
+ else:
+ return self.revision._normalized_down_revisions
+
+ @property
+ def to_revisions_no_deps( # type:ignore[override]
+ self,
+ ) -> Tuple[str, ...]:
+ if self.is_upgrade:
+ return (self.revision.revision,)
+ else:
+ return self.revision._versioned_down_revisions
+
+ @property
+ def _has_scalar_down_revision(self) -> bool:
+ return len(self.revision._normalized_down_revisions) == 1
+
+ def should_delete_branch(self, heads: Set[str]) -> bool:
+ """A delete is when we are a. in a downgrade and b.
+ we are going to the "base" or we are going to a version that
+ is implied as a dependency on another version that is remaining.
+
+ """
+ if not self.is_downgrade:
+ return False
+
+ if self.revision.revision not in heads:
+ return False
+
+ downrevs = self.revision._normalized_down_revisions
+
+ if not downrevs:
+ # is a base
+ return True
+ else:
+ # determine what the ultimate "to_revisions" for an
+ # unmerge would be. If there are none, then we're a delete.
+ to_revisions = self._unmerge_to_revisions(heads)
+ return not to_revisions
+
+ def merge_branch_idents(
+ self, heads: Set[str]
+ ) -> Tuple[List[str], str, str]:
+ other_heads = set(heads).difference(self.from_revisions)
+
+ if other_heads:
+ ancestors = {
+ r.revision
+ for r in self.revision_map._get_ancestor_nodes(
+ self.revision_map.get_revisions(other_heads), check=False
+ )
+ }
+ from_revisions = list(
+ set(self.from_revisions).difference(ancestors)
+ )
+ else:
+ from_revisions = list(self.from_revisions)
+
+ return (
+ # delete revs, update from rev, update to rev
+ list(from_revisions[0:-1]),
+ from_revisions[-1],
+ self.to_revisions[0],
+ )
+
+ def _unmerge_to_revisions(self, heads: Set[str]) -> Tuple[str, ...]:
+ other_heads = set(heads).difference([self.revision.revision])
+ if other_heads:
+ ancestors = {
+ r.revision
+ for r in self.revision_map._get_ancestor_nodes(
+ self.revision_map.get_revisions(other_heads), check=False
+ )
+ }
+ return tuple(set(self.to_revisions).difference(ancestors))
+ else:
+ return self.to_revisions
+
+ def unmerge_branch_idents(
+ self, heads: Set[str]
+ ) -> Tuple[str, str, Tuple[str, ...]]:
+ to_revisions = self._unmerge_to_revisions(heads)
+
+ return (
+ # update from rev, update to rev, insert revs
+ self.from_revisions[0],
+ to_revisions[-1],
+ to_revisions[0:-1],
+ )
+
+ def should_create_branch(self, heads: Set[str]) -> bool:
+ if not self.is_upgrade:
+ return False
+
+ downrevs = self.revision._normalized_down_revisions
+
+ if not downrevs:
+ # is a base
+ return True
+ else:
+ # none of our downrevs are present, so
+ # we have to insert our version. This is true whether
+ # there is only one downrev or multiple (in the latter
+ # case, we're a merge point).
+ if not heads.intersection(downrevs):
+ return True
+ else:
+ return False
+
+ def should_merge_branches(self, heads: Set[str]) -> bool:
+ if not self.is_upgrade:
+ return False
+
+ downrevs = self.revision._normalized_down_revisions
+
+ if len(downrevs) > 1 and len(heads.intersection(downrevs)) > 1:
+ return True
+
+ return False
+
+ def should_unmerge_branches(self, heads: Set[str]) -> bool:
+ if not self.is_downgrade:
+ return False
+
+ downrevs = self.revision._normalized_down_revisions
+
+ if self.revision.revision in heads and len(downrevs) > 1:
+ return True
+
+ return False
+
+ def update_version_num(self, heads: Set[str]) -> Tuple[str, str]:
+ if not self._has_scalar_down_revision:
+ downrev = heads.intersection(
+ self.revision._normalized_down_revisions
+ )
+ assert (
+ len(downrev) == 1
+ ), "Can't do an UPDATE because downrevision is ambiguous"
+ down_revision = list(downrev)[0]
+ else:
+ down_revision = self.revision._normalized_down_revisions[0]
+
+ if self.is_upgrade:
+ return down_revision, self.revision.revision
+ else:
+ return self.revision.revision, down_revision
+
+ @property
+ def delete_version_num(self) -> str:
+ return self.revision.revision
+
+ @property
+ def insert_version_num(self) -> str:
+ return self.revision.revision
+
+ @property
+ def info(self) -> MigrationInfo:
+ return MigrationInfo(
+ revision_map=self.revision_map,
+ up_revisions=self.revision.revision,
+ down_revisions=self.revision._normalized_down_revisions,
+ is_upgrade=self.is_upgrade,
+ is_stamp=False,
+ )
+
+
+class StampStep(MigrationStep):
+ def __init__(
+ self,
+ from_: Optional[Union[str, Collection[str]]],
+ to_: Optional[Union[str, Collection[str]]],
+ is_upgrade: bool,
+ branch_move: bool,
+ revision_map: Optional[RevisionMap] = None,
+ ) -> None:
+ self.from_: Tuple[str, ...] = util.to_tuple(from_, default=())
+ self.to_: Tuple[str, ...] = util.to_tuple(to_, default=())
+ self.is_upgrade = is_upgrade
+ self.branch_move = branch_move
+ self.migration_fn = self.stamp_revision
+ self.revision_map = revision_map
+
+ doc: Optional[str] = None
+
+ def stamp_revision(self, **kw: Any) -> None:
+ return None
+
+ def __eq__(self, other):
+ return (
+ isinstance(other, StampStep)
+ and other.from_revisions == self.from_revisions
+ and other.to_revisions == self.to_revisions
+ and other.branch_move == self.branch_move
+ and self.is_upgrade == other.is_upgrade
+ )
+
+ @property
+ def from_revisions(self):
+ return self.from_
+
+ @property
+ def to_revisions(self) -> Tuple[str, ...]:
+ return self.to_
+
+ @property
+ def from_revisions_no_deps( # type:ignore[override]
+ self,
+ ) -> Tuple[str, ...]:
+ return self.from_
+
+ @property
+ def to_revisions_no_deps( # type:ignore[override]
+ self,
+ ) -> Tuple[str, ...]:
+ return self.to_
+
+ @property
+ def delete_version_num(self) -> str:
+ assert len(self.from_) == 1
+ return self.from_[0]
+
+ @property
+ def insert_version_num(self) -> str:
+ assert len(self.to_) == 1
+ return self.to_[0]
+
+ def update_version_num(self, heads: Set[str]) -> Tuple[str, str]:
+ assert len(self.from_) == 1
+ assert len(self.to_) == 1
+ return self.from_[0], self.to_[0]
+
+ def merge_branch_idents(
+ self, heads: Union[Set[str], List[str]]
+ ) -> Union[Tuple[List[Any], str, str], Tuple[List[str], str, str]]:
+ return (
+ # delete revs, update from rev, update to rev
+ list(self.from_[0:-1]),
+ self.from_[-1],
+ self.to_[0],
+ )
+
+ def unmerge_branch_idents(
+ self, heads: Set[str]
+ ) -> Tuple[str, str, List[str]]:
+ return (
+ # update from rev, update to rev, insert revs
+ self.from_[0],
+ self.to_[-1],
+ list(self.to_[0:-1]),
+ )
+
+ def should_delete_branch(self, heads: Set[str]) -> bool:
+ # TODO: we probably need to look for self.to_ inside of heads,
+ # in a similar manner as should_create_branch, however we have
+ # no tests for this yet (stamp downgrades w/ branches)
+ return self.is_downgrade and self.branch_move
+
+ def should_create_branch(self, heads: Set[str]) -> Union[Set[str], bool]:
+ return (
+ self.is_upgrade
+ and (self.branch_move or set(self.from_).difference(heads))
+ and set(self.to_).difference(heads)
+ )
+
+ def should_merge_branches(self, heads: Set[str]) -> bool:
+ return len(self.from_) > 1
+
+ def should_unmerge_branches(self, heads: Set[str]) -> bool:
+ return len(self.to_) > 1
+
+ @property
+ def info(self) -> MigrationInfo:
+ up, down = (
+ (self.to_, self.from_)
+ if self.is_upgrade
+ else (self.from_, self.to_)
+ )
+ assert self.revision_map is not None
+ return MigrationInfo(
+ revision_map=self.revision_map,
+ up_revisions=up,
+ down_revisions=down,
+ is_upgrade=self.is_upgrade,
+ is_stamp=True,
+ )
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/script/__init__.py b/Backend/venv/lib/python3.12/site-packages/alembic/script/__init__.py
new file mode 100644
index 00000000..d78f3f1d
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/alembic/script/__init__.py
@@ -0,0 +1,4 @@
+from .base import Script
+from .base import ScriptDirectory
+
+__all__ = ["ScriptDirectory", "Script"]
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/script/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/alembic/script/__pycache__/__init__.cpython-312.pyc
new file mode 100644
index 00000000..d953a514
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/alembic/script/__pycache__/__init__.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/script/__pycache__/base.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/alembic/script/__pycache__/base.cpython-312.pyc
new file mode 100644
index 00000000..c4461488
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/alembic/script/__pycache__/base.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/script/__pycache__/revision.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/alembic/script/__pycache__/revision.cpython-312.pyc
new file mode 100644
index 00000000..8e4feb8e
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/alembic/script/__pycache__/revision.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/script/__pycache__/write_hooks.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/alembic/script/__pycache__/write_hooks.cpython-312.pyc
new file mode 100644
index 00000000..a144d5b1
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/alembic/script/__pycache__/write_hooks.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/script/base.py b/Backend/venv/lib/python3.12/site-packages/alembic/script/base.py
new file mode 100644
index 00000000..d0f9abbd
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/alembic/script/base.py
@@ -0,0 +1,1053 @@
+from __future__ import annotations
+
+from contextlib import contextmanager
+import datetime
+import os
+import re
+import shutil
+import sys
+from types import ModuleType
+from typing import Any
+from typing import cast
+from typing import Iterator
+from typing import List
+from typing import Mapping
+from typing import Optional
+from typing import Sequence
+from typing import Set
+from typing import Tuple
+from typing import TYPE_CHECKING
+from typing import Union
+
+from . import revision
+from . import write_hooks
+from .. import util
+from ..runtime import migration
+from ..util import not_none
+
+if TYPE_CHECKING:
+ from .revision import _GetRevArg
+ from .revision import _RevIdType
+ from .revision import Revision
+ from ..config import Config
+ from ..config import MessagingOptions
+ from ..runtime.migration import RevisionStep
+ from ..runtime.migration import StampStep
+
+try:
+ from dateutil import tz
+except ImportError:
+ tz = None # type: ignore[assignment]
+
+_sourceless_rev_file = re.compile(r"(?!\.\#|__init__)(.*\.py)(c|o)?$")
+_only_source_rev_file = re.compile(r"(?!\.\#|__init__)(.*\.py)$")
+_legacy_rev = re.compile(r"([a-f0-9]+)\.py$")
+_slug_re = re.compile(r"\w+")
+_default_file_template = "%(rev)s_%(slug)s"
+_split_on_space_comma = re.compile(r", *|(?: +)")
+
+_split_on_space_comma_colon = re.compile(r", *|(?: +)|\:")
+
+
+class ScriptDirectory:
+
+ """Provides operations upon an Alembic script directory.
+
+ This object is useful to get information as to current revisions,
+ most notably being able to get at the "head" revision, for schemes
+ that want to test if the current revision in the database is the most
+ recent::
+
+ from alembic.script import ScriptDirectory
+ from alembic.config import Config
+ config = Config()
+ config.set_main_option("script_location", "myapp:migrations")
+ script = ScriptDirectory.from_config(config)
+
+ head_revision = script.get_current_head()
+
+ """
+
+ def __init__(
+ self,
+ dir: str, # noqa
+ file_template: str = _default_file_template,
+ truncate_slug_length: Optional[int] = 40,
+ version_locations: Optional[List[str]] = None,
+ sourceless: bool = False,
+ output_encoding: str = "utf-8",
+ timezone: Optional[str] = None,
+ hook_config: Optional[Mapping[str, str]] = None,
+ recursive_version_locations: bool = False,
+ messaging_opts: MessagingOptions = cast(
+ "MessagingOptions", util.EMPTY_DICT
+ ),
+ ) -> None:
+ self.dir = dir
+ self.file_template = file_template
+ self.version_locations = version_locations
+ self.truncate_slug_length = truncate_slug_length or 40
+ self.sourceless = sourceless
+ self.output_encoding = output_encoding
+ self.revision_map = revision.RevisionMap(self._load_revisions)
+ self.timezone = timezone
+ self.hook_config = hook_config
+ self.recursive_version_locations = recursive_version_locations
+ self.messaging_opts = messaging_opts
+
+ if not os.access(dir, os.F_OK):
+ raise util.CommandError(
+ "Path doesn't exist: %r. Please use "
+ "the 'init' command to create a new "
+ "scripts folder." % os.path.abspath(dir)
+ )
+
+ @property
+ def versions(self) -> str:
+ loc = self._version_locations
+ if len(loc) > 1:
+ raise util.CommandError("Multiple version_locations present")
+ else:
+ return loc[0]
+
+ @util.memoized_property
+ def _version_locations(self):
+ if self.version_locations:
+ return [
+ os.path.abspath(util.coerce_resource_to_filename(location))
+ for location in self.version_locations
+ ]
+ else:
+ return (os.path.abspath(os.path.join(self.dir, "versions")),)
+
+ def _load_revisions(self) -> Iterator[Script]:
+ if self.version_locations:
+ paths = [
+ vers
+ for vers in self._version_locations
+ if os.path.exists(vers)
+ ]
+ else:
+ paths = [self.versions]
+
+ dupes = set()
+ for vers in paths:
+ for file_path in Script._list_py_dir(self, vers):
+ real_path = os.path.realpath(file_path)
+ if real_path in dupes:
+ util.warn(
+ "File %s loaded twice! ignoring. Please ensure "
+ "version_locations is unique." % real_path
+ )
+ continue
+ dupes.add(real_path)
+
+ filename = os.path.basename(real_path)
+ dir_name = os.path.dirname(real_path)
+ script = Script._from_filename(self, dir_name, filename)
+ if script is None:
+ continue
+ yield script
+
+ @classmethod
+ def from_config(cls, config: Config) -> ScriptDirectory:
+ """Produce a new :class:`.ScriptDirectory` given a :class:`.Config`
+ instance.
+
+ The :class:`.Config` need only have the ``script_location`` key
+ present.
+
+ """
+ script_location = config.get_main_option("script_location")
+ if script_location is None:
+ raise util.CommandError(
+ "No 'script_location' key " "found in configuration."
+ )
+ truncate_slug_length: Optional[int]
+ tsl = config.get_main_option("truncate_slug_length")
+ if tsl is not None:
+ truncate_slug_length = int(tsl)
+ else:
+ truncate_slug_length = None
+
+ version_locations_str = config.get_main_option("version_locations")
+ version_locations: Optional[List[str]]
+ if version_locations_str:
+ version_path_separator = config.get_main_option(
+ "version_path_separator"
+ )
+
+ split_on_path = {
+ None: None,
+ "space": " ",
+ "os": os.pathsep,
+ ":": ":",
+ ";": ";",
+ }
+
+ try:
+ split_char: Optional[str] = split_on_path[
+ version_path_separator
+ ]
+ except KeyError as ke:
+ raise ValueError(
+ "'%s' is not a valid value for "
+ "version_path_separator; "
+ "expected 'space', 'os', ':', ';'" % version_path_separator
+ ) from ke
+ else:
+ if split_char is None:
+ # legacy behaviour for backwards compatibility
+ version_locations = _split_on_space_comma.split(
+ version_locations_str
+ )
+ else:
+ version_locations = [
+ x for x in version_locations_str.split(split_char) if x
+ ]
+ else:
+ version_locations = None
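+ # e.g. version_path_separator = "os" splits "loc1:loc2" into
+ # ["loc1", "loc2"] on POSIX; the legacy default (no separator
+ # configured) splits on spaces and commas.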
+
+ prepend_sys_path = config.get_main_option("prepend_sys_path")
+ if prepend_sys_path:
+ sys.path[:0] = list(
+ _split_on_space_comma_colon.split(prepend_sys_path)
+ )
+
+ rvl = config.get_main_option("recursive_version_locations") == "true"
+ return ScriptDirectory(
+ util.coerce_resource_to_filename(script_location),
+ file_template=config.get_main_option(
+ "file_template", _default_file_template
+ ),
+ truncate_slug_length=truncate_slug_length,
+ sourceless=config.get_main_option("sourceless") == "true",
+ output_encoding=config.get_main_option("output_encoding", "utf-8"),
+ version_locations=version_locations,
+ timezone=config.get_main_option("timezone"),
+ hook_config=config.get_section("post_write_hooks", {}),
+ recursive_version_locations=rvl,
+ messaging_opts=config.messaging_opts,
+ )
+
+ @contextmanager
+ def _catch_revision_errors(
+ self,
+ ancestor: Optional[str] = None,
+ multiple_heads: Optional[str] = None,
+ start: Optional[str] = None,
+ end: Optional[str] = None,
+ resolution: Optional[str] = None,
+ ) -> Iterator[None]:
+ try:
+ yield
+ except revision.RangeNotAncestorError as rna:
+ if start is None:
+ start = cast(Any, rna.lower)
+ if end is None:
+ end = cast(Any, rna.upper)
+ if not ancestor:
+ ancestor = (
+ "Requested range %(start)s:%(end)s does not refer to "
+ "ancestor/descendant revisions along the same branch"
+ )
+ ancestor = ancestor % {"start": start, "end": end}
+ raise util.CommandError(ancestor) from rna
+ except revision.MultipleHeads as mh:
+ if not multiple_heads:
+ multiple_heads = (
+ "Multiple head revisions are present for given "
+ "argument '%(head_arg)s'; please "
+ "specify a specific target revision, "
+ "'@%(head_arg)s' to "
+ "narrow to a specific head, or 'heads' for all heads"
+ )
+ multiple_heads = multiple_heads % {
+ "head_arg": end or mh.argument,
+ "heads": util.format_as_comma(mh.heads),
+ }
+ raise util.CommandError(multiple_heads) from mh
+ except revision.ResolutionError as re:
+ if resolution is None:
+ resolution = "Can't locate revision identified by '%s'" % (
+ re.argument
+ )
+ raise util.CommandError(resolution) from re
+ except revision.RevisionError as err:
+ raise util.CommandError(err.args[0]) from err
+
+ def walk_revisions(
+ self, base: str = "base", head: str = "heads"
+ ) -> Iterator[Script]:
+ """Iterate through all revisions.
+
+ :param base: the base revision, or "base" to start from the
+ empty revision.
+
+ :param head: the head revision; defaults to "heads" to indicate
+ all head revisions. May also be "head" to indicate a single
+ head revision.
+
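+ For example, printing each revision from the heads down to the
+ base (``script_directory`` is an assumed instance)::
+
+ for script in script_directory.walk_revisions():
+ print(script.cmd_format(verbose=False))
+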
+ """
+ with self._catch_revision_errors(start=base, end=head):
+ for rev in self.revision_map.iterate_revisions(
+ head, base, inclusive=True, assert_relative_length=False
+ ):
+ yield cast(Script, rev)
+
+ def get_revisions(self, id_: _GetRevArg) -> Tuple[Optional[Script], ...]:
+ """Return the :class:`.Script` instance with the given rev identifier,
+ symbolic name, or sequence of identifiers.
+
+ """
+ with self._catch_revision_errors():
+ return cast(
+ Tuple[Optional[Script], ...],
+ self.revision_map.get_revisions(id_),
+ )
+
+ def get_all_current(self, id_: Tuple[str, ...]) -> Set[Optional[Script]]:
+ with self._catch_revision_errors():
+ return cast(
+ Set[Optional[Script]], self.revision_map._get_all_current(id_)
+ )
+
+ def get_revision(self, id_: str) -> Optional[Script]:
+ """Return the :class:`.Script` instance with the given rev id.
+
+ .. seealso::
+
+ :meth:`.ScriptDirectory.get_revisions`
+
+ """
+
+ with self._catch_revision_errors():
+ return cast(Optional[Script], self.revision_map.get_revision(id_))
+
+ def as_revision_number(
+ self, id_: Optional[str]
+ ) -> Optional[Union[str, Tuple[str, ...]]]:
+ """Convert a symbolic revision, i.e. 'head' or 'base', into
+ an actual revision number."""
+
+ with self._catch_revision_errors():
+ rev, branch_name = self.revision_map._resolve_revision_number(id_)
+
+ if not rev:
+ # convert () to None
+ return None
+ elif id_ == "heads":
+ return rev
+ else:
+ return rev[0]
+
+ def iterate_revisions(
+ self,
+ upper: Union[str, Tuple[str, ...], None],
+ lower: Union[str, Tuple[str, ...], None],
+ **kw: Any,
+ ) -> Iterator[Script]:
+ """Iterate through script revisions, starting at the given
+ upper revision identifier and ending at the lower.
+
+ The traversal uses strictly the ``down_revision``
+ marker inside each migration script, so it is a requirement
+ that ``upper`` is at or above ``lower``;
+ otherwise nothing is returned.
+
+ The iterator yields :class:`.Script` objects.
+
+ .. seealso::
+
+ :meth:`.RevisionMap.iterate_revisions`
+
+ """
+ return cast(
+ Iterator[Script],
+ self.revision_map.iterate_revisions(upper, lower, **kw),
+ )
+
+ def get_current_head(self) -> Optional[str]:
+ """Return the current head revision.
+
+ If the script directory has multiple heads
+ due to branching, an error is raised;
+ :meth:`.ScriptDirectory.get_heads` should be
+ preferred.
+
+ :return: a string revision number.
+
+ .. seealso::
+
+ :meth:`.ScriptDirectory.get_heads`
+
+ """
+ with self._catch_revision_errors(
+ multiple_heads=(
+ "The script directory has multiple heads (due to branching)."
+ "Please use get_heads(), or merge the branches using "
+ "alembic merge."
+ )
+ ):
+ return self.revision_map.get_current_head()
+
+ def get_heads(self) -> List[str]:
+ """Return all "versioned head" revisions as strings.
+
+ This is normally a list of length one,
+ unless branches are present. The
+ :meth:`.ScriptDirectory.get_current_head()` method
+ can be used normally when a script directory
+ has only one head.
+
+ :return: a list of string revision numbers.
+ """
+ return list(self.revision_map.heads)
+
+ def get_base(self) -> Optional[str]:
+ """Return the "base" revision as a string.
+
+ This is the revision number of the script that
+ has a ``down_revision`` of None.
+
+ If the script directory has multiple bases, an error is raised;
+ :meth:`.ScriptDirectory.get_bases` should be
+ preferred.
+
+ """
+ bases = self.get_bases()
+ if len(bases) > 1:
+ raise util.CommandError(
+ "The script directory has multiple bases. "
+ "Please use get_bases()."
+ )
+ elif bases:
+ return bases[0]
+ else:
+ return None
+
+ def get_bases(self) -> List[str]:
+ """return all "base" revisions as strings.
+
+ This is the revision number of all scripts that
+ have a ``down_revision`` of None.
+
+ """
+ return list(self.revision_map.bases)
+
+ def _upgrade_revs(
+ self, destination: str, current_rev: str
+ ) -> List[RevisionStep]:
+ with self._catch_revision_errors(
+ ancestor="Destination %(end)s is not a valid upgrade "
+ "target from current head(s)",
+ end=destination,
+ ):
+ revs = self.iterate_revisions(
+ destination, current_rev, implicit_base=True
+ )
+ return [
+ migration.MigrationStep.upgrade_from_script(
+ self.revision_map, script
+ )
+ for script in reversed(list(revs))
+ ]
+
+ def _downgrade_revs(
+ self, destination: str, current_rev: Optional[str]
+ ) -> List[RevisionStep]:
+ with self._catch_revision_errors(
+ ancestor="Destination %(end)s is not a valid downgrade "
+ "target from current head(s)",
+ end=destination,
+ ):
+ revs = self.iterate_revisions(
+ current_rev, destination, select_for_downgrade=True
+ )
+ return [
+ migration.MigrationStep.downgrade_from_script(
+ self.revision_map, script
+ )
+ for script in revs
+ ]
+
+ def _stamp_revs(
+ self, revision: _RevIdType, heads: _RevIdType
+ ) -> List[StampStep]:
+ with self._catch_revision_errors(
+ multiple_heads="Multiple heads are present; please specify a "
+ "single target revision"
+ ):
+ heads_revs = self.get_revisions(heads)
+
+ steps = []
+
+ if not revision:
+ revision = "base"
+
+ filtered_heads: List[Script] = []
+ for rev in util.to_tuple(revision):
+ if rev:
+ filtered_heads.extend(
+ self.revision_map.filter_for_lineage(
+ cast(Sequence[Script], heads_revs),
+ rev,
+ include_dependencies=True,
+ )
+ )
+ filtered_heads = util.unique_list(filtered_heads)
+
+ dests = self.get_revisions(revision) or [None]
+
+ for dest in dests:
+ if dest is None:
+ # dest is 'base'. Return a "delete branch" migration
+ # for all applicable heads.
+ steps.extend(
+ [
+ migration.StampStep(
+ head.revision,
+ None,
+ False,
+ True,
+ self.revision_map,
+ )
+ for head in filtered_heads
+ ]
+ )
+ continue
+ elif dest in filtered_heads:
+ # the dest is already in the version table, do nothing.
+ continue
+
+ # figure out if the dest is a descendant or an
+ # ancestor of the selected nodes
+ descendants = set(
+ self.revision_map._get_descendant_nodes([dest])
+ )
+ ancestors = set(self.revision_map._get_ancestor_nodes([dest]))
+
+ if descendants.intersection(filtered_heads):
+ # heads are above the target, so this is a downgrade.
+ # we can treat them as a "merge", single step.
+ assert not ancestors.intersection(filtered_heads)
+ todo_heads = [head.revision for head in filtered_heads]
+ step = migration.StampStep(
+ todo_heads,
+ dest.revision,
+ False,
+ False,
+ self.revision_map,
+ )
+ steps.append(step)
+ continue
+ elif ancestors.intersection(filtered_heads):
+ # heads are below the target, so this is an upgrade.
+ # we can treat them as a "merge", single step.
+ todo_heads = [head.revision for head in filtered_heads]
+ step = migration.StampStep(
+ todo_heads,
+ dest.revision,
+ True,
+ False,
+ self.revision_map,
+ )
+ steps.append(step)
+ continue
+ else:
+ # destination is in a branch not represented,
+ # treat it as new branch
+ step = migration.StampStep(
+ (), dest.revision, True, True, self.revision_map
+ )
+ steps.append(step)
+ continue
+
+ return steps
+
+ def run_env(self) -> None:
+ """Run the script environment.
+
+ This basically runs the ``env.py`` script present
+ in the migration environment. It is called exclusively
+ by the command functions in :mod:`alembic.command`.
+
+
+ """
+ util.load_python_file(self.dir, "env.py")
+
+ @property
+ def env_py_location(self):
+ return os.path.abspath(os.path.join(self.dir, "env.py"))
+
+ def _generate_template(self, src: str, dest: str, **kw: Any) -> None:
+ with util.status(
+ f"Generating {os.path.abspath(dest)}", **self.messaging_opts
+ ):
+ util.template_to_file(src, dest, self.output_encoding, **kw)
+
+ def _copy_file(self, src: str, dest: str) -> None:
+ with util.status(
+ f"Generating {os.path.abspath(dest)}", **self.messaging_opts
+ ):
+ shutil.copy(src, dest)
+
+ def _ensure_directory(self, path: str) -> None:
+ path = os.path.abspath(path)
+ if not os.path.exists(path):
+ with util.status(
+ f"Creating directory {path}", **self.messaging_opts
+ ):
+ os.makedirs(path)
+
+ def _generate_create_date(self) -> datetime.datetime:
+ if self.timezone is not None:
+ if tz is None:
+ raise util.CommandError(
+ "The library 'python-dateutil' is required "
+ "for timezone support"
+ )
+ # First, assume correct capitalization
+ tzinfo = tz.gettz(self.timezone)
+ if tzinfo is None:
+ # Fall back to uppercase
+ tzinfo = tz.gettz(self.timezone.upper())
+ if tzinfo is None:
+ raise util.CommandError(
+ "Can't locate timezone: %s" % self.timezone
+ )
+ create_date = (
+ datetime.datetime.utcnow()
+ .replace(tzinfo=tz.tzutc())
+ .astimezone(tzinfo)
+ )
+ else:
+ create_date = datetime.datetime.now()
+ return create_date
+
+ def generate_revision(
+ self,
+ revid: str,
+ message: Optional[str],
+ head: Optional[_RevIdType] = None,
+ splice: Optional[bool] = False,
+ branch_labels: Optional[_RevIdType] = None,
+ version_path: Optional[str] = None,
+ depends_on: Optional[_RevIdType] = None,
+ **kw: Any,
+ ) -> Optional[Script]:
+ """Generate a new revision file.
+
+ This runs the ``script.py.mako`` template, given
+ template arguments, and creates a new file.
+
+ :param revid: String revision id. Typically this
+ comes from ``alembic.util.rev_id()``.
+ :param message: the revision message, the one passed
+ by the -m argument to the ``revision`` command.
+ :param head: the head revision to generate against. Defaults
+ to the current "head" if no branches are present, else raises
+ an exception.
+ :param splice: if True, allow the "head" version to not be an
+ actual head; otherwise, the selected head must be a head
+ (e.g. endpoint) revision.
+
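+ A sketch of direct use (the message is illustrative, and
+ ``alembic.util.rev_id`` generates the id as noted above)::
+
+ from alembic import util
+
+ script = script_directory.generate_revision(
+ util.rev_id(), "add account table"
+ )
+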
+ """
+ if head is None:
+ head = "head"
+
+ try:
+ Script.verify_rev_id(revid)
+ except revision.RevisionError as err:
+ raise util.CommandError(err.args[0]) from err
+
+ with self._catch_revision_errors(
+ multiple_heads=(
+ "Multiple heads are present; please specify the head "
+ "revision on which the new revision should be based, "
+ "or perform a merge."
+ )
+ ):
+ heads = cast(
+ Tuple[Optional["Revision"], ...],
+ self.revision_map.get_revisions(head),
+ )
+ for h in heads:
+ assert h != "base"
+
+ if len(set(heads)) != len(heads):
+ raise util.CommandError("Duplicate head revisions specified")
+
+ create_date = self._generate_create_date()
+
+ if version_path is None:
+ if len(self._version_locations) > 1:
+ for head_ in heads:
+ if head_ is not None:
+ assert isinstance(head_, Script)
+ version_path = os.path.dirname(head_.path)
+ break
+ else:
+ raise util.CommandError(
+ "Multiple version locations present, "
+ "please specify --version-path"
+ )
+ else:
+ version_path = self.versions
+
+ norm_path = os.path.normpath(os.path.abspath(version_path))
+ for vers_path in self._version_locations:
+ if os.path.normpath(vers_path) == norm_path:
+ break
+ else:
+ raise util.CommandError(
+ "Path %s is not represented in current "
+ "version locations" % version_path
+ )
+
+ if self.version_locations:
+ self._ensure_directory(version_path)
+
+ path = self._rev_path(version_path, revid, message, create_date)
+
+ if not splice:
+ for head_ in heads:
+ if head_ is not None and not head_.is_head:
+ raise util.CommandError(
+ "Revision %s is not a head revision; please specify "
+ "--splice to create a new branch from this revision"
+ % head_.revision
+ )
+
+ resolved_depends_on: Optional[List[str]]
+ if depends_on:
+ with self._catch_revision_errors():
+ resolved_depends_on = [
+ dep
+ if dep in rev.branch_labels # maintain branch labels
+ else rev.revision # resolve partial revision identifiers
+ for rev, dep in [
+ (not_none(self.revision_map.get_revision(dep)), dep)
+ for dep in util.to_list(depends_on)
+ ]
+ ]
+ else:
+ resolved_depends_on = None
+
+ self._generate_template(
+ os.path.join(self.dir, "script.py.mako"),
+ path,
+ up_revision=str(revid),
+ down_revision=revision.tuple_rev_as_scalar(
+ tuple(h.revision if h is not None else None for h in heads)
+ ),
+ branch_labels=util.to_tuple(branch_labels),
+ depends_on=revision.tuple_rev_as_scalar(resolved_depends_on),
+ create_date=create_date,
+ comma=util.format_as_comma,
+ message=message if message is not None else ("empty message"),
+ **kw,
+ )
+
+ post_write_hooks = self.hook_config
+ if post_write_hooks:
+ write_hooks._run_hooks(path, post_write_hooks)
+
+ try:
+ script = Script._from_path(self, path)
+ except revision.RevisionError as err:
+ raise util.CommandError(err.args[0]) from err
+ if script is None:
+ return None
+ if branch_labels and not script.branch_labels:
+ raise util.CommandError(
+ "Version %s specified branch_labels %s, however the "
+ "migration file %s does not have them; have you upgraded "
+ "your script.py.mako to include the "
+ "'branch_labels' section?"
+ % (script.revision, branch_labels, script.path)
+ )
+ self.revision_map.add_revision(script)
+ return script
+
+ def _rev_path(
+ self,
+ path: str,
+ rev_id: str,
+ message: Optional[str],
+ create_date: datetime.datetime,
+ ) -> str:
+ epoch = int(create_date.timestamp())
+ slug = "_".join(_slug_re.findall(message or "")).lower()
+ if len(slug) > self.truncate_slug_length:
+ slug = slug[: self.truncate_slug_length].rsplit("_", 1)[0] + "_"
+ filename = "%s.py" % (
+ self.file_template
+ % {
+ "rev": rev_id,
+ "slug": slug,
+ "epoch": epoch,
+ "year": create_date.year,
+ "month": create_date.month,
+ "day": create_date.day,
+ "hour": create_date.hour,
+ "minute": create_date.minute,
+ "second": create_date.second,
+ }
+ )
+ return os.path.join(path, filename)
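+
+ # For example, with the default file_template "%(rev)s_%(slug)s",
+ # rev_id "ae1027a6acf" and message "add account table" produce
+ # "ae1027a6acf_add_account_table.py".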
+
+
+class Script(revision.Revision):
+
+ """Represent a single revision file in a ``versions/`` directory.
+
+ The :class:`.Script` instance is returned by methods
+ such as :meth:`.ScriptDirectory.iterate_revisions`.
+
+ """
+
+ def __init__(self, module: ModuleType, rev_id: str, path: str):
+ self.module = module
+ self.path = path
+ super().__init__(
+ rev_id,
+ module.down_revision, # type: ignore[attr-defined]
+ branch_labels=util.to_tuple(
+ getattr(module, "branch_labels", None), default=()
+ ),
+ dependencies=util.to_tuple(
+ getattr(module, "depends_on", None), default=()
+ ),
+ )
+
+ module: ModuleType
+ """The Python module representing the actual script itself."""
+
+ path: str
+ """Filesystem path of the script."""
+
+ _db_current_indicator: Optional[bool] = None
+ """Utility variable which when set will cause string output to indicate
+ this is a "current" version in some database"""
+
+ @property
+ def doc(self) -> str:
+ """Return the docstring given in the script."""
+
+ return re.split("\n\n", self.longdoc)[0]
+
+ @property
+ def longdoc(self) -> str:
+ """Return the docstring given in the script."""
+
+ doc = self.module.__doc__
+ if doc:
+ if hasattr(self.module, "_alembic_source_encoding"):
+ doc = doc.decode( # type: ignore[attr-defined]
+ self.module._alembic_source_encoding # type: ignore[attr-defined] # noqa
+ )
+ return doc.strip() # type: ignore[union-attr]
+ else:
+ return ""
+
+ @property
+ def log_entry(self) -> str:
+ entry = "Rev: %s%s%s%s%s\n" % (
+ self.revision,
+ " (head)" if self.is_head else "",
+ " (branchpoint)" if self.is_branch_point else "",
+ " (mergepoint)" if self.is_merge_point else "",
+ " (current)" if self._db_current_indicator else "",
+ )
+ if self.is_merge_point:
+ entry += "Merges: %s\n" % (self._format_down_revision(),)
+ else:
+ entry += "Parent: %s\n" % (self._format_down_revision(),)
+
+ if self.dependencies:
+ entry += "Also depends on: %s\n" % (
+ util.format_as_comma(self.dependencies)
+ )
+
+ if self.is_branch_point:
+ entry += "Branches into: %s\n" % (
+ util.format_as_comma(self.nextrev)
+ )
+
+ if self.branch_labels:
+ entry += "Branch names: %s\n" % (
+ util.format_as_comma(self.branch_labels),
+ )
+
+ entry += "Path: %s\n" % (self.path,)
+
+ entry += "\n%s\n" % (
+ "\n".join(" %s" % para for para in self.longdoc.splitlines())
+ )
+ return entry
+
+ def __str__(self):
+ return "%s -> %s%s%s%s, %s" % (
+ self._format_down_revision(),
+ self.revision,
+ " (head)" if self.is_head else "",
+ " (branchpoint)" if self.is_branch_point else "",
+ " (mergepoint)" if self.is_merge_point else "",
+ self.doc,
+ )
+
+ def _head_only(
+ self,
+ include_branches: bool = False,
+ include_doc: bool = False,
+ include_parents: bool = False,
+ tree_indicators: bool = True,
+ head_indicators: bool = True,
+ ) -> str:
+ text = self.revision
+ if include_parents:
+ if self.dependencies:
+ text = "%s (%s) -> %s" % (
+ self._format_down_revision(),
+ util.format_as_comma(self.dependencies),
+ text,
+ )
+ else:
+ text = "%s -> %s" % (self._format_down_revision(), text)
+ assert text is not None
+ if include_branches and self.branch_labels:
+ text += " (%s)" % util.format_as_comma(self.branch_labels)
+ if head_indicators or tree_indicators:
+ text += "%s%s%s" % (
+ " (head)" if self._is_real_head else "",
+ " (effective head)"
+ if self.is_head and not self._is_real_head
+ else "",
+ " (current)" if self._db_current_indicator else "",
+ )
+ if tree_indicators:
+ text += "%s%s" % (
+ " (branchpoint)" if self.is_branch_point else "",
+ " (mergepoint)" if self.is_merge_point else "",
+ )
+ if include_doc:
+ text += ", %s" % self.doc
+ return text
+
+ def cmd_format(
+ self,
+ verbose: bool,
+ include_branches: bool = False,
+ include_doc: bool = False,
+ include_parents: bool = False,
+ tree_indicators: bool = True,
+ ) -> str:
+ if verbose:
+ return self.log_entry
+ else:
+ return self._head_only(
+ include_branches, include_doc, include_parents, tree_indicators
+ )
+
+ def _format_down_revision(self) -> str:
+ if not self.down_revision:
+ return ""
+ else:
+ return util.format_as_comma(self._versioned_down_revisions)
+
+ @classmethod
+ def _from_path(
+ cls, scriptdir: ScriptDirectory, path: str
+ ) -> Optional[Script]:
+ dir_, filename = os.path.split(path)
+ return cls._from_filename(scriptdir, dir_, filename)
+
+ @classmethod
+ def _list_py_dir(cls, scriptdir: ScriptDirectory, path: str) -> List[str]:
+ paths = []
+ for root, dirs, files in os.walk(path, topdown=True):
+ if root.endswith("__pycache__"):
+ # a special case - we may include these files
+ # if a `sourceless` option is specified
+ continue
+
+ for filename in sorted(files):
+ paths.append(os.path.join(root, filename))
+
+ if scriptdir.sourceless:
+ # look for __pycache__
+ py_cache_path = os.path.join(root, "__pycache__")
+ if os.path.exists(py_cache_path):
+ # add all files from __pycache__ whose filename is not
+ # already in the names we got from the version directory.
+ # add as relative paths including __pycache__ token
+ names = {filename.split(".")[0] for filename in files}
+ paths.extend(
+ os.path.join(py_cache_path, pyc)
+ for pyc in os.listdir(py_cache_path)
+ if pyc.split(".")[0] not in names
+ )
+
+ if not scriptdir.recursive_version_locations:
+ break
+
+ # the real script order is defined by revision, but it may be
+ # undefined if there are many files with the same `down_revision`;
+ # for a better user experience (e.g. debugging), we use a
+ # deterministic order
+ dirs.sort()
+
+ return paths
+
+ @classmethod
+ def _from_filename(
+ cls, scriptdir: ScriptDirectory, dir_: str, filename: str
+ ) -> Optional[Script]:
+ if scriptdir.sourceless:
+ py_match = _sourceless_rev_file.match(filename)
+ else:
+ py_match = _only_source_rev_file.match(filename)
+
+ if not py_match:
+ return None
+
+ py_filename = py_match.group(1)
+
+ if scriptdir.sourceless:
+ is_c = py_match.group(2) == "c"
+ is_o = py_match.group(2) == "o"
+ else:
+ is_c = is_o = False
+
+ if is_o or is_c:
+ py_exists = os.path.exists(os.path.join(dir_, py_filename))
+ pyc_exists = os.path.exists(os.path.join(dir_, py_filename + "c"))
+
+ # prefer .py over .pyc because we'd like to get the
+ # source encoding; prefer .pyc over .pyo because we'd like to
+ # have the docstrings which a -OO file would not have
+ if py_exists or (is_o and pyc_exists):
+ return None
+
+ module = util.load_python_file(dir_, filename)
+
+ if not hasattr(module, "revision"):
+ # attempt to get the revision id from the script name,
+ # this for legacy only
+ m = _legacy_rev.match(filename)
+ if not m:
+ raise util.CommandError(
+ "Could not determine revision id from filename %s. "
+ "Be sure the 'revision' variable is "
+ "declared inside the script (please see 'Upgrading "
+ "from Alembic 0.1 to 0.2' in the documentation)."
+ % filename
+ )
+ else:
+ revision = m.group(1)
+ else:
+ revision = module.revision
+ return Script(module, revision, os.path.join(dir_, filename))
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/script/revision.py b/Backend/venv/lib/python3.12/site-packages/alembic/script/revision.py
new file mode 100644
index 00000000..03502644
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/alembic/script/revision.py
@@ -0,0 +1,1708 @@
+from __future__ import annotations
+
+import collections
+import re
+from typing import Any
+from typing import Callable
+from typing import cast
+from typing import Collection
+from typing import Deque
+from typing import Dict
+from typing import FrozenSet
+from typing import Iterable
+from typing import Iterator
+from typing import List
+from typing import Optional
+from typing import overload
+from typing import Sequence
+from typing import Set
+from typing import Tuple
+from typing import TYPE_CHECKING
+from typing import TypeVar
+from typing import Union
+
+from sqlalchemy import util as sqlautil
+
+from .. import util
+from ..util import not_none
+
+if TYPE_CHECKING:
+ from typing import Literal
+
+_RevIdType = Union[str, List[str], Tuple[str, ...]]
+_GetRevArg = Union[
+ str,
+ Iterable[Optional[str]],
+ Iterable[str],
+]
+_RevisionIdentifierType = Union[str, Tuple[str, ...], None]
+_RevisionOrStr = Union["Revision", str]
+_RevisionOrBase = Union["Revision", "Literal['base']"]
+_InterimRevisionMapType = Dict[str, "Revision"]
+_RevisionMapType = Dict[Union[None, str, Tuple[()]], Optional["Revision"]]
+_T = TypeVar("_T")
+_TR = TypeVar("_TR", bound=Optional[_RevisionOrStr])
+
+_relative_destination = re.compile(r"(?:(.+?)@)?(\w+)?((?:\+|-)\d+)")
+_revision_illegal_chars = ["@", "-", "+"]
+
+
+class RevisionError(Exception):
+ pass
+
+
+class RangeNotAncestorError(RevisionError):
+ def __init__(
+ self, lower: _RevisionIdentifierType, upper: _RevisionIdentifierType
+ ) -> None:
+ self.lower = lower
+ self.upper = upper
+ super().__init__(
+ "Revision %s is not an ancestor of revision %s"
+ % (lower or "base", upper or "base")
+ )
+
+
+class MultipleHeads(RevisionError):
+ def __init__(self, heads: Sequence[str], argument: Optional[str]) -> None:
+ self.heads = heads
+ self.argument = argument
+ super().__init__(
+ "Multiple heads are present for given argument '%s'; "
+ "%s" % (argument, ", ".join(heads))
+ )
+
+
+class ResolutionError(RevisionError):
+ def __init__(self, message: str, argument: str) -> None:
+ super().__init__(message)
+ self.argument = argument
+
+
+class CycleDetected(RevisionError):
+ kind = "Cycle"
+
+ def __init__(self, revisions: Sequence[str]) -> None:
+ self.revisions = revisions
+ super().__init__(
+ "%s is detected in revisions (%s)"
+ % (self.kind, ", ".join(revisions))
+ )
+
+
+class DependencyCycleDetected(CycleDetected):
+ kind = "Dependency cycle"
+
+ def __init__(self, revisions: Sequence[str]) -> None:
+ super().__init__(revisions)
+
+
+class LoopDetected(CycleDetected):
+ kind = "Self-loop"
+
+ def __init__(self, revision: str) -> None:
+ super().__init__([revision])
+
+
+class DependencyLoopDetected(DependencyCycleDetected, LoopDetected):
+ kind = "Dependency self-loop"
+
+ def __init__(self, revision: Sequence[str]) -> None:
+ super().__init__(revision)
+
+
+class RevisionMap:
+ """Maintains a map of :class:`.Revision` objects.
+
+ :class:`.RevisionMap` is used by :class:`.ScriptDirectory` to maintain
+ and traverse the collection of :class:`.Script` objects, which are
+ themselves instances of :class:`.Revision`.
+
+ """
+
+ def __init__(self, generator: Callable[[], Iterable[Revision]]) -> None:
+ """Construct a new :class:`.RevisionMap`.
+
+ :param generator: a zero-arg callable that will generate an iterable
+ of :class:`.Revision` instances to be used. These are typically
+ :class:`.Script` subclasses within regular Alembic use.
+
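+ A standalone sketch (the revision ids are made up)::
+
+ from alembic.script.revision import Revision, RevisionMap
+
+ def revisions():
+ yield Revision("a1", None)
+ yield Revision("b2", "a1")
+
+ rev_map = RevisionMap(revisions)
+ assert rev_map.get_current_head() == "b2"
+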
+ """
+ self._generator = generator
+
+ @util.memoized_property
+ def heads(self) -> Tuple[str, ...]:
+ """All "head" revisions as strings.
+
+ This is normally a tuple of length one,
+ unless unmerged branches are present.
+
+ :return: a tuple of string revision numbers.
+
+ """
+ # accessing _revision_map builds the map and, as a side
+ # effect, assigns self.heads (and the other memoized
+ # collections) over these memoized properties
+ self._revision_map
+ return self.heads
+
+ @util.memoized_property
+ def bases(self) -> Tuple[str, ...]:
+ """All "base" revisions as strings.
+
+ These are revisions that have a ``down_revision`` of None,
+ or empty tuple.
+
+ :return: a tuple of string revision numbers.
+
+ """
+ self._revision_map
+ return self.bases
+
+ @util.memoized_property
+ def _real_heads(self) -> Tuple[str, ...]:
+ """All "real" head revisions as strings.
+
+ :return: a tuple of string revision numbers.
+
+ """
+ self._revision_map
+ return self._real_heads
+
+ @util.memoized_property
+ def _real_bases(self) -> Tuple[str, ...]:
+ """All "real" base revisions as strings.
+
+ :return: a tuple of string revision numbers.
+
+ """
+ self._revision_map
+ return self._real_bases
+
+ @util.memoized_property
+ def _revision_map(self) -> _RevisionMapType:
+ """memoized attribute, initializes the revision map from the
+ initial collection.
+
+ """
+ # Ordering required for some tests to pass (but not required in
+ # general)
+ map_: _InterimRevisionMapType = sqlautil.OrderedDict()
+
+ heads: Set[Revision] = sqlautil.OrderedSet()
+ _real_heads: Set[Revision] = sqlautil.OrderedSet()
+ bases: Tuple[Revision, ...] = ()
+ _real_bases: Tuple[Revision, ...] = ()
+
+ has_branch_labels = set()
+ all_revisions = set()
+
+ for revision in self._generator():
+ all_revisions.add(revision)
+
+ if revision.revision in map_:
+ util.warn(
+ "Revision %s is present more than once" % revision.revision
+ )
+ map_[revision.revision] = revision
+ if revision.branch_labels:
+ has_branch_labels.add(revision)
+
+ heads.add(revision)
+ _real_heads.add(revision)
+ if revision.is_base:
+ bases += (revision,)
+ if revision._is_real_base:
+ _real_bases += (revision,)
+
+ # add the branch_labels to the map_. We'll need these
+ # to resolve the dependencies.
+ rev_map = map_.copy()
+ self._map_branch_labels(
+ has_branch_labels, cast(_RevisionMapType, map_)
+ )
+
+ # resolve dependency names from branch labels and symbolic
+ # names
+ self._add_depends_on(all_revisions, cast(_RevisionMapType, map_))
+
+ for rev in map_.values():
+ for downrev in rev._all_down_revisions:
+ if downrev not in map_:
+ util.warn(
+ "Revision %s referenced from %s is not present"
+ % (downrev, rev)
+ )
+ down_revision = map_[downrev]
+ down_revision.add_nextrev(rev)
+ if downrev in rev._versioned_down_revisions:
+ heads.discard(down_revision)
+ _real_heads.discard(down_revision)
+
+ # once the map has downrevisions populated, the dependencies
+ # can be further refined to include only those which are not
+ # already ancestors
+ self._normalize_depends_on(all_revisions, cast(_RevisionMapType, map_))
+ self._detect_cycles(rev_map, heads, bases, _real_heads, _real_bases)
+
+ revision_map: _RevisionMapType = dict(map_.items())
+ revision_map[None] = revision_map[()] = None
+ self.heads = tuple(rev.revision for rev in heads)
+ self._real_heads = tuple(rev.revision for rev in _real_heads)
+ self.bases = tuple(rev.revision for rev in bases)
+ self._real_bases = tuple(rev.revision for rev in _real_bases)
+
+ self._add_branches(has_branch_labels, revision_map)
+ return revision_map
+
+ def _detect_cycles(
+ self,
+ rev_map: _InterimRevisionMapType,
+ heads: Set[Revision],
+ bases: Tuple[Revision, ...],
+ _real_heads: Set[Revision],
+ _real_bases: Tuple[Revision, ...],
+ ) -> None:
+ if not rev_map:
+ return
+ if not heads or not bases:
+ raise CycleDetected(list(rev_map))
+ total_space = {
+ rev.revision
+ for rev in self._iterate_related_revisions(
+ lambda r: r._versioned_down_revisions,
+ heads,
+ map_=cast(_RevisionMapType, rev_map),
+ )
+ }.intersection(
+ rev.revision
+ for rev in self._iterate_related_revisions(
+ lambda r: r.nextrev,
+ bases,
+ map_=cast(_RevisionMapType, rev_map),
+ )
+ )
+ deleted_revs = set(rev_map.keys()) - total_space
+ if deleted_revs:
+ raise CycleDetected(sorted(deleted_revs))
+
+ if not _real_heads or not _real_bases:
+ raise DependencyCycleDetected(list(rev_map))
+ total_space = {
+ rev.revision
+ for rev in self._iterate_related_revisions(
+ lambda r: r._all_down_revisions,
+ _real_heads,
+ map_=cast(_RevisionMapType, rev_map),
+ )
+ }.intersection(
+ rev.revision
+ for rev in self._iterate_related_revisions(
+ lambda r: r._all_nextrev,
+ _real_bases,
+ map_=cast(_RevisionMapType, rev_map),
+ )
+ )
+ deleted_revs = set(rev_map.keys()) - total_space
+ if deleted_revs:
+ raise DependencyCycleDetected(sorted(deleted_revs))
+
+ def _map_branch_labels(
+ self, revisions: Collection[Revision], map_: _RevisionMapType
+ ) -> None:
+ for revision in revisions:
+ if revision.branch_labels:
+ assert revision._orig_branch_labels is not None
+ for branch_label in revision._orig_branch_labels:
+ if branch_label in map_:
+ map_rev = map_[branch_label]
+ assert map_rev is not None
+ raise RevisionError(
+ "Branch name '%s' in revision %s already "
+ "used by revision %s"
+ % (
+ branch_label,
+ revision.revision,
+ map_rev.revision,
+ )
+ )
+ map_[branch_label] = revision
+
+ def _add_branches(
+ self, revisions: Collection[Revision], map_: _RevisionMapType
+ ) -> None:
+ for revision in revisions:
+ if revision.branch_labels:
+ revision.branch_labels.update(revision.branch_labels)
+ for node in self._get_descendant_nodes(
+ [revision], map_, include_dependencies=False
+ ):
+ node.branch_labels.update(revision.branch_labels)
+
+ parent = node
+ while (
+ parent
+ and not parent._is_real_branch_point
+ and not parent.is_merge_point
+ ):
+ parent.branch_labels.update(revision.branch_labels)
+ if parent.down_revision:
+ parent = map_[parent.down_revision]
+ else:
+ break
+
+ def _add_depends_on(
+ self, revisions: Collection[Revision], map_: _RevisionMapType
+ ) -> None:
+ """Resolve the 'dependencies' for each revision in a collection
+ in terms of actual revision ids, as opposed to branch labels or other
+ symbolic names.
+
+ The collection is then assigned to the _resolved_dependencies
+ attribute on each revision object.
+
+ """
+
+ for revision in revisions:
+ if revision.dependencies:
+ deps = [
+ map_[dep] for dep in util.to_tuple(revision.dependencies)
+ ]
+ revision._resolved_dependencies = tuple(
+ [d.revision for d in deps if d is not None]
+ )
+ else:
+ revision._resolved_dependencies = ()
+
+ def _normalize_depends_on(
+ self, revisions: Collection[Revision], map_: _RevisionMapType
+ ) -> None:
+ """Create a collection of "dependencies" that omits dependencies
+ that are already ancestor nodes for each revision in a given
+ collection.
+
+ This builds upon the _resolved_dependencies collection created in the
+ _add_depends_on() method, looking in the fully populated revision map
+ for ancestors, and omitting them as the _resolved_dependencies
+ collection as it is copied to a new collection. The new collection is
+ then assigned to the _normalized_resolved_dependencies attribute on
+ each revision object.
+
+ The collection is then used to determine the immediate "down revision"
+ identifiers for this revision.
+
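+        For example, if revision C depends on A while C's ancestor B also
+        depends on A, then A is omitted from C's normalized collection.
+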
+ """
+
+ for revision in revisions:
+ if revision._resolved_dependencies:
+ normalized_resolved = set(revision._resolved_dependencies)
+ for rev in self._get_ancestor_nodes(
+ [revision],
+ include_dependencies=False,
+ map_=cast(_RevisionMapType, map_),
+ ):
+ if rev is revision:
+ continue
+ elif rev._resolved_dependencies:
+ normalized_resolved.difference_update(
+ rev._resolved_dependencies
+ )
+
+ revision._normalized_resolved_dependencies = tuple(
+ normalized_resolved
+ )
+ else:
+ revision._normalized_resolved_dependencies = ()
+
+ def add_revision(self, revision: Revision, _replace: bool = False) -> None:
+ """add a single revision to an existing map.
+
+        This method is for single-revision use cases; it's not
+ appropriate for fully populating an entire revision map.
+
+ """
+ map_ = self._revision_map
+ if not _replace and revision.revision in map_:
+ util.warn(
+ "Revision %s is present more than once" % revision.revision
+ )
+ elif _replace and revision.revision not in map_:
+ raise Exception("revision %s not in map" % revision.revision)
+
+ map_[revision.revision] = revision
+
+ revisions = [revision]
+ self._add_branches(revisions, map_)
+ self._map_branch_labels(revisions, map_)
+ self._add_depends_on(revisions, map_)
+
+ if revision.is_base:
+ self.bases += (revision.revision,)
+ if revision._is_real_base:
+ self._real_bases += (revision.revision,)
+
+ for downrev in revision._all_down_revisions:
+ if downrev not in map_:
+ util.warn(
+ "Revision %s referenced from %s is not present"
+ % (downrev, revision)
+ )
+ not_none(map_[downrev]).add_nextrev(revision)
+
+ self._normalize_depends_on(revisions, map_)
+
+ if revision._is_real_head:
+ self._real_heads = tuple(
+ head
+ for head in self._real_heads
+ if head
+ not in set(revision._all_down_revisions).union(
+ [revision.revision]
+ )
+ ) + (revision.revision,)
+ if revision.is_head:
+ self.heads = tuple(
+ head
+ for head in self.heads
+ if head
+ not in set(revision._versioned_down_revisions).union(
+ [revision.revision]
+ )
+ ) + (revision.revision,)
+
+ def get_current_head(
+ self, branch_label: Optional[str] = None
+ ) -> Optional[str]:
+ """Return the current head revision.
+
+ If the script directory has multiple heads
+ due to branching, an error is raised;
+ :meth:`.ScriptDirectory.get_heads` should be
+ preferred.
+
+ :param branch_label: optional branch name which will limit the
+ heads considered to those which include that branch_label.
+
+ :return: a string revision number.
+
+ .. seealso::
+
+ :meth:`.ScriptDirectory.get_heads`
+
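+        A hypothetical sketch::
+
+            rmap.get_current_head()            # e.g. "c3d4"
+            rmap.get_current_head("mybranch")  # head within that branch only
+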
+ """
+ current_heads: Sequence[str] = self.heads
+ if branch_label:
+ current_heads = self.filter_for_lineage(
+ current_heads, branch_label
+ )
+ if len(current_heads) > 1:
+ raise MultipleHeads(
+ current_heads,
+ "%s@head" % branch_label if branch_label else "head",
+ )
+
+ if current_heads:
+ return current_heads[0]
+ else:
+ return None
+
+ def _get_base_revisions(self, identifier: str) -> Tuple[str, ...]:
+ return self.filter_for_lineage(self.bases, identifier)
+
+ def get_revisions(
+ self, id_: Optional[_GetRevArg]
+ ) -> Tuple[Optional[_RevisionOrBase], ...]:
+ """Return the :class:`.Revision` instances with the given rev id
+ or identifiers.
+
+ May be given a single identifier, a sequence of identifiers, or the
+ special symbols "head" or "base". The result is a tuple of one
+ or more identifiers, or an empty tuple in the case of "base".
+
+        In the cases where 'head' or 'heads' is requested and the
+        revision map is empty, an empty tuple is returned.
+
+ Supports partial identifiers, where the given identifier
+ is matched against all identifiers that start with the given
+ characters; if there is exactly one match, that determines the
+ full revision.
+
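+        For example (hypothetical ids)::
+
+            rmap.get_revisions("heads")           # all current heads
+            rmap.get_revisions(("a1b2", "c3d4"))  # several at once
+            rmap.get_revisions("c3d")             # unique partial prefix
+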
+ """
+
+ if isinstance(id_, (list, tuple, set, frozenset)):
+ return sum([self.get_revisions(id_elem) for id_elem in id_], ())
+ else:
+ resolved_id, branch_label = self._resolve_revision_number(id_)
+ if len(resolved_id) == 1:
+ try:
+ rint = int(resolved_id[0])
+ if rint < 0:
+ # branch@-n -> walk down from heads
+ select_heads = self.get_revisions("heads")
+ if branch_label is not None:
+ select_heads = tuple(
+ head
+ for head in select_heads
+ if branch_label
+ in is_revision(head).branch_labels
+ )
+ return tuple(
+ self._walk(head, steps=rint)
+ for head in select_heads
+ )
+ except ValueError:
+ # couldn't resolve as integer
+ pass
+ return tuple(
+ self._revision_for_ident(rev_id, branch_label)
+ for rev_id in resolved_id
+ )
+
+ def get_revision(self, id_: Optional[str]) -> Optional[Revision]:
+ """Return the :class:`.Revision` instance with the given rev id.
+
+ If a symbolic name such as "head" or "base" is given, resolves
+ the identifier into the current head or base revision. If the symbolic
+        name refers to multiple revisions, :class:`.MultipleHeads` is raised.
+
+ Supports partial identifiers, where the given identifier
+ is matched against all identifiers that start with the given
+ characters; if there is exactly one match, that determines the
+ full revision.
+
+ """
+
+ resolved_id, branch_label = self._resolve_revision_number(id_)
+ if len(resolved_id) > 1:
+ raise MultipleHeads(resolved_id, id_)
+
+ resolved: Union[str, Tuple[()]] = resolved_id[0] if resolved_id else ()
+ return self._revision_for_ident(resolved, branch_label)
+
+ def _resolve_branch(self, branch_label: str) -> Optional[Revision]:
+ try:
+ branch_rev = self._revision_map[branch_label]
+ except KeyError:
+ try:
+ nonbranch_rev = self._revision_for_ident(branch_label)
+ except ResolutionError as re:
+ raise ResolutionError(
+ "No such branch: '%s'" % branch_label, branch_label
+ ) from re
+
+ else:
+ return nonbranch_rev
+ else:
+ return branch_rev
+
+ def _revision_for_ident(
+ self,
+ resolved_id: Union[str, Tuple[()], None],
+ check_branch: Optional[str] = None,
+ ) -> Optional[Revision]:
+ branch_rev: Optional[Revision]
+ if check_branch:
+ branch_rev = self._resolve_branch(check_branch)
+ else:
+ branch_rev = None
+
+ revision: Union[Optional[Revision], Literal[False]]
+ try:
+ revision = self._revision_map[resolved_id]
+ except KeyError:
+ # break out to avoid misleading py3k stack traces
+ revision = False
+ revs: Sequence[str]
+ if revision is False:
+ assert resolved_id
+ # do a partial lookup
+ revs = [
+ x
+ for x in self._revision_map
+ if x and len(x) > 3 and x.startswith(resolved_id)
+ ]
+
+ if branch_rev:
+ revs = self.filter_for_lineage(revs, check_branch)
+ if not revs:
+ raise ResolutionError(
+ "No such revision or branch '%s'%s"
+ % (
+ resolved_id,
+ (
+ "; please ensure at least four characters are "
+ "present for partial revision identifier matches"
+ if len(resolved_id) < 4
+ else ""
+ ),
+ ),
+ resolved_id,
+ )
+ elif len(revs) > 1:
+ raise ResolutionError(
+ "Multiple revisions start "
+ "with '%s': %s..."
+ % (resolved_id, ", ".join("'%s'" % r for r in revs[0:3])),
+ resolved_id,
+ )
+ else:
+ revision = self._revision_map[revs[0]]
+
+ if check_branch and revision is not None:
+ assert branch_rev is not None
+ assert resolved_id
+ if not self._shares_lineage(
+ revision.revision, branch_rev.revision
+ ):
+ raise ResolutionError(
+ "Revision %s is not a member of branch '%s'"
+ % (revision.revision, check_branch),
+ resolved_id,
+ )
+ return revision
+
+ def _filter_into_branch_heads(
+ self, targets: Iterable[Optional[_RevisionOrBase]]
+ ) -> Set[Optional[_RevisionOrBase]]:
+ targets = set(targets)
+
+ for rev in list(targets):
+ assert rev
+ if targets.intersection(
+ self._get_descendant_nodes([rev], include_dependencies=False)
+ ).difference([rev]):
+ targets.discard(rev)
+ return targets
+
+ def filter_for_lineage(
+ self,
+ targets: Iterable[_TR],
+ check_against: Optional[str],
+ include_dependencies: bool = False,
+ ) -> Tuple[_TR, ...]:
+ id_, branch_label = self._resolve_revision_number(check_against)
+
+ shares = []
+ if branch_label:
+ shares.append(branch_label)
+ if id_:
+ shares.extend(id_)
+
+ return tuple(
+ tg
+ for tg in targets
+ if self._shares_lineage(
+ tg, shares, include_dependencies=include_dependencies
+ )
+ )
+
+ def _shares_lineage(
+ self,
+ target: Optional[_RevisionOrStr],
+ test_against_revs: Sequence[_RevisionOrStr],
+ include_dependencies: bool = False,
+ ) -> bool:
+ if not test_against_revs:
+ return True
+ if not isinstance(target, Revision):
+ resolved_target = not_none(self._revision_for_ident(target))
+ else:
+ resolved_target = target
+
+ resolved_test_against_revs = [
+ self._revision_for_ident(test_against_rev)
+ if not isinstance(test_against_rev, Revision)
+ else test_against_rev
+ for test_against_rev in util.to_tuple(
+ test_against_revs, default=()
+ )
+ ]
+
+ return bool(
+ set(
+ self._get_descendant_nodes(
+ [resolved_target],
+ include_dependencies=include_dependencies,
+ )
+ )
+ .union(
+ self._get_ancestor_nodes(
+ [resolved_target],
+ include_dependencies=include_dependencies,
+ )
+ )
+ .intersection(resolved_test_against_revs)
+ )
+
+ def _resolve_revision_number(
+ self, id_: Optional[_GetRevArg]
+ ) -> Tuple[Tuple[str, ...], Optional[str]]:
+ branch_label: Optional[str]
+ if isinstance(id_, str) and "@" in id_:
+ branch_label, id_ = id_.split("@", 1)
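+            # e.g. "mybranch@head" -> branch_label "mybranch", id_ "head"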
+
+ elif id_ is not None and (
+ (isinstance(id_, tuple) and id_ and not isinstance(id_[0], str))
+ or not isinstance(id_, (str, tuple))
+ ):
+ raise RevisionError(
+ "revision identifier %r is not a string; ensure database "
+ "driver settings are correct" % (id_,)
+ )
+
+ else:
+ branch_label = None
+
+ # ensure map is loaded
+ self._revision_map
+ if id_ == "heads":
+ if branch_label:
+ return (
+ self.filter_for_lineage(self.heads, branch_label),
+ branch_label,
+ )
+ else:
+ return self._real_heads, branch_label
+ elif id_ == "head":
+ current_head = self.get_current_head(branch_label)
+ if current_head:
+ return (current_head,), branch_label
+ else:
+ return (), branch_label
+ elif id_ == "base" or id_ is None:
+ return (), branch_label
+ else:
+ return util.to_tuple(id_, default=None), branch_label
+
+ def iterate_revisions(
+ self,
+ upper: _RevisionIdentifierType,
+ lower: _RevisionIdentifierType,
+ implicit_base: bool = False,
+ inclusive: bool = False,
+ assert_relative_length: bool = True,
+ select_for_downgrade: bool = False,
+ ) -> Iterator[Revision]:
+ """Iterate through script revisions, starting at the given
+ upper revision identifier and ending at the lower.
+
+ The traversal uses strictly the `down_revision`
+ marker inside each migration script, so
+        it is a requirement that upper >= lower;
+        otherwise nothing is yielded.
+
+ The iterator yields :class:`.Revision` objects.
+
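+        A hypothetical sketch::
+
+            for sc in rmap.iterate_revisions("heads", "base"):
+                print(sc.revision)  # starts at the heads, ends at the base
+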
+ """
+ fn: Callable
+ if select_for_downgrade:
+ fn = self._collect_downgrade_revisions
+ else:
+ fn = self._collect_upgrade_revisions
+
+ revisions, heads = fn(
+ upper,
+ lower,
+ inclusive=inclusive,
+ implicit_base=implicit_base,
+ assert_relative_length=assert_relative_length,
+ )
+
+ for node in self._topological_sort(revisions, heads):
+ yield not_none(self.get_revision(node))
+
+ def _get_descendant_nodes(
+ self,
+ targets: Collection[Optional[_RevisionOrBase]],
+ map_: Optional[_RevisionMapType] = None,
+ check: bool = False,
+ omit_immediate_dependencies: bool = False,
+ include_dependencies: bool = True,
+ ) -> Iterator[Any]:
+ if omit_immediate_dependencies:
+
+ def fn(rev):
+ if rev not in targets:
+ return rev._all_nextrev
+ else:
+ return rev.nextrev
+
+ elif include_dependencies:
+
+ def fn(rev):
+ return rev._all_nextrev
+
+ else:
+
+ def fn(rev):
+ return rev.nextrev
+
+ return self._iterate_related_revisions(
+ fn, targets, map_=map_, check=check
+ )
+
+ def _get_ancestor_nodes(
+ self,
+ targets: Collection[Optional[_RevisionOrBase]],
+ map_: Optional[_RevisionMapType] = None,
+ check: bool = False,
+ include_dependencies: bool = True,
+ ) -> Iterator[Revision]:
+ if include_dependencies:
+
+ def fn(rev):
+ return rev._normalized_down_revisions
+
+ else:
+
+ def fn(rev):
+ return rev._versioned_down_revisions
+
+ return self._iterate_related_revisions(
+ fn, targets, map_=map_, check=check
+ )
+
+ def _iterate_related_revisions(
+ self,
+ fn: Callable,
+ targets: Collection[Optional[_RevisionOrBase]],
+ map_: Optional[_RevisionMapType],
+ check: bool = False,
+ ) -> Iterator[Revision]:
+ if map_ is None:
+ map_ = self._revision_map
+
+ seen = set()
+ todo: Deque[Revision] = collections.deque()
+ for target_for in targets:
+ target = is_revision(target_for)
+ todo.append(target)
+ if check:
+ per_target = set()
+
+ while todo:
+ rev = todo.pop()
+ if check:
+ per_target.add(rev)
+
+ if rev in seen:
+ continue
+ seen.add(rev)
+ # Check for map errors before collecting.
+ for rev_id in fn(rev):
+ next_rev = map_[rev_id]
+ assert next_rev is not None
+ if next_rev.revision != rev_id:
+ raise RevisionError(
+ "Dependency resolution failed; broken map"
+ )
+ todo.append(next_rev)
+ yield rev
+ if check:
+ overlaps = per_target.intersection(targets).difference(
+ [target]
+ )
+ if overlaps:
+ raise RevisionError(
+ "Requested revision %s overlaps with "
+ "other requested revisions %s"
+ % (
+ target.revision,
+ ", ".join(r.revision for r in overlaps),
+ )
+ )
+
+ def _topological_sort(
+ self,
+ revisions: Collection[Revision],
+ heads: Any,
+ ) -> List[str]:
+ """Yield revision ids of a collection of Revision objects in
+ topological sorted order (i.e. revisions always come after their
+ down_revisions and dependencies). Uses the order of keys in
+ _revision_map to sort.
+
+ """
+
+ id_to_rev = self._revision_map
+
+ def get_ancestors(rev_id):
+ return {
+ r.revision
+ for r in self._get_ancestor_nodes([id_to_rev[rev_id]])
+ }
+
+ todo = {d.revision for d in revisions}
+
+ # Use revision map (ordered dict) key order to pre-sort.
+ inserted_order = list(self._revision_map)
+
+ current_heads = list(
+ sorted(
+ {d.revision for d in heads if d.revision in todo},
+ key=inserted_order.index,
+ )
+ )
+ ancestors_by_idx = [get_ancestors(rev_id) for rev_id in current_heads]
+
+ output = []
+
+ current_candidate_idx = 0
+ while current_heads:
+ candidate = current_heads[current_candidate_idx]
+
+ for check_head_index, ancestors in enumerate(ancestors_by_idx):
+ # scan all the heads. see if we can continue walking
+ # down the current branch indicated by current_candidate_idx.
+ if (
+ check_head_index != current_candidate_idx
+ and candidate in ancestors
+ ):
+ current_candidate_idx = check_head_index
+ # nope, another head is dependent on us, they have
+ # to be traversed first
+ break
+ else:
+ # yup, we can emit
+ if candidate in todo:
+ output.append(candidate)
+ todo.remove(candidate)
+
+ # now update the heads with our ancestors.
+
+ candidate_rev = id_to_rev[candidate]
+ assert candidate_rev is not None
+
+ heads_to_add = [
+ r
+ for r in candidate_rev._normalized_down_revisions
+ if r in todo and r not in current_heads
+ ]
+
+ if not heads_to_add:
+ # no ancestors, so remove this head from the list
+ del current_heads[current_candidate_idx]
+ del ancestors_by_idx[current_candidate_idx]
+ current_candidate_idx = max(current_candidate_idx - 1, 0)
+ else:
+ if (
+ not candidate_rev._normalized_resolved_dependencies
+ and len(candidate_rev._versioned_down_revisions) == 1
+ ):
+ current_heads[current_candidate_idx] = heads_to_add[0]
+
+ # for plain movement down a revision line without
+ # any mergepoints, branchpoints, or deps, we
+ # can update the ancestors collection directly
+ # by popping out the candidate we just emitted
+ ancestors_by_idx[current_candidate_idx].discard(
+ candidate
+ )
+
+ else:
+ # otherwise recalculate it again, things get
+ # complicated otherwise. This can possibly be
+ # improved to not run the whole ancestor thing
+ # each time but it was getting complicated
+ current_heads[current_candidate_idx] = heads_to_add[0]
+ current_heads.extend(heads_to_add[1:])
+ ancestors_by_idx[
+ current_candidate_idx
+ ] = get_ancestors(heads_to_add[0])
+ ancestors_by_idx.extend(
+ get_ancestors(head) for head in heads_to_add[1:]
+ )
+
+ assert not todo
+ return output
+
+ def _walk(
+ self,
+ start: Optional[Union[str, Revision]],
+ steps: int,
+ branch_label: Optional[str] = None,
+ no_overwalk: bool = True,
+ ) -> Optional[_RevisionOrBase]:
+ """
+ Walk the requested number of :steps up (steps > 0) or down (steps < 0)
+ the revision tree.
+
+ :branch_label is used to select branches only when walking up.
+
+ If the walk goes past the boundaries of the tree and :no_overwalk is
+ True, None is returned, otherwise the walk terminates early.
+
+ A RevisionError is raised if there is no unambiguous revision to
+ walk to.
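+
+        e.g. ``_walk("c3d4", steps=-1)`` (hypothetical id) returns the
+        revision one step down from ``c3d4``.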
+ """
+ initial: Optional[_RevisionOrBase]
+ if isinstance(start, str):
+ initial = self.get_revision(start)
+ else:
+ initial = start
+
+ children: Sequence[Optional[_RevisionOrBase]]
+ for _ in range(abs(steps)):
+ if steps > 0:
+ assert initial != "base"
+ # Walk up
+ walk_up = [
+ is_revision(rev)
+ for rev in self.get_revisions(
+ self.bases if initial is None else initial.nextrev
+ )
+ ]
+ if branch_label:
+ children = self.filter_for_lineage(walk_up, branch_label)
+ else:
+ children = walk_up
+ else:
+ # Walk down
+ if initial == "base":
+ children = ()
+ else:
+ children = self.get_revisions(
+ self.heads
+ if initial is None
+ else initial.down_revision
+ )
+ if not children:
+ children = ("base",)
+ if not children:
+ # This will return an invalid result if no_overwalk, otherwise
+ # further steps will stay where we are.
+ ret = None if no_overwalk else initial
+ return ret
+ elif len(children) > 1:
+ raise RevisionError("Ambiguous walk")
+ initial = children[0]
+
+ return initial
+
+ def _parse_downgrade_target(
+ self,
+ current_revisions: _RevisionIdentifierType,
+ target: _RevisionIdentifierType,
+ assert_relative_length: bool,
+ ) -> Tuple[Optional[str], Optional[_RevisionOrBase]]:
+ """
+ Parse downgrade command syntax :target to retrieve the target revision
+ and branch label (if any) given the :current_revisions stamp of the
+ database.
+
+ Returns a tuple (branch_label, target_revision) where branch_label
+ is a string from the command specifying the branch to consider (or
+ None if no branch given), and target_revision is a Revision object
+        which the command refers to. target_revision is None if the command
+ refers to 'base'. The target may be specified in absolute form, or
+ relative to :current_revisions.
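+
+        e.g. targets such as ``-1`` (relative to the current revision),
+        ``c3d4-2`` (two steps down from a hypothetical revision ``c3d4``)
+        or ``mybranch@-1``.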
+ """
+ if target is None:
+ return None, None
+ assert isinstance(
+ target, str
+ ), "Expected downgrade target in string form"
+ match = _relative_destination.match(target)
+ if match:
+ branch_label, symbol, relative = match.groups()
+ rel_int = int(relative)
+ if rel_int >= 0:
+ if symbol is None:
+ # Downgrading to current + n is not valid.
+ raise RevisionError(
+ "Relative revision %s didn't "
+ "produce %d migrations" % (relative, abs(rel_int))
+ )
+ # Find target revision relative to given symbol.
+ rev = self._walk(
+ symbol,
+ rel_int,
+ branch_label,
+ no_overwalk=assert_relative_length,
+ )
+ if rev is None:
+ raise RevisionError("Walked too far")
+ return branch_label, rev
+ else:
+ relative_revision = symbol is None
+ if relative_revision:
+ # Find target revision relative to current state.
+ if branch_label:
+ cr_tuple = util.to_tuple(current_revisions)
+ symbol_list: Sequence[str]
+ symbol_list = self.filter_for_lineage(
+ cr_tuple, branch_label
+ )
+ if not symbol_list:
+ # check the case where there are multiple branches
+                        # but there is currently a single head, since all
+                        # other branch heads are dependent on the current
+                        # single head.
+ all_current = cast(
+ Set[Revision], self._get_all_current(cr_tuple)
+ )
+ sl_all_current = self.filter_for_lineage(
+ all_current, branch_label
+ )
+ symbol_list = [
+ r.revision if r else r # type: ignore[misc]
+ for r in sl_all_current
+ ]
+
+ assert len(symbol_list) == 1
+ symbol = symbol_list[0]
+ else:
+ current_revisions = util.to_tuple(current_revisions)
+ if not current_revisions:
+ raise RevisionError(
+ "Relative revision %s didn't "
+ "produce %d migrations"
+ % (relative, abs(rel_int))
+ )
+ # Have to check uniques here for duplicate rows test.
+ if len(set(current_revisions)) > 1:
+ util.warn(
+ "downgrade -1 from multiple heads is "
+ "ambiguous; "
+ "this usage will be disallowed in a future "
+ "release."
+ )
+ symbol = current_revisions[0]
+ # Restrict iteration to just the selected branch when
+ # ambiguous branches are involved.
+ branch_label = symbol
+ # Walk down the tree to find downgrade target.
+ rev = self._walk(
+ start=self.get_revision(symbol)
+ if branch_label is None
+ else self.get_revision("%s@%s" % (branch_label, symbol)),
+ steps=rel_int,
+ no_overwalk=assert_relative_length,
+ )
+ if rev is None:
+ if relative_revision:
+ raise RevisionError(
+ "Relative revision %s didn't "
+ "produce %d migrations" % (relative, abs(rel_int))
+ )
+ else:
+ raise RevisionError("Walked too far")
+ return branch_label, rev
+
+ # No relative destination given, revision specified is absolute.
+ branch_label, _, symbol = target.rpartition("@")
+ if not branch_label:
+ branch_label = None # type:ignore[assignment]
+ return branch_label, self.get_revision(symbol)
+
+ def _parse_upgrade_target(
+ self,
+ current_revisions: _RevisionIdentifierType,
+ target: _RevisionIdentifierType,
+ assert_relative_length: bool,
+ ) -> Tuple[Optional[_RevisionOrBase], ...]:
+ """
+ Parse upgrade command syntax :target to retrieve the target revision
+ and given the :current_revisions stamp of the database.
+
+ Returns a tuple of Revision objects which should be iterated/upgraded
+ to. The target may be specified in absolute form, or relative to
+ :current_revisions.
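+
+        e.g. ``+2`` walks two steps up from the current revision, while
+        ``c3d4+1`` (hypothetical id) walks one step up from that revision.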
+ """
+ if isinstance(target, str):
+ match = _relative_destination.match(target)
+ else:
+ match = None
+
+ if not match:
+ # No relative destination, target is absolute.
+ return self.get_revisions(target)
+
+ current_revisions_tup: Union[str, Tuple[Optional[str], ...], None]
+ current_revisions_tup = util.to_tuple(current_revisions)
+
+ branch_label, symbol, relative_str = match.groups()
+ relative = int(relative_str)
+ if relative > 0:
+ if symbol is None:
+ if not current_revisions_tup:
+ current_revisions_tup = (None,)
+ # Try to filter to a single target (avoid ambiguous branches).
+ start_revs = current_revisions_tup
+ if branch_label:
+ start_revs = self.filter_for_lineage(
+ self.get_revisions(current_revisions_tup), # type: ignore[arg-type] # noqa: E501
+ branch_label,
+ )
+ if not start_revs:
+ # The requested branch is not a head, so we need to
+ # backtrack to find a branchpoint.
+ active_on_branch = self.filter_for_lineage(
+ self._get_ancestor_nodes(
+ self.get_revisions(current_revisions_tup)
+ ),
+ branch_label,
+ )
+ # Find the tips of this set of revisions (revisions
+ # without children within the set).
+ start_revs = tuple(
+ {rev.revision for rev in active_on_branch}
+ - {
+ down
+ for rev in active_on_branch
+ for down in rev._normalized_down_revisions
+ }
+ )
+ if not start_revs:
+                        # We need to go right back to base to find
+ # a starting point for this branch.
+ start_revs = (None,)
+ if len(start_revs) > 1:
+ raise RevisionError(
+ "Ambiguous upgrade from multiple current revisions"
+ )
+ # Walk up from unique target revision.
+ rev = self._walk(
+ start=start_revs[0],
+ steps=relative,
+ branch_label=branch_label,
+ no_overwalk=assert_relative_length,
+ )
+ if rev is None:
+ raise RevisionError(
+ "Relative revision %s didn't "
+ "produce %d migrations" % (relative_str, abs(relative))
+ )
+ return (rev,)
+ else:
+ # Walk is relative to a given revision, not the current state.
+ return (
+ self._walk(
+ start=self.get_revision(symbol),
+ steps=relative,
+ branch_label=branch_label,
+ no_overwalk=assert_relative_length,
+ ),
+ )
+ else:
+ if symbol is None:
+ # Upgrading to current - n is not valid.
+ raise RevisionError(
+ "Relative revision %s didn't "
+ "produce %d migrations" % (relative, abs(relative))
+ )
+ return (
+ self._walk(
+ start=self.get_revision(symbol)
+ if branch_label is None
+ else self.get_revision("%s@%s" % (branch_label, symbol)),
+ steps=relative,
+ no_overwalk=assert_relative_length,
+ ),
+ )
+
+ def _collect_downgrade_revisions(
+ self,
+ upper: _RevisionIdentifierType,
+ target: _RevisionIdentifierType,
+ inclusive: bool,
+ implicit_base: bool,
+ assert_relative_length: bool,
+ ) -> Any:
+ """
+ Compute the set of current revisions specified by :upper, and the
+ downgrade target specified by :target. Return all dependents of target
+ which are currently active.
+
+ :inclusive=True includes the target revision in the set
+ """
+
+ branch_label, target_revision = self._parse_downgrade_target(
+ current_revisions=upper,
+ target=target,
+ assert_relative_length=assert_relative_length,
+ )
+ if target_revision == "base":
+ target_revision = None
+ assert target_revision is None or isinstance(target_revision, Revision)
+
+ roots: List[Revision]
+ # Find candidates to drop.
+ if target_revision is None:
+ # Downgrading back to base: find all tree roots.
+ roots = [
+ rev
+ for rev in self._revision_map.values()
+ if rev is not None and rev.down_revision is None
+ ]
+ elif inclusive:
+ # inclusive implies target revision should also be dropped
+ roots = [target_revision]
+ else:
+ # Downgrading to fixed target: find all direct children.
+ roots = [
+ is_revision(rev)
+ for rev in self.get_revisions(target_revision.nextrev)
+ ]
+
+ if branch_label and len(roots) > 1:
+ # Need to filter roots.
+ ancestors = {
+ rev.revision
+ for rev in self._get_ancestor_nodes(
+ [self._resolve_branch(branch_label)],
+ include_dependencies=False,
+ )
+ }
+ # Intersection gives the root revisions we are trying to
+ # rollback with the downgrade.
+ roots = [
+ is_revision(rev)
+ for rev in self.get_revisions(
+ {rev.revision for rev in roots}.intersection(ancestors)
+ )
+ ]
+
+ # Ensure we didn't throw everything away when filtering branches.
+ if len(roots) == 0:
+ raise RevisionError(
+ "Not a valid downgrade target from current heads"
+ )
+
+ heads = self.get_revisions(upper)
+
+ # Aim is to drop :branch_revision; to do so we also need to drop its
+        # descendants and anything dependent on it.
+ downgrade_revisions = set(
+ self._get_descendant_nodes(
+ roots,
+ include_dependencies=True,
+ omit_immediate_dependencies=False,
+ )
+ )
+ active_revisions = set(
+ self._get_ancestor_nodes(heads, include_dependencies=True)
+ )
+
+ # Emit revisions to drop in reverse topological sorted order.
+ downgrade_revisions.intersection_update(active_revisions)
+
+ if implicit_base:
+ # Wind other branches back to base.
+ downgrade_revisions.update(
+ active_revisions.difference(self._get_ancestor_nodes(roots))
+ )
+
+ if (
+ target_revision is not None
+ and not downgrade_revisions
+ and target_revision not in heads
+ ):
+ # Empty intersection: target revs are not present.
+
+ raise RangeNotAncestorError("Nothing to drop", upper)
+
+ return downgrade_revisions, heads
+
+ def _collect_upgrade_revisions(
+ self,
+ upper: _RevisionIdentifierType,
+ lower: _RevisionIdentifierType,
+ inclusive: bool,
+ implicit_base: bool,
+ assert_relative_length: bool,
+ ) -> Tuple[Set[Revision], Tuple[Optional[_RevisionOrBase]]]:
+ """
+ Compute the set of required revisions specified by :upper, and the
+ current set of active revisions specified by :lower. Find the
+ difference between the two to compute the required upgrades.
+
+ :inclusive=True includes the current/lower revisions in the set
+
+ :implicit_base=False only returns revisions which are downstream
+ of the current/lower revisions. Dependencies from branches with
+ different bases will not be included.
+ """
+ targets: Collection[Revision] = [
+ is_revision(rev)
+ for rev in self._parse_upgrade_target(
+ current_revisions=lower,
+ target=upper,
+ assert_relative_length=assert_relative_length,
+ )
+ ]
+
+ # assert type(targets) is tuple, "targets should be a tuple"
+
+        # Handle named bases (e.g. branch@... -> heads should only produce
+ # targets on the given branch)
+ if isinstance(lower, str) and "@" in lower:
+ branch, _, _ = lower.partition("@")
+ branch_rev = self.get_revision(branch)
+ if branch_rev is not None and branch_rev.revision == branch:
+ # A revision was used as a label; get its branch instead
+ assert len(branch_rev.branch_labels) == 1
+ branch = next(iter(branch_rev.branch_labels))
+ targets = {
+ need for need in targets if branch in need.branch_labels
+ }
+
+ required_node_set = set(
+ self._get_ancestor_nodes(
+ targets, check=True, include_dependencies=True
+ )
+ ).union(targets)
+
+ current_revisions = self.get_revisions(lower)
+ if not implicit_base and any(
+ rev not in required_node_set
+ for rev in current_revisions
+ if rev is not None
+ ):
+ raise RangeNotAncestorError(lower, upper)
+ assert (
+ type(current_revisions) is tuple
+ ), "current_revisions should be a tuple"
+
+ # Special case where lower = a relative value (get_revisions can't
+ # find it)
+ if current_revisions and current_revisions[0] is None:
+ _, rev = self._parse_downgrade_target(
+ current_revisions=upper,
+ target=lower,
+ assert_relative_length=assert_relative_length,
+ )
+ assert rev
+ if rev == "base":
+ current_revisions = tuple()
+ lower = None
+ else:
+ current_revisions = (rev,)
+ lower = rev.revision
+
+ current_node_set = set(
+ self._get_ancestor_nodes(
+ current_revisions, check=True, include_dependencies=True
+ )
+ ).union(current_revisions)
+
+ needs = required_node_set.difference(current_node_set)
+
+ # Include the lower revision (=current_revisions?) in the iteration
+ if inclusive:
+ needs.update(is_revision(rev) for rev in self.get_revisions(lower))
+ # By default, base is implicit as we want all dependencies returned.
+ # Base is also implicit if lower = base
+ # implicit_base=False -> only return direct downstreams of
+ # current_revisions
+ if current_revisions and not implicit_base:
+ lower_descendents = self._get_descendant_nodes(
+ [is_revision(rev) for rev in current_revisions],
+ check=True,
+ include_dependencies=False,
+ )
+ needs.intersection_update(lower_descendents)
+
+ return needs, tuple(targets) # type:ignore[return-value]
+
+ def _get_all_current(
+ self, id_: Tuple[str, ...]
+ ) -> Set[Optional[_RevisionOrBase]]:
+ top_revs: Set[Optional[_RevisionOrBase]]
+ top_revs = set(self.get_revisions(id_))
+ top_revs.update(
+ self._get_ancestor_nodes(list(top_revs), include_dependencies=True)
+ )
+ return self._filter_into_branch_heads(top_revs)
+
+
+class Revision:
+ """Base class for revisioned objects.
+
+ The :class:`.Revision` class is the base of the more public-facing
+ :class:`.Script` object, which represents a migration script.
+ The mechanics of revision management and traversal are encapsulated
+ within :class:`.Revision`, while :class:`.Script` applies this logic
+ to Python files in a version directory.
+
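+    A minimal sketch (hypothetical ids)::
+
+        rev = Revision("c3d4", down_revision="a1b2")
+        rev.is_base        # False
+        rev.down_revision  # "a1b2"
+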
+ """
+
+ nextrev: FrozenSet[str] = frozenset()
+ """following revisions, based on down_revision only."""
+
+ _all_nextrev: FrozenSet[str] = frozenset()
+
+ revision: str = None # type: ignore[assignment]
+ """The string revision number."""
+
+ down_revision: Optional[_RevIdType] = None
+ """The ``down_revision`` identifier(s) within the migration script.
+
+ Note that the total set of "down" revisions is
+ down_revision + dependencies.
+
+ """
+
+ dependencies: Optional[_RevIdType] = None
+ """Additional revisions which this revision is dependent on.
+
+ From a migration standpoint, these dependencies are added to the
+ down_revision to form the full iteration. However, the separation
+ of down_revision from "dependencies" is to assist in navigating
+ a history that contains many branches, typically a multi-root scenario.
+
+ """
+
+ branch_labels: Set[str] = None # type: ignore[assignment]
+ """Optional string/tuple of symbolic names to apply to this
+ revision's branch"""
+
+ _resolved_dependencies: Tuple[str, ...]
+ _normalized_resolved_dependencies: Tuple[str, ...]
+
+ @classmethod
+ def verify_rev_id(cls, revision: str) -> None:
+ illegal_chars = set(revision).intersection(_revision_illegal_chars)
+ if illegal_chars:
+ raise RevisionError(
+ "Character(s) '%s' not allowed in revision identifier '%s'"
+ % (", ".join(sorted(illegal_chars)), revision)
+ )
+
+ def __init__(
+ self,
+ revision: str,
+ down_revision: Optional[Union[str, Tuple[str, ...]]],
+ dependencies: Optional[Union[str, Tuple[str, ...]]] = None,
+ branch_labels: Optional[Union[str, Tuple[str, ...]]] = None,
+ ) -> None:
+ if down_revision and revision in util.to_tuple(down_revision):
+ raise LoopDetected(revision)
+ elif dependencies is not None and revision in util.to_tuple(
+ dependencies
+ ):
+ raise DependencyLoopDetected(revision)
+
+ self.verify_rev_id(revision)
+ self.revision = revision
+ self.down_revision = tuple_rev_as_scalar(util.to_tuple(down_revision))
+ self.dependencies = tuple_rev_as_scalar(util.to_tuple(dependencies))
+ self._orig_branch_labels = util.to_tuple(branch_labels, default=())
+ self.branch_labels = set(self._orig_branch_labels)
+
+ def __repr__(self) -> str:
+ args = [repr(self.revision), repr(self.down_revision)]
+ if self.dependencies:
+ args.append("dependencies=%r" % (self.dependencies,))
+ if self.branch_labels:
+ args.append("branch_labels=%r" % (self.branch_labels,))
+ return "%s(%s)" % (self.__class__.__name__, ", ".join(args))
+
+ def add_nextrev(self, revision: Revision) -> None:
+ self._all_nextrev = self._all_nextrev.union([revision.revision])
+ if self.revision in revision._versioned_down_revisions:
+ self.nextrev = self.nextrev.union([revision.revision])
+
+ @property
+ def _all_down_revisions(self) -> Tuple[str, ...]:
+ return util.dedupe_tuple(
+ util.to_tuple(self.down_revision, default=())
+ + self._resolved_dependencies
+ )
+
+ @property
+ def _normalized_down_revisions(self) -> Tuple[str, ...]:
+ """return immediate down revisions for a rev, omitting dependencies
+ that are still dependencies of ancestors.
+
+ """
+ return util.dedupe_tuple(
+ util.to_tuple(self.down_revision, default=())
+ + self._normalized_resolved_dependencies
+ )
+
+ @property
+ def _versioned_down_revisions(self) -> Tuple[str, ...]:
+ return util.to_tuple(self.down_revision, default=())
+
+ @property
+ def is_head(self) -> bool:
+ """Return True if this :class:`.Revision` is a 'head' revision.
+
+ This is determined based on whether any other :class:`.Script`
+ within the :class:`.ScriptDirectory` refers to this
+ :class:`.Script`. Multiple heads can be present.
+
+ """
+ return not bool(self.nextrev)
+
+ @property
+ def _is_real_head(self) -> bool:
+ return not bool(self._all_nextrev)
+
+ @property
+ def is_base(self) -> bool:
+ """Return True if this :class:`.Revision` is a 'base' revision."""
+
+ return self.down_revision is None
+
+ @property
+ def _is_real_base(self) -> bool:
+ """Return True if this :class:`.Revision` is a "real" base revision,
+ e.g. that it has no dependencies either."""
+
+ # we use self.dependencies here because this is called up
+        # in initialization where _resolved_dependencies isn't set up
+ # yet
+ return self.down_revision is None and self.dependencies is None
+
+ @property
+ def is_branch_point(self) -> bool:
+ """Return True if this :class:`.Script` is a branch point.
+
+ A branchpoint is defined as a :class:`.Script` which is referred
+    to by more than one succeeding :class:`.Script`; that is, more
+ than one :class:`.Script` has a `down_revision` identifier pointing
+ here.
+
+ """
+ return len(self.nextrev) > 1
+
+ @property
+ def _is_real_branch_point(self) -> bool:
+ """Return True if this :class:`.Script` is a 'real' branch point,
+ taking into account dependencies as well.
+
+ """
+ return len(self._all_nextrev) > 1
+
+ @property
+ def is_merge_point(self) -> bool:
+ """Return True if this :class:`.Script` is a merge point."""
+
+ return len(self._versioned_down_revisions) > 1
+
+
+@overload
+def tuple_rev_as_scalar(rev: None) -> None:
+ ...
+
+
+@overload
+def tuple_rev_as_scalar(
+ rev: Union[Tuple[_T, ...], List[_T]]
+) -> Union[_T, Tuple[_T, ...], List[_T]]:
+ ...
+
+
+def tuple_rev_as_scalar(
+ rev: Optional[Sequence[_T]],
+) -> Union[_T, Sequence[_T], None]:
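+    # ("a",) -> "a"; ("a", "b") -> ("a", "b"); () or None -> None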
+ if not rev:
+ return None
+ elif len(rev) == 1:
+ return rev[0]
+ else:
+ return rev
+
+
+def is_revision(rev: Any) -> Revision:
+ assert isinstance(rev, Revision)
+ return rev
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/script/write_hooks.py b/Backend/venv/lib/python3.12/site-packages/alembic/script/write_hooks.py
new file mode 100644
index 00000000..b44ce644
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/alembic/script/write_hooks.py
@@ -0,0 +1,176 @@
+from __future__ import annotations
+
+import shlex
+import subprocess
+import sys
+from typing import Any
+from typing import Callable
+from typing import Dict
+from typing import List
+from typing import Mapping
+from typing import Optional
+from typing import Union
+
+from .. import util
+from ..util import compat
+
+
+REVISION_SCRIPT_TOKEN = "REVISION_SCRIPT_FILENAME"
+
+_registry: dict = {}
+
+
+def register(name: str) -> Callable:
+ """A function decorator that will register that function as a write hook.
+
+ See the documentation linked below for an example.
+
+ .. seealso::
+
+ :ref:`post_write_hooks_custom`
+
+
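+    A hypothetical sketch::
+
+        @register("my_hook")
+        def my_hook(filename, options):
+            # rewrite the generated revision file at ``filename`` in place
+            ...
+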
+ """
+
+ def decorate(fn):
+ _registry[name] = fn
+ return fn
+
+ return decorate
+
+
+def _invoke(
+ name: str, revision: str, options: Mapping[str, Union[str, int]]
+) -> Any:
+ """Invokes the formatter registered for the given name.
+
+ :param name: The name of a formatter in the registry
+    :param revision: string path to the revision script being processed
+ :param options: A dict containing kwargs passed to the
+ specified formatter.
+ :raises: :class:`alembic.util.CommandError`
+ """
+ try:
+ hook = _registry[name]
+ except KeyError as ke:
+ raise util.CommandError(
+ f"No formatter with name '{name}' registered"
+ ) from ke
+ else:
+ return hook(revision, options)
+
+
+def _run_hooks(path: str, hook_config: Mapping[str, str]) -> None:
+ """Invoke hooks for a generated revision."""
+
+ from .base import _split_on_space_comma
+
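+    # hook_config is the [post_write_hooks] ini section, e.g.
+    #   {"hooks": "black", "black.type": "console_scripts",
+    #    "black.entrypoint": "black",
+    #    "black.options": "-l 79 REVISION_SCRIPT_FILENAME"}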
+ names = _split_on_space_comma.split(hook_config.get("hooks", ""))
+
+ for name in names:
+ if not name:
+ continue
+ opts = {
+ key[len(name) + 1 :]: hook_config[key]
+ for key in hook_config
+ if key.startswith(name + ".")
+ }
+ opts["_hook_name"] = name
+ try:
+ type_ = opts["type"]
+ except KeyError as ke:
+ raise util.CommandError(
+ f"Key {name}.type is required for post write hook {name!r}"
+ ) from ke
+ else:
+ with util.status(
+ f"Running post write hook {name!r}", newline=True
+ ):
+ _invoke(type_, path, opts)
+
+
+def _parse_cmdline_options(cmdline_options_str: str, path: str) -> List[str]:
+ """Parse options from a string into a list.
+
+ Also substitutes the revision script token with the actual filename of
+ the revision script.
+
+ If the revision script token doesn't occur in the options string, it is
+ automatically prepended.
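+
+    e.g. ``"-l 79 REVISION_SCRIPT_FILENAME"`` with path ``foo.py`` becomes
+    ``["-l", "79", "foo.py"]``.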
+ """
+ if REVISION_SCRIPT_TOKEN not in cmdline_options_str:
+ cmdline_options_str = REVISION_SCRIPT_TOKEN + " " + cmdline_options_str
+ cmdline_options_list = shlex.split(
+ cmdline_options_str, posix=compat.is_posix
+ )
+ cmdline_options_list = [
+ option.replace(REVISION_SCRIPT_TOKEN, path)
+ for option in cmdline_options_list
+ ]
+ return cmdline_options_list
+
+
+@register("console_scripts")
+def console_scripts(
+ path: str, options: dict, ignore_output: bool = False
+) -> None:
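+    # runner for hooks configured with ``type = console_scripts``: runs the
+    # named entrypoint in a subprocess against the revision file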
+ try:
+ entrypoint_name = options["entrypoint"]
+ except KeyError as ke:
+ raise util.CommandError(
+ f"Key {options['_hook_name']}.entrypoint is required for post "
+ f"write hook {options['_hook_name']!r}"
+ ) from ke
+ for entry in compat.importlib_metadata_get("console_scripts"):
+ if entry.name == entrypoint_name:
+ impl: Any = entry
+ break
+ else:
+ raise util.CommandError(
+ f"Could not find entrypoint console_scripts.{entrypoint_name}"
+ )
+ cwd: Optional[str] = options.get("cwd", None)
+ cmdline_options_str = options.get("options", "")
+ cmdline_options_list = _parse_cmdline_options(cmdline_options_str, path)
+
+ kw: Dict[str, Any] = {}
+ if ignore_output:
+ kw["stdout"] = kw["stderr"] = subprocess.DEVNULL
+
+ subprocess.run(
+ [
+ sys.executable,
+ "-c",
+ f"import {impl.module}; {impl.module}.{impl.attr}()",
+ ]
+ + cmdline_options_list,
+ cwd=cwd,
+ **kw,
+ )
+
+
+@register("exec")
+def exec_(path: str, options: dict, ignore_output: bool = False) -> None:
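+    # runner for hooks configured with ``type = exec``: invokes an arbitrary
+    # executable (e.g. a linter binary) against the revision file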
+ try:
+ executable = options["executable"]
+ except KeyError as ke:
+ raise util.CommandError(
+ f"Key {options['_hook_name']}.executable is required for post "
+ f"write hook {options['_hook_name']!r}"
+ ) from ke
+ cwd: Optional[str] = options.get("cwd", None)
+ cmdline_options_str = options.get("options", "")
+ cmdline_options_list = _parse_cmdline_options(cmdline_options_str, path)
+
+ kw: Dict[str, Any] = {}
+ if ignore_output:
+ kw["stdout"] = kw["stderr"] = subprocess.DEVNULL
+
+ subprocess.run(
+ [
+ executable,
+ *cmdline_options_list,
+ ],
+ cwd=cwd,
+ **kw,
+ )
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/templates/async/README b/Backend/venv/lib/python3.12/site-packages/alembic/templates/async/README
new file mode 100644
index 00000000..e0d0858f
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/alembic/templates/async/README
@@ -0,0 +1 @@
+Generic single-database configuration with an async dbapi.
\ No newline at end of file
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/templates/async/__pycache__/env.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/alembic/templates/async/__pycache__/env.cpython-312.pyc
new file mode 100644
index 00000000..987f0138
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/alembic/templates/async/__pycache__/env.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/templates/async/alembic.ini.mako b/Backend/venv/lib/python3.12/site-packages/alembic/templates/async/alembic.ini.mako
new file mode 100644
index 00000000..bc9f2d50
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/alembic/templates/async/alembic.ini.mako
@@ -0,0 +1,114 @@
+# A generic, single database configuration.
+
+[alembic]
+# path to migration scripts
+script_location = ${script_location}
+
+# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
+# Uncomment the line below if you want the files to be prepended with date and time
+# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
+
+# sys.path path, will be prepended to sys.path if present.
+# defaults to the current working directory.
+prepend_sys_path = .
+
+# timezone to use when rendering the date within the migration file
+# as well as the filename.
+# If specified, requires the python-dateutil library that can be
+# installed by adding `alembic[tz]` to the pip requirements
+# string value is passed to dateutil.tz.gettz()
+# leave blank for localtime
+# timezone =
+
+# max length of characters to apply to the
+# "slug" field
+# truncate_slug_length = 40
+
+# set to 'true' to run the environment during
+# the 'revision' command, regardless of autogenerate
+# revision_environment = false
+
+# set to 'true' to allow .pyc and .pyo files without
+# a source .py file to be detected as revisions in the
+# versions/ directory
+# sourceless = false
+
+# version location specification; This defaults
+# to ${script_location}/versions. When using multiple version
+# directories, initial revisions must be specified with --version-path.
+# The path separator used here should be the separator specified by "version_path_separator" below.
+# version_locations = %(here)s/bar:%(here)s/bat:${script_location}/versions
+
+# version path separator; As mentioned above, this is the character used to split
+# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
+# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
+# Valid values for version_path_separator are:
+#
+# version_path_separator = :
+# version_path_separator = ;
+# version_path_separator = space
+version_path_separator = os # Use os.pathsep. Default configuration used for new projects.
+
+# set to 'true' to search source files recursively
+# in each "version_locations" directory
+# new in Alembic version 1.10
+# recursive_version_locations = false
+
+# the output encoding used when revision files
+# are written from script.py.mako
+# output_encoding = utf-8
+
+sqlalchemy.url = driver://user:pass@localhost/dbname
+
+
+[post_write_hooks]
+# post_write_hooks defines scripts or Python functions that are run
+# on newly generated revision scripts. See the documentation for further
+# detail and examples
+
+# format using "black" - use the console_scripts runner, against the "black" entrypoint
+# hooks = black
+# black.type = console_scripts
+# black.entrypoint = black
+# black.options = -l 79 REVISION_SCRIPT_FILENAME
+
+# lint with attempts to fix using "ruff" - use the exec runner, execute a binary
+# hooks = ruff
+# ruff.type = exec
+# ruff.executable = %(here)s/.venv/bin/ruff
+# ruff.options = --fix REVISION_SCRIPT_FILENAME
+
+# Logging configuration
+[loggers]
+keys = root,sqlalchemy,alembic
+
+[handlers]
+keys = console
+
+[formatters]
+keys = generic
+
+[logger_root]
+level = WARN
+handlers = console
+qualname =
+
+[logger_sqlalchemy]
+level = WARN
+handlers =
+qualname = sqlalchemy.engine
+
+[logger_alembic]
+level = INFO
+handlers =
+qualname = alembic
+
+[handler_console]
+class = StreamHandler
+args = (sys.stderr,)
+level = NOTSET
+formatter = generic
+
+[formatter_generic]
+format = %(levelname)-5.5s [%(name)s] %(message)s
+datefmt = %H:%M:%S
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/templates/async/env.py b/Backend/venv/lib/python3.12/site-packages/alembic/templates/async/env.py
new file mode 100644
index 00000000..9f2d5194
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/alembic/templates/async/env.py
@@ -0,0 +1,89 @@
+import asyncio
+from logging.config import fileConfig
+
+from sqlalchemy import pool
+from sqlalchemy.engine import Connection
+from sqlalchemy.ext.asyncio import async_engine_from_config
+
+from alembic import context
+
+# this is the Alembic Config object, which provides
+# access to the values within the .ini file in use.
+config = context.config
+
+# Interpret the config file for Python logging.
+# This line essentially sets up the loggers.
+if config.config_file_name is not None:
+ fileConfig(config.config_file_name)
+
+# add your model's MetaData object here
+# for 'autogenerate' support
+# from myapp import mymodel
+# target_metadata = mymodel.Base.metadata
+target_metadata = None
+
+# other values from the config, defined by the needs of env.py,
+# can be acquired:
+# my_important_option = config.get_main_option("my_important_option")
+# ... etc.
+
+
+def run_migrations_offline() -> None:
+ """Run migrations in 'offline' mode.
+
+ This configures the context with just a URL
+ and not an Engine, though an Engine is acceptable
+ here as well. By skipping the Engine creation
+ we don't even need a DBAPI to be available.
+
+ Calls to context.execute() here emit the given string to the
+ script output.
+
+ """
+ url = config.get_main_option("sqlalchemy.url")
+ context.configure(
+ url=url,
+ target_metadata=target_metadata,
+ literal_binds=True,
+ dialect_opts={"paramstyle": "named"},
+ )
+
+ with context.begin_transaction():
+ context.run_migrations()
+
+
+def do_run_migrations(connection: Connection) -> None:
+ context.configure(connection=connection, target_metadata=target_metadata)
+
+ with context.begin_transaction():
+ context.run_migrations()
+
+
+async def run_async_migrations() -> None:
+ """In this scenario we need to create an Engine
+ and associate a connection with the context.
+
+ """
+
+ connectable = async_engine_from_config(
+ config.get_section(config.config_ini_section, {}),
+ prefix="sqlalchemy.",
+ poolclass=pool.NullPool,
+ )
+
+ async with connectable.connect() as connection:
+ await connection.run_sync(do_run_migrations)
+
+ await connectable.dispose()
+
+
+def run_migrations_online() -> None:
+ """Run migrations in 'online' mode."""
+
+ asyncio.run(run_async_migrations())
+
+
+if context.is_offline_mode():
+ run_migrations_offline()
+else:
+ run_migrations_online()
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/templates/async/script.py.mako b/Backend/venv/lib/python3.12/site-packages/alembic/templates/async/script.py.mako
new file mode 100644
index 00000000..fbc4b07d
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/alembic/templates/async/script.py.mako
@@ -0,0 +1,26 @@
+"""${message}
+
+Revision ID: ${up_revision}
+Revises: ${down_revision | comma,n}
+Create Date: ${create_date}
+
+"""
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+${imports if imports else ""}
+
+# revision identifiers, used by Alembic.
+revision: str = ${repr(up_revision)}
+down_revision: Union[str, None] = ${repr(down_revision)}
+branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
+depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
+
+
+def upgrade() -> None:
+ ${upgrades if upgrades else "pass"}
+
+
+def downgrade() -> None:
+ ${downgrades if downgrades else "pass"}
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/templates/generic/README b/Backend/venv/lib/python3.12/site-packages/alembic/templates/generic/README
new file mode 100644
index 00000000..98e4f9c4
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/alembic/templates/generic/README
@@ -0,0 +1 @@
+Generic single-database configuration.
\ No newline at end of file
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/templates/generic/__pycache__/env.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/alembic/templates/generic/__pycache__/env.cpython-312.pyc
new file mode 100644
index 00000000..95e3eeec
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/alembic/templates/generic/__pycache__/env.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/templates/generic/alembic.ini.mako b/Backend/venv/lib/python3.12/site-packages/alembic/templates/generic/alembic.ini.mako
new file mode 100644
index 00000000..c18ddb4e
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/alembic/templates/generic/alembic.ini.mako
@@ -0,0 +1,116 @@
+# A generic, single database configuration.
+
+[alembic]
+# path to migration scripts
+script_location = ${script_location}
+
+# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
+# Uncomment the line below if you want the files to be prepended with date and time
+# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
+# for all available tokens
+# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
+
+# sys.path path, will be prepended to sys.path if present.
+# defaults to the current working directory.
+prepend_sys_path = .
+
+# timezone to use when rendering the date within the migration file
+# as well as the filename.
+# If specified, requires the python-dateutil library that can be
+# installed by adding `alembic[tz]` to the pip requirements
+# string value is passed to dateutil.tz.gettz()
+# leave blank for localtime
+# timezone =
+
+# max length of characters to apply to the
+# "slug" field
+# truncate_slug_length = 40
+
+# set to 'true' to run the environment during
+# the 'revision' command, regardless of autogenerate
+# revision_environment = false
+
+# set to 'true' to allow .pyc and .pyo files without
+# a source .py file to be detected as revisions in the
+# versions/ directory
+# sourceless = false
+
+# version location specification; This defaults
+# to ${script_location}/versions. When using multiple version
+# directories, initial revisions must be specified with --version-path.
+# The path separator used here should be the separator specified by "version_path_separator" below.
+# version_locations = %(here)s/bar:%(here)s/bat:${script_location}/versions
+
+# version path separator; As mentioned above, this is the character used to split
+# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
+# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
+# Valid values for version_path_separator are:
+#
+# version_path_separator = :
+# version_path_separator = ;
+# version_path_separator = space
+version_path_separator = os # Use os.pathsep. Default configuration used for new projects.
+
+# set to 'true' to search source files recursively
+# in each "version_locations" directory
+# new in Alembic version 1.10
+# recursive_version_locations = false
+
+# the output encoding used when revision files
+# are written from script.py.mako
+# output_encoding = utf-8
+
+sqlalchemy.url = driver://user:pass@localhost/dbname
+
+
+[post_write_hooks]
+# post_write_hooks defines scripts or Python functions that are run
+# on newly generated revision scripts. See the documentation for further
+# detail and examples
+
+# format using "black" - use the console_scripts runner, against the "black" entrypoint
+# hooks = black
+# black.type = console_scripts
+# black.entrypoint = black
+# black.options = -l 79 REVISION_SCRIPT_FILENAME
+
+# lint with attempts to fix using "ruff" - use the exec runner, execute a binary
+# hooks = ruff
+# ruff.type = exec
+# ruff.executable = %(here)s/.venv/bin/ruff
+# ruff.options = --fix REVISION_SCRIPT_FILENAME
+
+# Logging configuration
+[loggers]
+keys = root,sqlalchemy,alembic
+
+[handlers]
+keys = console
+
+[formatters]
+keys = generic
+
+[logger_root]
+level = WARN
+handlers = console
+qualname =
+
+[logger_sqlalchemy]
+level = WARN
+handlers =
+qualname = sqlalchemy.engine
+
+[logger_alembic]
+level = INFO
+handlers =
+qualname = alembic
+
+[handler_console]
+class = StreamHandler
+args = (sys.stderr,)
+level = NOTSET
+formatter = generic
+
+[formatter_generic]
+format = %(levelname)-5.5s [%(name)s] %(message)s
+datefmt = %H:%M:%S
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/templates/generic/env.py b/Backend/venv/lib/python3.12/site-packages/alembic/templates/generic/env.py
new file mode 100644
index 00000000..36112a3c
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/alembic/templates/generic/env.py
@@ -0,0 +1,82 @@
+from logging.config import fileConfig
+
+from sqlalchemy import engine_from_config
+from sqlalchemy import pool
+
+from alembic import context
+
+# this is the Alembic Config object, which provides
+# access to the values within the .ini file in use.
+config = context.config
+
+# Interpret the config file for Python logging.
+# This line essentially sets up the loggers.
+if config.config_file_name is not None:
+ fileConfig(config.config_file_name)
+
+# add your model's MetaData object here
+# for 'autogenerate' support
+# from myapp import mymodel
+# target_metadata = mymodel.Base.metadata
+target_metadata = None
+
+# other values from the config, defined by the needs of env.py,
+# can be acquired:
+# my_important_option = config.get_main_option("my_important_option")
+# ... etc.
+
+
+def run_migrations_offline() -> None:
+ """Run migrations in 'offline' mode.
+
+ This configures the context with just a URL
+ and not an Engine, though an Engine is acceptable
+ here as well. By skipping the Engine creation
+ we don't even need a DBAPI to be available.
+
+ Calls to context.execute() here emit the given string to the
+ script output.
+
+ """
+ url = config.get_main_option("sqlalchemy.url")
+ context.configure(
+ url=url,
+ target_metadata=target_metadata,
+ literal_binds=True,
+ dialect_opts={"paramstyle": "named"},
+ )
+
+ with context.begin_transaction():
+ context.run_migrations()
+
+
+def run_migrations_online() -> None:
+ """Run migrations in 'online' mode.
+
+ In this scenario we need to create an Engine
+ and associate a connection with the context.
+
+ """
+ connectable = engine_from_config(
+ config.get_section(config.config_ini_section, {}),
+ prefix="sqlalchemy.",
+ poolclass=pool.NullPool,
+ )
+
+ with connectable.connect() as connection:
+ context.configure(
+ connection=connection, target_metadata=target_metadata
+ )
+
+ with context.begin_transaction():
+ context.run_migrations()
+
+
+if context.is_offline_mode():
+ run_migrations_offline()
+else:
+ run_migrations_online()
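+
+# Typical entry points (assumption: invoked through the alembic CLI):
+#   "alembic upgrade head"        -> run_migrations_online()
+#   "alembic upgrade head --sql"  -> run_migrations_offline()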
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/templates/generic/script.py.mako b/Backend/venv/lib/python3.12/site-packages/alembic/templates/generic/script.py.mako
new file mode 100644
index 00000000..fbc4b07d
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/alembic/templates/generic/script.py.mako
@@ -0,0 +1,26 @@
+"""${message}
+
+Revision ID: ${up_revision}
+Revises: ${down_revision | comma,n}
+Create Date: ${create_date}
+
+"""
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+${imports if imports else ""}
+
+# revision identifiers, used by Alembic.
+revision: str = ${repr(up_revision)}
+down_revision: Union[str, None] = ${repr(down_revision)}
+branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
+depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
+
+
+def upgrade() -> None:
+ ${upgrades if upgrades else "pass"}
+
+
+def downgrade() -> None:
+ ${downgrades if downgrades else "pass"}
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/templates/multidb/README b/Backend/venv/lib/python3.12/site-packages/alembic/templates/multidb/README
new file mode 100644
index 00000000..f046ec91
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/alembic/templates/multidb/README
@@ -0,0 +1,21 @@
+Rudimentary multi-database configuration.
+
+Multi-DB isn't vastly different from generic. The primary difference is that it
+will run the migrations N times (depending on how many databases you have
+configured), providing one engine name and associated context for each run.
+
+That engine name then lets each migration restrict what it runs to just the
+operations appropriate for that engine. You can see this behavior within
+the mako template.
+
+In the provided configuration, you'll need to supply a `databases` option in
+alembic's config, and an `sqlalchemy.url` for each engine name.
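+
+For example, with `databases = engine1, engine2`, each generated revision
+script (see script.py.mako) dispatches by engine name:
+
+    def upgrade(engine_name):
+        globals()["upgrade_%s" % engine_name]()
+
+so that only the per-engine upgrade_engine1() / upgrade_engine2() block
+for the current engine runs on each pass.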
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/templates/multidb/__pycache__/env.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/alembic/templates/multidb/__pycache__/env.cpython-312.pyc
new file mode 100644
index 00000000..a704cdbf
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/alembic/templates/multidb/__pycache__/env.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/templates/multidb/alembic.ini.mako b/Backend/venv/lib/python3.12/site-packages/alembic/templates/multidb/alembic.ini.mako
new file mode 100644
index 00000000..a9ea0755
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/alembic/templates/multidb/alembic.ini.mako
@@ -0,0 +1,121 @@
+# A multi-database configuration.
+
+[alembic]
+# path to migration scripts
+script_location = ${script_location}
+
+# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
+# Uncomment the line below if you want the files to be prepended with date and time
+# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
+# for all available tokens
+# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
+
+# sys.path path, will be prepended to sys.path if present.
+# defaults to the current working directory.
+prepend_sys_path = .
+
+# timezone to use when rendering the date within the migration file
+# as well as the filename.
+# If specified, requires the python-dateutil library that can be
+# installed by adding `alembic[tz]` to the pip requirements
+# string value is passed to dateutil.tz.gettz()
+# leave blank for localtime
+# timezone =
+
+# max length of characters to apply to the
+# "slug" field
+# truncate_slug_length = 40
+
+# set to 'true' to run the environment during
+# the 'revision' command, regardless of autogenerate
+# revision_environment = false
+
+# set to 'true' to allow .pyc and .pyo files without
+# a source .py file to be detected as revisions in the
+# versions/ directory
+# sourceless = false
+
+# version location specification; This defaults
+# to ${script_location}/versions. When using multiple version
+# directories, initial revisions must be specified with --version-path.
+# The path separator used here should be the separator specified by "version_path_separator" below.
+# version_locations = %(here)s/bar:%(here)s/bat:${script_location}/versions
+
+# version path separator; As mentioned above, this is the character used to split
+# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
+# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
+# Valid values for version_path_separator are:
+#
+# version_path_separator = :
+# version_path_separator = ;
+# version_path_separator = space
+version_path_separator = os # Use os.pathsep. Default configuration used for new projects.
+
+# set to 'true' to search source files recursively
+# in each "version_locations" directory
+# new in Alembic version 1.10
+# recursive_version_locations = false
+
+# the output encoding used when revision files
+# are written from script.py.mako
+# output_encoding = utf-8
+
+databases = engine1, engine2
+
+[engine1]
+sqlalchemy.url = driver://user:pass@localhost/dbname
+
+[engine2]
+sqlalchemy.url = driver://user:pass@localhost/dbname2
+
+[post_write_hooks]
+# post_write_hooks defines scripts or Python functions that are run
+# on newly generated revision scripts. See the documentation for further
+# detail and examples
+
+# format using "black" - use the console_scripts runner, against the "black" entrypoint
+# hooks = black
+# black.type = console_scripts
+# black.entrypoint = black
+# black.options = -l 79 REVISION_SCRIPT_FILENAME
+
+# lint with attempts to fix using "ruff" - use the exec runner, execute a binary
+# hooks = ruff
+# ruff.type = exec
+# ruff.executable = %(here)s/.venv/bin/ruff
+# ruff.options = --fix REVISION_SCRIPT_FILENAME
+
+# Logging configuration
+[loggers]
+keys = root,sqlalchemy,alembic
+
+[handlers]
+keys = console
+
+[formatters]
+keys = generic
+
+[logger_root]
+level = WARN
+handlers = console
+qualname =
+
+[logger_sqlalchemy]
+level = WARN
+handlers =
+qualname = sqlalchemy.engine
+
+[logger_alembic]
+level = INFO
+handlers =
+qualname = alembic
+
+[handler_console]
+class = StreamHandler
+args = (sys.stderr,)
+level = NOTSET
+formatter = generic
+
+[formatter_generic]
+format = %(levelname)-5.5s [%(name)s] %(message)s
+datefmt = %H:%M:%S
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/templates/multidb/env.py b/Backend/venv/lib/python3.12/site-packages/alembic/templates/multidb/env.py
new file mode 100644
index 00000000..e937b64e
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/alembic/templates/multidb/env.py
@@ -0,0 +1,140 @@
+import logging
+from logging.config import fileConfig
+import re
+
+from sqlalchemy import engine_from_config
+from sqlalchemy import pool
+
+from alembic import context
+
+USE_TWOPHASE = False
+
+# this is the Alembic Config object, which provides
+# access to the values within the .ini file in use.
+config = context.config
+
+# Interpret the config file for Python logging.
+# This line essentially sets up the loggers.
+if config.config_file_name is not None:
+ fileConfig(config.config_file_name)
+logger = logging.getLogger("alembic.env")
+
+# gather section names referring to different
+# databases. These are named "engine1", "engine2"
+# in the sample .ini file.
+db_names = config.get_main_option("databases", "")
+
+# add your model's MetaData objects here
+# for 'autogenerate' support. These must be set
+# up to hold just those tables targeting a
+# particular database. table.tometadata() may be
+# helpful here in case a "copy" of
+# a MetaData is needed.
+# from myapp import mymodel
+# target_metadata = {
+# 'engine1':mymodel.metadata1,
+# 'engine2':mymodel.metadata2
+# }
+target_metadata = {}
+
+# other values from the config, defined by the needs of env.py,
+# can be acquired:
+# my_important_option = config.get_main_option("my_important_option")
+# ... etc.
+
+
+def run_migrations_offline() -> None:
+ """Run migrations in 'offline' mode.
+
+ This configures the context with just a URL
+ and not an Engine, though an Engine is acceptable
+ here as well. By skipping the Engine creation
+ we don't even need a DBAPI to be available.
+
+ Calls to context.execute() here emit the given string to the
+ script output.
+
+ """
+ # for the --sql use case, run migrations for each URL into
+ # individual files.
+
+ engines = {}
+ for name in re.split(r",\s*", db_names):
+ engines[name] = rec = {}
+ rec["url"] = context.config.get_section_option(name, "sqlalchemy.url")
+
+ for name, rec in engines.items():
+ logger.info("Migrating database %s" % name)
+ file_ = "%s.sql" % name
+ logger.info("Writing output to %s" % file_)
+ with open(file_, "w") as buffer:
+ context.configure(
+ url=rec["url"],
+ output_buffer=buffer,
+ target_metadata=target_metadata.get(name),
+ literal_binds=True,
+ dialect_opts={"paramstyle": "named"},
+ )
+ with context.begin_transaction():
+ context.run_migrations(engine_name=name)
+
+
+def run_migrations_online() -> None:
+ """Run migrations in 'online' mode.
+
+ In this scenario we need to create an Engine
+ and associate a connection with the context.
+
+ """
+
+ # for the direct-to-DB use case, start a transaction on all
+ # engines, then run all migrations, then commit all transactions.
+
+ engines = {}
+ for name in re.split(r",\s*", db_names):
+ engines[name] = rec = {}
+ rec["engine"] = engine_from_config(
+ context.config.get_section(name, {}),
+ prefix="sqlalchemy.",
+ poolclass=pool.NullPool,
+ )
+
+ for name, rec in engines.items():
+ engine = rec["engine"]
+ rec["connection"] = conn = engine.connect()
+
+ if USE_TWOPHASE:
+ rec["transaction"] = conn.begin_twophase()
+ else:
+ rec["transaction"] = conn.begin()
+
+ try:
+ for name, rec in engines.items():
+ logger.info("Migrating database %s" % name)
+ context.configure(
+ connection=rec["connection"],
+ upgrade_token="%s_upgrades" % name,
+ downgrade_token="%s_downgrades" % name,
+ target_metadata=target_metadata.get(name),
+ )
+ context.run_migrations(engine_name=name)
+
+ if USE_TWOPHASE:
+ for rec in engines.values():
+ rec["transaction"].prepare()
+
+ for rec in engines.values():
+ rec["transaction"].commit()
+ except:
+ for rec in engines.values():
+ rec["transaction"].rollback()
+ raise
+ finally:
+ for rec in engines.values():
+ rec["connection"].close()
+
+
+if context.is_offline_mode():
+ run_migrations_offline()
+else:
+ run_migrations_online()
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/templates/multidb/script.py.mako b/Backend/venv/lib/python3.12/site-packages/alembic/templates/multidb/script.py.mako
new file mode 100644
index 00000000..6108b8a0
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/alembic/templates/multidb/script.py.mako
@@ -0,0 +1,47 @@
+<%!
+import re
+
+%>"""${message}
+
+Revision ID: ${up_revision}
+Revises: ${down_revision | comma,n}
+Create Date: ${create_date}
+
+"""
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+${imports if imports else ""}
+
+# revision identifiers, used by Alembic.
+revision: str = ${repr(up_revision)}
+down_revision: Union[str, None] = ${repr(down_revision)}
+branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
+depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
+
+
+def upgrade(engine_name: str) -> None:
+ globals()["upgrade_%s" % engine_name]()
+
+
+def downgrade(engine_name: str) -> None:
+ globals()["downgrade_%s" % engine_name]()
+
+<%
+ db_names = config.get_main_option("databases")
+%>
+
+## generate an "upgrade_<db_name>() / downgrade_<db_name>()" function
+## for each database name in the ini file.
+
+% for db_name in re.split(r',\s*', db_names):
+
+def upgrade_${db_name}() -> None:
+ ${context.get("%s_upgrades" % db_name, "pass")}
+
+
+def downgrade_${db_name}() -> None:
+ ${context.get("%s_downgrades" % db_name, "pass")}
+
+% endfor
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/testing/__init__.py b/Backend/venv/lib/python3.12/site-packages/alembic/testing/__init__.py
new file mode 100644
index 00000000..0407adfe
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/alembic/testing/__init__.py
@@ -0,0 +1,29 @@
+from sqlalchemy.testing import config
+from sqlalchemy.testing import emits_warning
+from sqlalchemy.testing import engines
+from sqlalchemy.testing import exclusions
+from sqlalchemy.testing import mock
+from sqlalchemy.testing import provide_metadata
+from sqlalchemy.testing import skip_if
+from sqlalchemy.testing import uses_deprecated
+from sqlalchemy.testing.config import combinations
+from sqlalchemy.testing.config import fixture
+from sqlalchemy.testing.config import requirements as requires
+
+from .assertions import assert_raises
+from .assertions import assert_raises_message
+from .assertions import emits_python_deprecation_warning
+from .assertions import eq_
+from .assertions import eq_ignore_whitespace
+from .assertions import expect_raises
+from .assertions import expect_raises_message
+from .assertions import expect_sqlalchemy_deprecated
+from .assertions import expect_sqlalchemy_deprecated_20
+from .assertions import expect_warnings
+from .assertions import is_
+from .assertions import is_false
+from .assertions import is_not_
+from .assertions import is_true
+from .assertions import ne_
+from .fixtures import TestBase
+from .util import resolve_lambda
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/testing/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/alembic/testing/__pycache__/__init__.cpython-312.pyc
new file mode 100644
index 00000000..bc7c2133
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/alembic/testing/__pycache__/__init__.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/testing/__pycache__/assertions.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/alembic/testing/__pycache__/assertions.cpython-312.pyc
new file mode 100644
index 00000000..58372fa1
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/alembic/testing/__pycache__/assertions.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/testing/__pycache__/env.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/alembic/testing/__pycache__/env.cpython-312.pyc
new file mode 100644
index 00000000..21acf495
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/alembic/testing/__pycache__/env.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/testing/__pycache__/fixtures.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/alembic/testing/__pycache__/fixtures.cpython-312.pyc
new file mode 100644
index 00000000..18e8f3d9
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/alembic/testing/__pycache__/fixtures.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/testing/__pycache__/requirements.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/alembic/testing/__pycache__/requirements.cpython-312.pyc
new file mode 100644
index 00000000..35e1d8f5
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/alembic/testing/__pycache__/requirements.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/testing/__pycache__/schemacompare.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/alembic/testing/__pycache__/schemacompare.cpython-312.pyc
new file mode 100644
index 00000000..7253b2af
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/alembic/testing/__pycache__/schemacompare.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/testing/__pycache__/util.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/alembic/testing/__pycache__/util.cpython-312.pyc
new file mode 100644
index 00000000..d0512d1b
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/alembic/testing/__pycache__/util.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/testing/__pycache__/warnings.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/alembic/testing/__pycache__/warnings.cpython-312.pyc
new file mode 100644
index 00000000..f3fe352e
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/alembic/testing/__pycache__/warnings.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/testing/assertions.py b/Backend/venv/lib/python3.12/site-packages/alembic/testing/assertions.py
new file mode 100644
index 00000000..ec9593b7
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/alembic/testing/assertions.py
@@ -0,0 +1,170 @@
+from __future__ import annotations
+
+import contextlib
+import re
+import sys
+from typing import Any
+from typing import Dict
+
+from sqlalchemy import exc as sa_exc
+from sqlalchemy.engine import default
+from sqlalchemy.testing.assertions import _expect_warnings
+from sqlalchemy.testing.assertions import eq_ # noqa
+from sqlalchemy.testing.assertions import is_ # noqa
+from sqlalchemy.testing.assertions import is_false # noqa
+from sqlalchemy.testing.assertions import is_not_ # noqa
+from sqlalchemy.testing.assertions import is_true # noqa
+from sqlalchemy.testing.assertions import ne_ # noqa
+from sqlalchemy.util import decorator
+
+from ..util import sqla_compat
+
+
+def _assert_proper_exception_context(exception):
+ """assert that any exception we're catching does not have a __context__
+ without a __cause__, and that __suppress_context__ is never set.
+
+    Python 3 will report nested exceptions as "during the handling of
+    error X, error Y occurred". That's not what we want; we want
+    these exceptions in a cause chain.
+
+ """
+
+ if (
+ exception.__context__ is not exception.__cause__
+ and not exception.__suppress_context__
+ ):
+ assert False, (
+ "Exception %r was correctly raised but did not set a cause, "
+ "within context %r as its cause."
+ % (exception, exception.__context__)
+ )
+
+
+def assert_raises(except_cls, callable_, *args, **kw):
+ return _assert_raises(except_cls, callable_, args, kw, check_context=True)
+
+
+def assert_raises_context_ok(except_cls, callable_, *args, **kw):
+ return _assert_raises(except_cls, callable_, args, kw)
+
+
+def assert_raises_message(except_cls, msg, callable_, *args, **kwargs):
+ return _assert_raises(
+ except_cls, callable_, args, kwargs, msg=msg, check_context=True
+ )
+
+
+def assert_raises_message_context_ok(
+ except_cls, msg, callable_, *args, **kwargs
+):
+ return _assert_raises(except_cls, callable_, args, kwargs, msg=msg)
+
+
+def _assert_raises(
+ except_cls, callable_, args, kwargs, msg=None, check_context=False
+):
+ with _expect_raises(except_cls, msg, check_context) as ec:
+ callable_(*args, **kwargs)
+ return ec.error
+
+
+class _ErrorContainer:
+ error: Any = None
+
+
+@contextlib.contextmanager
+def _expect_raises(except_cls, msg=None, check_context=False):
+ ec = _ErrorContainer()
+ if check_context:
+ are_we_already_in_a_traceback = sys.exc_info()[0]
+ try:
+ yield ec
+ success = False
+ except except_cls as err:
+ ec.error = err
+ success = True
+ if msg is not None:
+ assert re.search(msg, str(err), re.UNICODE), f"{msg} !~ {err}"
+ if check_context and not are_we_already_in_a_traceback:
+ _assert_proper_exception_context(err)
+ print(str(err).encode("utf-8"))
+
+    # assert outside the block so it works for AssertionError too!
+ assert success, "Callable did not raise an exception"
+
+
+def expect_raises(except_cls, check_context=True):
+ return _expect_raises(except_cls, check_context=check_context)
+
+
+def expect_raises_message(except_cls, msg, check_context=True):
+ return _expect_raises(except_cls, msg=msg, check_context=check_context)
+
+
+def eq_ignore_whitespace(a, b, msg=None):
+ a = re.sub(r"^\s+?|\n", "", a)
+ a = re.sub(r" {2,}", " ", a)
+ b = re.sub(r"^\s+?|\n", "", b)
+ b = re.sub(r" {2,}", " ", b)
+
+ assert a == b, msg or "%r != %r" % (a, b)
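+
+# e.g. both of these compare equal once whitespace is normalized:
+#     eq_ignore_whitespace("SELECT  a,\n b", "SELECT a, b")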
+
+
+_dialect_mods: Dict[Any, Any] = {}
+
+
+def _get_dialect(name):
+ if name is None or name == "default":
+ return default.DefaultDialect()
+ else:
+ d = sqla_compat._create_url(name).get_dialect()()
+
+ if name == "postgresql":
+ d.implicit_returning = True
+ elif name == "mssql":
+ d.legacy_schema_aliasing = False
+ return d
+
+
+def expect_warnings(*messages, **kw):
+ """Context manager which expects one or more warnings.
+
+ With no arguments, squelches all SAWarnings emitted via
+ sqlalchemy.util.warn and sqlalchemy.util.warn_limited. Otherwise
+ pass string expressions that will match selected warnings via regex;
+ all non-matching warnings are sent through.
+
+ The expect version **asserts** that the warnings were in fact seen.
+
+ Note that the test suite sets SAWarning warnings to raise exceptions.
+
+ """
+ return _expect_warnings(Warning, messages, **kw)
+
+
+def emits_python_deprecation_warning(*messages):
+ """Decorator form of expect_warnings().
+
+    Note that, unlike expect_warnings(), this decorator does **not** assert
+    that the warnings were in fact seen.
+
+ """
+
+ @decorator
+ def decorate(fn, *args, **kw):
+ with _expect_warnings(DeprecationWarning, assert_=False, *messages):
+ return fn(*args, **kw)
+
+ return decorate
+
+
+def expect_sqlalchemy_deprecated(*messages, **kw):
+ return _expect_warnings(sa_exc.SADeprecationWarning, messages, **kw)
+
+
+def expect_sqlalchemy_deprecated_20(*messages, **kw):
+ return _expect_warnings(sa_exc.RemovedIn20Warning, messages, **kw)
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/testing/env.py b/Backend/venv/lib/python3.12/site-packages/alembic/testing/env.py
new file mode 100644
index 00000000..5df7ef82
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/alembic/testing/env.py
@@ -0,0 +1,524 @@
+import importlib.machinery
+import os
+import shutil
+import textwrap
+
+from sqlalchemy.testing import config
+from sqlalchemy.testing import provision
+
+from . import util as testing_util
+from .. import command
+from .. import script
+from .. import util
+from ..script import Script
+from ..script import ScriptDirectory
+
+
+def _get_staging_directory():
+ if provision.FOLLOWER_IDENT:
+ return "scratch_%s" % provision.FOLLOWER_IDENT
+ else:
+ return "scratch"
+
+
+def staging_env(create=True, template="generic", sourceless=False):
+ cfg = _testing_config()
+ if create:
+ path = os.path.join(_get_staging_directory(), "scripts")
+ assert not os.path.exists(path), (
+ "staging directory %s already exists; poor cleanup?" % path
+ )
+
+ command.init(cfg, path, template=template)
+ if sourceless:
+ try:
+ # do an import so that a .pyc/.pyo is generated.
+ util.load_python_file(path, "env.py")
+ except AttributeError:
+ # we don't have the migration context set up yet
+ # so running the .env py throws this exception.
+ # theoretically we could be using py_compiler here to
+ # generate .pyc/.pyo without importing but not really
+ # worth it.
+ pass
+ assert sourceless in (
+ "pep3147_envonly",
+ "simple",
+ "pep3147_everything",
+ ), sourceless
+ make_sourceless(
+ os.path.join(path, "env.py"),
+ "pep3147" if "pep3147" in sourceless else "simple",
+ )
+
+ sc = script.ScriptDirectory.from_config(cfg)
+ return sc
+
+
+def clear_staging_env():
+ from sqlalchemy.testing import engines
+
+ engines.testing_reaper.close_all()
+ shutil.rmtree(_get_staging_directory(), True)
+
+
+def script_file_fixture(txt):
+ dir_ = os.path.join(_get_staging_directory(), "scripts")
+ path = os.path.join(dir_, "script.py.mako")
+ with open(path, "w") as f:
+ f.write(txt)
+
+
+def env_file_fixture(txt):
+ dir_ = os.path.join(_get_staging_directory(), "scripts")
+ txt = (
+ """
+from alembic import context
+
+config = context.config
+"""
+ + txt
+ )
+
+ path = os.path.join(dir_, "env.py")
+ pyc_path = util.pyc_file_from_path(path)
+ if pyc_path:
+ os.unlink(pyc_path)
+
+ with open(path, "w") as f:
+ f.write(txt)
+
+
+def _sqlite_file_db(tempname="foo.db", future=False, scope=None, **options):
+ dir_ = os.path.join(_get_staging_directory(), "scripts")
+ url = "sqlite:///%s/%s" % (dir_, tempname)
+ if scope and util.sqla_14:
+ options["scope"] = scope
+ return testing_util.testing_engine(url=url, future=future, options=options)
+
+
+def _sqlite_testing_config(sourceless=False, future=False):
+ dir_ = os.path.join(_get_staging_directory(), "scripts")
+ url = "sqlite:///%s/foo.db" % dir_
+
+ sqlalchemy_future = future or ("future" in config.db.__class__.__module__)
+
+ return _write_config_file(
+ """
+[alembic]
+script_location = %s
+sqlalchemy.url = %s
+sourceless = %s
+%s
+
+[loggers]
+keys = root,sqlalchemy
+
+[handlers]
+keys = console
+
+[logger_root]
+level = WARN
+handlers = console
+qualname =
+
+[logger_sqlalchemy]
+level = DEBUG
+handlers =
+qualname = sqlalchemy.engine
+
+[handler_console]
+class = StreamHandler
+args = (sys.stderr,)
+level = NOTSET
+formatter = generic
+
+[formatters]
+keys = generic
+
+[formatter_generic]
+format = %%(levelname)-5.5s [%%(name)s] %%(message)s
+datefmt = %%H:%%M:%%S
+ """
+ % (
+ dir_,
+ url,
+ "true" if sourceless else "false",
+ "sqlalchemy.future = true" if sqlalchemy_future else "",
+ )
+ )
+
+
+def _multi_dir_testing_config(sourceless=False, extra_version_location=""):
+ dir_ = os.path.join(_get_staging_directory(), "scripts")
+ sqlalchemy_future = "future" in config.db.__class__.__module__
+
+ url = "sqlite:///%s/foo.db" % dir_
+
+ return _write_config_file(
+ """
+[alembic]
+script_location = %s
+sqlalchemy.url = %s
+sqlalchemy.future = %s
+sourceless = %s
+version_locations = %%(here)s/model1/ %%(here)s/model2/ %%(here)s/model3/ %s
+
+[loggers]
+keys = root
+
+[handlers]
+keys = console
+
+[logger_root]
+level = WARN
+handlers = console
+qualname =
+
+[handler_console]
+class = StreamHandler
+args = (sys.stderr,)
+level = NOTSET
+formatter = generic
+
+[formatters]
+keys = generic
+
+[formatter_generic]
+format = %%(levelname)-5.5s [%%(name)s] %%(message)s
+datefmt = %%H:%%M:%%S
+ """
+ % (
+ dir_,
+ url,
+ "true" if sqlalchemy_future else "false",
+ "true" if sourceless else "false",
+ extra_version_location,
+ )
+ )
+
+
+def _no_sql_testing_config(dialect="postgresql", directives=""):
+ """use a postgresql url with no host so that
+ connections guaranteed to fail"""
+ dir_ = os.path.join(_get_staging_directory(), "scripts")
+ return _write_config_file(
+ """
+[alembic]
+script_location = %s
+sqlalchemy.url = %s://
+%s
+
+[loggers]
+keys = root
+
+[handlers]
+keys = console
+
+[logger_root]
+level = WARN
+handlers = console
+qualname =
+
+[handler_console]
+class = StreamHandler
+args = (sys.stderr,)
+level = NOTSET
+formatter = generic
+
+[formatters]
+keys = generic
+
+[formatter_generic]
+format = %%(levelname)-5.5s [%%(name)s] %%(message)s
+datefmt = %%H:%%M:%%S
+
+"""
+ % (dir_, dialect, directives)
+ )
+
+
+def _write_config_file(text):
+ cfg = _testing_config()
+ with open(cfg.config_file_name, "w") as f:
+ f.write(text)
+ return cfg
+
+
+def _testing_config():
+ from alembic.config import Config
+
+ if not os.access(_get_staging_directory(), os.F_OK):
+ os.mkdir(_get_staging_directory())
+ return Config(os.path.join(_get_staging_directory(), "test_alembic.ini"))
+
+
+def write_script(
+ scriptdir, rev_id, content, encoding="ascii", sourceless=False
+):
+ old = scriptdir.revision_map.get_revision(rev_id)
+ path = old.path
+
+ content = textwrap.dedent(content)
+ if encoding:
+ content = content.encode(encoding)
+ with open(path, "wb") as fp:
+ fp.write(content)
+ pyc_path = util.pyc_file_from_path(path)
+ if pyc_path:
+ os.unlink(pyc_path)
+ script = Script._from_path(scriptdir, path)
+ old = scriptdir.revision_map.get_revision(script.revision)
+ if old.down_revision != script.down_revision:
+ raise Exception(
+ "Can't change down_revision " "on a refresh operation."
+ )
+ scriptdir.revision_map.add_revision(script, _replace=True)
+
+ if sourceless:
+ make_sourceless(
+ path, "pep3147" if sourceless == "pep3147_everything" else "simple"
+ )
+
+
+def make_sourceless(path, style):
+ import py_compile
+
+ py_compile.compile(path)
+
+ if style == "simple":
+ pyc_path = util.pyc_file_from_path(path)
+ suffix = importlib.machinery.BYTECODE_SUFFIXES[0]
+ filepath, ext = os.path.splitext(path)
+ simple_pyc_path = filepath + suffix
+ shutil.move(pyc_path, simple_pyc_path)
+ pyc_path = simple_pyc_path
+ else:
+ assert style in ("pep3147", "simple")
+ pyc_path = util.pyc_file_from_path(path)
+
+ assert os.access(pyc_path, os.F_OK)
+
+ os.unlink(path)
+
+
+def three_rev_fixture(cfg):
+ a = util.rev_id()
+ b = util.rev_id()
+ c = util.rev_id()
+
+ script = ScriptDirectory.from_config(cfg)
+ script.generate_revision(a, "revision a", refresh=True, head="base")
+ write_script(
+ script,
+ a,
+ """\
+"Rev A"
+revision = '%s'
+down_revision = None
+
+from alembic import op
+
+
+def upgrade():
+ op.execute("CREATE STEP 1")
+
+
+def downgrade():
+ op.execute("DROP STEP 1")
+
+"""
+ % a,
+ )
+
+ script.generate_revision(b, "revision b", refresh=True, head=a)
+ write_script(
+ script,
+ b,
+ f"""# coding: utf-8
+"Rev B, méil, %3"
+revision = '{b}'
+down_revision = '{a}'
+
+from alembic import op
+
+
+def upgrade():
+ op.execute("CREATE STEP 2")
+
+
+def downgrade():
+ op.execute("DROP STEP 2")
+
+""",
+ encoding="utf-8",
+ )
+
+ script.generate_revision(c, "revision c", refresh=True, head=b)
+ write_script(
+ script,
+ c,
+ """\
+"Rev C"
+revision = '%s'
+down_revision = '%s'
+
+from alembic import op
+
+
+def upgrade():
+ op.execute("CREATE STEP 3")
+
+
+def downgrade():
+ op.execute("DROP STEP 3")
+
+"""
+ % (c, b),
+ )
+ return a, b, c
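+
+# Illustrative usage (a sketch; assumes the staging config fixtures above):
+#
+#     cfg = _sqlite_testing_config()
+#     a, b, c = three_rev_fixture(cfg)
+#     command.upgrade(cfg, c)  # applies revisions a -> b -> c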
+
+
+def multi_heads_fixture(cfg, a, b, c):
+ """Create a multiple head fixture from the three-revs fixture"""
+
+ # a->b->c
+ # -> d -> e
+ # -> f
+ d = util.rev_id()
+ e = util.rev_id()
+ f = util.rev_id()
+
+ script = ScriptDirectory.from_config(cfg)
+ script.generate_revision(
+ d, "revision d from b", head=b, splice=True, refresh=True
+ )
+ write_script(
+ script,
+ d,
+ """\
+"Rev D"
+revision = '%s'
+down_revision = '%s'
+
+from alembic import op
+
+
+def upgrade():
+ op.execute("CREATE STEP 4")
+
+
+def downgrade():
+ op.execute("DROP STEP 4")
+
+"""
+ % (d, b),
+ )
+
+ script.generate_revision(
+ e, "revision e from d", head=d, splice=True, refresh=True
+ )
+ write_script(
+ script,
+ e,
+ """\
+"Rev E"
+revision = '%s'
+down_revision = '%s'
+
+from alembic import op
+
+
+def upgrade():
+ op.execute("CREATE STEP 5")
+
+
+def downgrade():
+ op.execute("DROP STEP 5")
+
+"""
+ % (e, d),
+ )
+
+ script.generate_revision(
+ f, "revision f from b", head=b, splice=True, refresh=True
+ )
+ write_script(
+ script,
+ f,
+ """\
+"Rev F"
+revision = '%s'
+down_revision = '%s'
+
+from alembic import op
+
+
+def upgrade():
+ op.execute("CREATE STEP 6")
+
+
+def downgrade():
+ op.execute("DROP STEP 6")
+
+"""
+ % (f, b),
+ )
+
+ return d, e, f
+
+
+def _multidb_testing_config(engines):
+ """alembic.ini fixture to work exactly with the 'multidb' template"""
+
+ dir_ = os.path.join(_get_staging_directory(), "scripts")
+
+ sqlalchemy_future = "future" in config.db.__class__.__module__
+
+ databases = ", ".join(engines.keys())
+ engines = "\n\n".join(
+ "[%s]\n" "sqlalchemy.url = %s" % (key, value.url)
+ for key, value in engines.items()
+ )
+
+ return _write_config_file(
+ """
+[alembic]
+script_location = %s
+sourceless = false
+sqlalchemy.future = %s
+databases = %s
+
+%s
+[loggers]
+keys = root
+
+[handlers]
+keys = console
+
+[logger_root]
+level = WARN
+handlers = console
+qualname =
+
+[handler_console]
+class = StreamHandler
+args = (sys.stderr,)
+level = NOTSET
+formatter = generic
+
+[formatters]
+keys = generic
+
+[formatter_generic]
+format = %%(levelname)-5.5s [%%(name)s] %%(message)s
+datefmt = %%H:%%M:%%S
+ """
+ % (dir_, "true" if sqlalchemy_future else "false", databases, engines)
+ )
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/testing/fixtures.py b/Backend/venv/lib/python3.12/site-packages/alembic/testing/fixtures.py
new file mode 100644
index 00000000..4b83a745
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/alembic/testing/fixtures.py
@@ -0,0 +1,319 @@
+from __future__ import annotations
+
+import configparser
+from contextlib import contextmanager
+import io
+import re
+from typing import Any
+from typing import Dict
+
+from sqlalchemy import Column
+from sqlalchemy import inspect
+from sqlalchemy import MetaData
+from sqlalchemy import String
+from sqlalchemy import Table
+from sqlalchemy import testing
+from sqlalchemy import text
+from sqlalchemy.testing import config
+from sqlalchemy.testing import mock
+from sqlalchemy.testing.assertions import eq_
+from sqlalchemy.testing.fixtures import TablesTest as SQLAlchemyTablesTest
+from sqlalchemy.testing.fixtures import TestBase as SQLAlchemyTestBase
+
+import alembic
+from .assertions import _get_dialect
+from ..environment import EnvironmentContext
+from ..migration import MigrationContext
+from ..operations import Operations
+from ..util import sqla_compat
+from ..util.sqla_compat import create_mock_engine
+from ..util.sqla_compat import sqla_14
+from ..util.sqla_compat import sqla_2
+
+
+testing_config = configparser.ConfigParser()
+testing_config.read(["test.cfg"])
+
+
+class TestBase(SQLAlchemyTestBase):
+ is_sqlalchemy_future = sqla_2
+
+ @testing.fixture()
+ def ops_context(self, migration_context):
+ with migration_context.begin_transaction(_per_migration=True):
+ yield Operations(migration_context)
+
+ @testing.fixture
+ def migration_context(self, connection):
+ return MigrationContext.configure(
+ connection, opts=dict(transaction_per_migration=True)
+ )
+
+ @testing.fixture
+ def connection(self):
+ with config.db.connect() as conn:
+ yield conn
+
+
+class TablesTest(TestBase, SQLAlchemyTablesTest):
+ pass
+
+
+if sqla_14:
+ from sqlalchemy.testing.fixtures import FutureEngineMixin
+else:
+
+ class FutureEngineMixin: # type:ignore[no-redef]
+ __requires__ = ("sqlalchemy_14",)
+
+
+FutureEngineMixin.is_sqlalchemy_future = True
+
+
+def capture_db(dialect="postgresql://"):
+ buf = []
+
+ def dump(sql, *multiparams, **params):
+ buf.append(str(sql.compile(dialect=engine.dialect)))
+
+ engine = create_mock_engine(dialect, dump)
+ return engine, buf
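+
+# Illustrative usage (a sketch): emit DDL against the mock engine and
+# inspect the captured statements.
+#
+#     engine, buf = capture_db()
+#     Table("t", MetaData(), Column("x", String(50))).create(engine)
+#     assert "CREATE TABLE t" in buf[0]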
+
+
+_engs: Dict[Any, Any] = {}
+
+
+@contextmanager
+def capture_context_buffer(**kw):
+ if kw.pop("bytes_io", False):
+ buf = io.BytesIO()
+ else:
+ buf = io.StringIO()
+
+ kw.update({"dialect_name": "sqlite", "output_buffer": buf})
+ conf = EnvironmentContext.configure
+
+ def configure(*arg, **opt):
+ opt.update(**kw)
+ return conf(*arg, **opt)
+
+ with mock.patch.object(EnvironmentContext, "configure", configure):
+ yield buf
+
+
+@contextmanager
+def capture_engine_context_buffer(**kw):
+ from .env import _sqlite_file_db
+ from sqlalchemy import event
+
+ buf = io.StringIO()
+
+ eng = _sqlite_file_db()
+
+ conn = eng.connect()
+
+ @event.listens_for(conn, "before_cursor_execute")
+ def bce(conn, cursor, statement, parameters, context, executemany):
+ buf.write(statement + "\n")
+
+ kw.update({"connection": conn})
+ conf = EnvironmentContext.configure
+
+ def configure(*arg, **opt):
+ opt.update(**kw)
+ return conf(*arg, **opt)
+
+ with mock.patch.object(EnvironmentContext, "configure", configure):
+ yield buf
+
+
+def op_fixture(
+ dialect="default",
+ as_sql=False,
+ naming_convention=None,
+ literal_binds=False,
+ native_boolean=None,
+):
+ opts = {}
+ if naming_convention:
+ opts["target_metadata"] = MetaData(naming_convention=naming_convention)
+
+ class buffer_:
+ def __init__(self):
+ self.lines = []
+
+ def write(self, msg):
+ msg = msg.strip()
+ msg = re.sub(r"[\n\t]", "", msg)
+ if as_sql:
+ # the impl produces soft tabs,
+ # so search for blocks of 4 spaces
+ msg = re.sub(r" ", "", msg)
+ msg = re.sub(r"\;\n*$", "", msg)
+
+ self.lines.append(msg)
+
+ def flush(self):
+ pass
+
+ buf = buffer_()
+
+ class ctx(MigrationContext):
+ def get_buf(self):
+ return buf
+
+ def clear_assertions(self):
+ buf.lines[:] = []
+
+ def assert_(self, *sql):
+ # TODO: make this more flexible about
+ # whitespace and such
+ eq_(buf.lines, [re.sub(r"[\n\t]", "", s) for s in sql])
+
+ def assert_contains(self, sql):
+ for stmt in buf.lines:
+ if re.sub(r"[\n\t]", "", sql) in stmt:
+ return
+ else:
+ assert False, "Could not locate fragment %r in %r" % (
+ sql,
+ buf.lines,
+ )
+
+ if as_sql:
+ opts["as_sql"] = as_sql
+ if literal_binds:
+ opts["literal_binds"] = literal_binds
+ if not sqla_14 and dialect == "mariadb":
+ ctx_dialect = _get_dialect("mysql")
+ ctx_dialect.server_version_info = (10, 4, 0, "MariaDB")
+
+ else:
+ ctx_dialect = _get_dialect(dialect)
+ if native_boolean is not None:
+ ctx_dialect.supports_native_boolean = native_boolean
+ # this is new as of SQLAlchemy 1.2.7 and is used by SQL Server,
+ # which breaks assumptions in the alembic test suite
+ ctx_dialect.non_native_boolean_check_constraint = True
+ if not as_sql:
+
+ def execute(stmt, *multiparam, **param):
+ if isinstance(stmt, str):
+ stmt = text(stmt)
+ assert stmt.supports_execution
+ sql = str(stmt.compile(dialect=ctx_dialect))
+
+ buf.write(sql)
+
+ connection = mock.Mock(dialect=ctx_dialect, execute=execute)
+ else:
+ opts["output_buffer"] = buf
+ connection = None
+ context = ctx(ctx_dialect, connection, opts)
+
+ alembic.op._proxy = Operations(context)
+ return context
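+
+# Illustrative usage (a sketch, mirroring alembic's own op tests):
+#
+#     context = op_fixture()
+#     op.drop_column("t1", "c1")  # via "from alembic import op"
+#     context.assert_("ALTER TABLE t1 DROP COLUMN c1")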
+
+
+class AlterColRoundTripFixture:
+    # Since these tests are about syntax, use a more recent SQLAlchemy; some
+    # of the type / server default comparison logic may not work on older
+    # SQLAlchemy versions, as seems to be the case for SQLAlchemy 1.1 on Oracle.
+
+ __requires__ = ("alter_column",)
+
+ def setUp(self):
+ self.conn = config.db.connect()
+ self.ctx = MigrationContext.configure(self.conn)
+ self.op = Operations(self.ctx)
+ self.metadata = MetaData()
+
+ def _compare_type(self, t1, t2):
+ c1 = Column("q", t1)
+ c2 = Column("q", t2)
+ assert not self.ctx.impl.compare_type(
+ c1, c2
+ ), "Type objects %r and %r didn't compare as equivalent" % (t1, t2)
+
+ def _compare_server_default(self, t1, s1, t2, s2):
+ c1 = Column("q", t1, server_default=s1)
+ c2 = Column("q", t2, server_default=s2)
+ assert not self.ctx.impl.compare_server_default(
+ c1, c2, s2, s1
+ ), "server defaults %r and %r didn't compare as equivalent" % (s1, s2)
+
+ def tearDown(self):
+ sqla_compat._safe_rollback_connection_transaction(self.conn)
+ with self.conn.begin():
+ self.metadata.drop_all(self.conn)
+ self.conn.close()
+
+ def _run_alter_col(self, from_, to_, compare=None):
+ column = Column(
+ from_.get("name", "colname"),
+ from_.get("type", String(10)),
+ nullable=from_.get("nullable", True),
+ server_default=from_.get("server_default", None),
+ # comment=from_.get("comment", None)
+ )
+ t = Table("x", self.metadata, column)
+
+ with sqla_compat._ensure_scope_for_ddl(self.conn):
+ t.create(self.conn)
+ insp = inspect(self.conn)
+ old_col = insp.get_columns("x")[0]
+
+ # TODO: conditional comment support
+ self.op.alter_column(
+ "x",
+ column.name,
+ existing_type=column.type,
+ existing_server_default=column.server_default
+ if column.server_default is not None
+ else False,
+ existing_nullable=True if column.nullable else False,
+ # existing_comment=column.comment,
+ nullable=to_.get("nullable", None),
+ # modify_comment=False,
+ server_default=to_.get("server_default", False),
+ new_column_name=to_.get("name", None),
+ type_=to_.get("type", None),
+ )
+
+ insp = inspect(self.conn)
+ new_col = insp.get_columns("x")[0]
+
+ if compare is None:
+ compare = to_
+
+ eq_(
+ new_col["name"],
+ compare["name"] if "name" in compare else column.name,
+ )
+ self._compare_type(
+ new_col["type"], compare.get("type", old_col["type"])
+ )
+ eq_(new_col["nullable"], compare.get("nullable", column.nullable))
+ self._compare_server_default(
+ new_col["type"],
+ new_col.get("default", None),
+ compare.get("type", old_col["type"]),
+ compare["server_default"].text
+ if "server_default" in compare
+ else column.server_default.arg.text
+ if column.server_default is not None
+ else None,
+ )
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/testing/plugin/__init__.py b/Backend/venv/lib/python3.12/site-packages/alembic/testing/plugin/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/testing/plugin/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/alembic/testing/plugin/__pycache__/__init__.cpython-312.pyc
new file mode 100644
index 00000000..eb3b17f4
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/alembic/testing/plugin/__pycache__/__init__.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/testing/plugin/__pycache__/bootstrap.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/alembic/testing/plugin/__pycache__/bootstrap.cpython-312.pyc
new file mode 100644
index 00000000..fc47424e
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/alembic/testing/plugin/__pycache__/bootstrap.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/testing/plugin/bootstrap.py b/Backend/venv/lib/python3.12/site-packages/alembic/testing/plugin/bootstrap.py
new file mode 100644
index 00000000..d4a2c552
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/alembic/testing/plugin/bootstrap.py
@@ -0,0 +1,4 @@
+"""
+Bootstrapper for test framework plugins.
+
+"""
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/testing/requirements.py b/Backend/venv/lib/python3.12/site-packages/alembic/testing/requirements.py
new file mode 100644
index 00000000..2107da46
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/alembic/testing/requirements.py
@@ -0,0 +1,198 @@
+from sqlalchemy.testing.requirements import Requirements
+
+from alembic import util
+from alembic.util import sqla_compat
+from ..testing import exclusions
+
+
+class SuiteRequirements(Requirements):
+ @property
+ def schemas(self):
+ """Target database must support external schemas, and have one
+ named 'test_schema'."""
+
+ return exclusions.open()
+
+ @property
+ def autocommit_isolation(self):
+ """target database should support 'AUTOCOMMIT' isolation level"""
+
+ return exclusions.closed()
+
+ @property
+ def materialized_views(self):
+ """needed for sqlalchemy compat"""
+ return exclusions.closed()
+
+ @property
+ def unique_constraint_reflection(self):
+ def doesnt_have_check_uq_constraints(config):
+ from sqlalchemy import inspect
+
+ insp = inspect(config.db)
+ try:
+ insp.get_unique_constraints("x")
+ except NotImplementedError:
+ return True
+ except TypeError:
+ return True
+ except Exception:
+ pass
+ return False
+
+ return exclusions.skip_if(doesnt_have_check_uq_constraints)
+
+ @property
+ def sequences(self):
+ """Target database must support SEQUENCEs."""
+
+ return exclusions.only_if(
+ [lambda config: config.db.dialect.supports_sequences],
+ "no sequence support",
+ )
+
+ @property
+ def foreign_key_match(self):
+ return exclusions.open()
+
+ @property
+ def foreign_key_constraint_reflection(self):
+ return exclusions.open()
+
+ @property
+ def check_constraints_w_enforcement(self):
+ """Target database must support check constraints
+ and also enforce them."""
+
+ return exclusions.open()
+
+ @property
+ def reflects_pk_names(self):
+ return exclusions.closed()
+
+ @property
+ def reflects_fk_options(self):
+ return exclusions.closed()
+
+ @property
+ def sqlalchemy_14(self):
+ return exclusions.skip_if(
+ lambda config: not util.sqla_14,
+ "SQLAlchemy 1.4 or greater required",
+ )
+
+ @property
+ def sqlalchemy_1x(self):
+ return exclusions.skip_if(
+ lambda config: util.sqla_2,
+ "SQLAlchemy 1.x test",
+ )
+
+ @property
+ def sqlalchemy_2(self):
+ return exclusions.skip_if(
+ lambda config: not util.sqla_2,
+ "SQLAlchemy 2.x test",
+ )
+
+ @property
+ def comments(self):
+ return exclusions.only_if(
+ lambda config: config.db.dialect.supports_comments
+ )
+
+ @property
+ def alter_column(self):
+ return exclusions.open()
+
+ @property
+ def computed_columns(self):
+ return exclusions.closed()
+
+ @property
+ def computed_columns_api(self):
+ return exclusions.only_if(
+ exclusions.BooleanPredicate(sqla_compat.has_computed)
+ )
+
+ @property
+ def computed_reflects_normally(self):
+ return exclusions.only_if(
+ exclusions.BooleanPredicate(sqla_compat.has_computed_reflection)
+ )
+
+ @property
+ def computed_reflects_as_server_default(self):
+ return exclusions.closed()
+
+ @property
+ def computed_doesnt_reflect_as_server_default(self):
+ return exclusions.closed()
+
+ @property
+ def autoincrement_on_composite_pk(self):
+ return exclusions.closed()
+
+ @property
+ def fk_ondelete_is_reflected(self):
+ return exclusions.closed()
+
+ @property
+ def fk_onupdate_is_reflected(self):
+ return exclusions.closed()
+
+ @property
+ def fk_onupdate(self):
+ return exclusions.open()
+
+ @property
+ def fk_ondelete_restrict(self):
+ return exclusions.open()
+
+ @property
+ def fk_onupdate_restrict(self):
+ return exclusions.open()
+
+ @property
+ def fk_ondelete_noaction(self):
+ return exclusions.open()
+
+ @property
+ def fk_initially(self):
+ return exclusions.closed()
+
+ @property
+ def fk_deferrable(self):
+ return exclusions.closed()
+
+ @property
+ def fk_deferrable_is_reflected(self):
+ return exclusions.closed()
+
+ @property
+ def fk_names(self):
+ return exclusions.open()
+
+ @property
+ def integer_subtype_comparisons(self):
+ return exclusions.open()
+
+ @property
+ def no_name_normalize(self):
+ return exclusions.skip_if(
+ lambda config: config.db.dialect.requires_name_normalize
+ )
+
+ @property
+ def identity_columns(self):
+ return exclusions.closed()
+
+ @property
+ def identity_columns_alter(self):
+ return exclusions.closed()
+
+ @property
+ def identity_columns_api(self):
+ return exclusions.only_if(
+ exclusions.BooleanPredicate(sqla_compat.has_identity)
+ )
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/testing/schemacompare.py b/Backend/venv/lib/python3.12/site-packages/alembic/testing/schemacompare.py
new file mode 100644
index 00000000..c0634995
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/alembic/testing/schemacompare.py
@@ -0,0 +1,165 @@
+from itertools import zip_longest
+
+from sqlalchemy import schema
+
+
+class CompareTable:
+ def __init__(self, table):
+ self.table = table
+
+ def __eq__(self, other):
+ if self.table.name != other.name or self.table.schema != other.schema:
+ return False
+
+ for c1, c2 in zip_longest(self.table.c, other.c):
+ if (c1 is None and c2 is not None) or (
+ c2 is None and c1 is not None
+ ):
+ return False
+ if CompareColumn(c1) != c2:
+ return False
+
+ return True
+
+ # TODO: compare constraints, indexes
+
+ def __ne__(self, other):
+ return not self.__eq__(other)
+
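+# Illustrative usage (a sketch): wrapping the expected Table lets a plain
+# eq_() assertion run this structural comparison:
+#
+#     eq_(CompareTable(expected_table), reflected_table)
+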
+
+class CompareColumn:
+ def __init__(self, column):
+ self.column = column
+
+ def __eq__(self, other):
+ return (
+ self.column.name == other.name
+ and self.column.nullable == other.nullable
+ )
+ # TODO: datatypes etc
+
+ def __ne__(self, other):
+ return not self.__eq__(other)
+
+
+class CompareIndex:
+ def __init__(self, index, name_only=False):
+ self.index = index
+ self.name_only = name_only
+
+ def __eq__(self, other):
+ if self.name_only:
+ return self.index.name == other.name
+ else:
+ return (
+ str(schema.CreateIndex(self.index))
+ == str(schema.CreateIndex(other))
+ and self.index.dialect_kwargs == other.dialect_kwargs
+ )
+
+ def __ne__(self, other):
+ return not self.__eq__(other)
+
+
+class CompareCheckConstraint:
+ def __init__(self, constraint):
+ self.constraint = constraint
+
+ def __eq__(self, other):
+ return (
+ isinstance(other, schema.CheckConstraint)
+ and self.constraint.name == other.name
+ and (str(self.constraint.sqltext) == str(other.sqltext))
+ and (other.table.name == self.constraint.table.name)
+ and other.table.schema == self.constraint.table.schema
+ )
+
+ def __ne__(self, other):
+ return not self.__eq__(other)
+
+
+class CompareForeignKey:
+ def __init__(self, constraint):
+ self.constraint = constraint
+
+ def __eq__(self, other):
+ r1 = (
+ isinstance(other, schema.ForeignKeyConstraint)
+ and self.constraint.name == other.name
+ and (other.table.name == self.constraint.table.name)
+ and other.table.schema == self.constraint.table.schema
+ )
+ if not r1:
+ return False
+ for c1, c2 in zip_longest(self.constraint.columns, other.columns):
+ if (c1 is None and c2 is not None) or (
+ c2 is None and c1 is not None
+ ):
+ return False
+ if CompareColumn(c1) != c2:
+ return False
+ return True
+
+ def __ne__(self, other):
+ return not self.__eq__(other)
+
+
+class ComparePrimaryKey:
+ def __init__(self, constraint):
+ self.constraint = constraint
+
+ def __eq__(self, other):
+ r1 = (
+ isinstance(other, schema.PrimaryKeyConstraint)
+ and self.constraint.name == other.name
+ and (other.table.name == self.constraint.table.name)
+ and other.table.schema == self.constraint.table.schema
+ )
+ if not r1:
+ return False
+
+ for c1, c2 in zip_longest(self.constraint.columns, other.columns):
+ if (c1 is None and c2 is not None) or (
+ c2 is None and c1 is not None
+ ):
+ return False
+ if CompareColumn(c1) != c2:
+ return False
+
+ return True
+
+ def __ne__(self, other):
+ return not self.__eq__(other)
+
+
+class CompareUniqueConstraint:
+ def __init__(self, constraint):
+ self.constraint = constraint
+
+ def __eq__(self, other):
+ r1 = (
+ isinstance(other, schema.UniqueConstraint)
+ and self.constraint.name == other.name
+ and (other.table.name == self.constraint.table.name)
+ and other.table.schema == self.constraint.table.schema
+ )
+ if not r1:
+ return False
+
+ for c1, c2 in zip_longest(self.constraint.columns, other.columns):
+ if (c1 is None and c2 is not None) or (
+ c2 is None and c1 is not None
+ ):
+ return False
+ if CompareColumn(c1) != c2:
+ return False
+
+ return True
+
+ def __ne__(self, other):
+ return not self.__eq__(other)
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/testing/suite/__init__.py b/Backend/venv/lib/python3.12/site-packages/alembic/testing/suite/__init__.py
new file mode 100644
index 00000000..3da498d2
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/alembic/testing/suite/__init__.py
@@ -0,0 +1,7 @@
+from .test_autogen_comments import * # noqa
+from .test_autogen_computed import * # noqa
+from .test_autogen_diffs import * # noqa
+from .test_autogen_fks import * # noqa
+from .test_autogen_identity import * # noqa
+from .test_environment import * # noqa
+from .test_op import * # noqa
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/testing/suite/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/alembic/testing/suite/__pycache__/__init__.cpython-312.pyc
new file mode 100644
index 00000000..3358569d
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/alembic/testing/suite/__pycache__/__init__.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/testing/suite/__pycache__/_autogen_fixtures.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/alembic/testing/suite/__pycache__/_autogen_fixtures.cpython-312.pyc
new file mode 100644
index 00000000..07886d90
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/alembic/testing/suite/__pycache__/_autogen_fixtures.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/testing/suite/__pycache__/test_autogen_comments.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/alembic/testing/suite/__pycache__/test_autogen_comments.cpython-312.pyc
new file mode 100644
index 00000000..dbd4514a
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/alembic/testing/suite/__pycache__/test_autogen_comments.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/testing/suite/__pycache__/test_autogen_computed.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/alembic/testing/suite/__pycache__/test_autogen_computed.cpython-312.pyc
new file mode 100644
index 00000000..cf431aec
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/alembic/testing/suite/__pycache__/test_autogen_computed.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/testing/suite/__pycache__/test_autogen_diffs.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/alembic/testing/suite/__pycache__/test_autogen_diffs.cpython-312.pyc
new file mode 100644
index 00000000..cab40b87
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/alembic/testing/suite/__pycache__/test_autogen_diffs.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/testing/suite/__pycache__/test_autogen_fks.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/alembic/testing/suite/__pycache__/test_autogen_fks.cpython-312.pyc
new file mode 100644
index 00000000..c35524d9
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/alembic/testing/suite/__pycache__/test_autogen_fks.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/testing/suite/__pycache__/test_autogen_identity.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/alembic/testing/suite/__pycache__/test_autogen_identity.cpython-312.pyc
new file mode 100644
index 00000000..bee5bd44
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/alembic/testing/suite/__pycache__/test_autogen_identity.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/testing/suite/__pycache__/test_environment.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/alembic/testing/suite/__pycache__/test_environment.cpython-312.pyc
new file mode 100644
index 00000000..f3940a7e
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/alembic/testing/suite/__pycache__/test_environment.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/testing/suite/__pycache__/test_op.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/alembic/testing/suite/__pycache__/test_op.cpython-312.pyc
new file mode 100644
index 00000000..b84e9cfd
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/alembic/testing/suite/__pycache__/test_op.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/testing/suite/_autogen_fixtures.py b/Backend/venv/lib/python3.12/site-packages/alembic/testing/suite/_autogen_fixtures.py
new file mode 100644
index 00000000..d838ebef
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/alembic/testing/suite/_autogen_fixtures.py
@@ -0,0 +1,335 @@
+from __future__ import annotations
+
+from typing import Any
+from typing import Dict
+from typing import Set
+
+from sqlalchemy import CHAR
+from sqlalchemy import CheckConstraint
+from sqlalchemy import Column
+from sqlalchemy import event
+from sqlalchemy import ForeignKey
+from sqlalchemy import Index
+from sqlalchemy import inspect
+from sqlalchemy import Integer
+from sqlalchemy import MetaData
+from sqlalchemy import Numeric
+from sqlalchemy import String
+from sqlalchemy import Table
+from sqlalchemy import Text
+from sqlalchemy import text
+from sqlalchemy import UniqueConstraint
+
+from ... import autogenerate
+from ... import util
+from ...autogenerate import api
+from ...ddl.base import _fk_spec
+from ...migration import MigrationContext
+from ...operations import ops
+from ...testing import config
+from ...testing import eq_
+from ...testing.env import clear_staging_env
+from ...testing.env import staging_env
+
+names_in_this_test: Set[Any] = set()
+
+
+@event.listens_for(Table, "after_parent_attach")
+def new_table(table, parent):
+ names_in_this_test.add(table.name)
+
+
+def _default_include_object(obj, name, type_, reflected, compare_to):
+ if type_ == "table":
+ return name in names_in_this_test
+ else:
+ return True
+
+
+_default_object_filters: Any = _default_include_object
+
+_default_name_filters: Any = None
+
+
+class ModelOne:
+ __requires__ = ("unique_constraint_reflection",)
+
+ schema: Any = None
+
+ @classmethod
+ def _get_db_schema(cls):
+ schema = cls.schema
+
+ m = MetaData(schema=schema)
+
+ Table(
+ "user",
+ m,
+ Column("id", Integer, primary_key=True),
+ Column("name", String(50)),
+ Column("a1", Text),
+ Column("pw", String(50)),
+ Index("pw_idx", "pw"),
+ )
+
+ Table(
+ "address",
+ m,
+ Column("id", Integer, primary_key=True),
+ Column("email_address", String(100), nullable=False),
+ )
+
+ Table(
+ "order",
+ m,
+ Column("order_id", Integer, primary_key=True),
+ Column(
+ "amount",
+ Numeric(8, 2),
+ nullable=False,
+ server_default=text("0"),
+ ),
+ CheckConstraint("amount >= 0", name="ck_order_amount"),
+ )
+
+ Table(
+ "extra",
+ m,
+ Column("x", CHAR),
+ Column("uid", Integer, ForeignKey("user.id")),
+ )
+
+ return m
+
+ @classmethod
+ def _get_model_schema(cls):
+ schema = cls.schema
+
+ m = MetaData(schema=schema)
+
+ Table(
+ "user",
+ m,
+ Column("id", Integer, primary_key=True),
+ Column("name", String(50), nullable=False),
+ Column("a1", Text, server_default="x"),
+ )
+
+ Table(
+ "address",
+ m,
+ Column("id", Integer, primary_key=True),
+ Column("email_address", String(100), nullable=False),
+ Column("street", String(50)),
+ UniqueConstraint("email_address", name="uq_email"),
+ )
+
+ Table(
+ "order",
+ m,
+ Column("order_id", Integer, primary_key=True),
+ Column(
+ "amount",
+ Numeric(10, 2),
+ nullable=True,
+ server_default=text("0"),
+ ),
+ Column("user_id", Integer, ForeignKey("user.id")),
+ CheckConstraint("amount > -1", name="ck_order_amount"),
+ )
+
+ Table(
+ "item",
+ m,
+ Column("id", Integer, primary_key=True),
+ Column("description", String(100)),
+ Column("order_id", Integer, ForeignKey("order.order_id")),
+ CheckConstraint("len(description) > 5"),
+ )
+ return m
+
+
+class _ComparesFKs:
+ def _assert_fk_diff(
+ self,
+ diff,
+ type_,
+ source_table,
+ source_columns,
+ target_table,
+ target_columns,
+ name=None,
+ conditional_name=None,
+ source_schema=None,
+ onupdate=None,
+ ondelete=None,
+ initially=None,
+ deferrable=None,
+ ):
+        # the public API for ForeignKeyConstraint was not very rich
+        # in SQLAlchemy 0.7 and 0.8, so here we use the well-known but
+        # slightly private _fk_spec API to get at its elements
+ (
+ fk_source_schema,
+ fk_source_table,
+ fk_source_columns,
+ fk_target_schema,
+ fk_target_table,
+ fk_target_columns,
+ fk_onupdate,
+ fk_ondelete,
+ fk_deferrable,
+ fk_initially,
+ ) = _fk_spec(diff[1])
+
+ eq_(diff[0], type_)
+ eq_(fk_source_table, source_table)
+ eq_(fk_source_columns, source_columns)
+ eq_(fk_target_table, target_table)
+ eq_(fk_source_schema, source_schema)
+ eq_(fk_onupdate, onupdate)
+ eq_(fk_ondelete, ondelete)
+ eq_(fk_initially, initially)
+ eq_(fk_deferrable, deferrable)
+
+ eq_([elem.column.name for elem in diff[1].elements], target_columns)
+ if conditional_name is not None:
+ if conditional_name == "servergenerated":
+ fks = inspect(self.bind).get_foreign_keys(source_table)
+ server_fk_name = fks[0]["name"]
+ eq_(diff[1].name, server_fk_name)
+ else:
+ eq_(diff[1].name, conditional_name)
+ else:
+ eq_(diff[1].name, name)
+
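+# A hypothetical call, for orientation (names are illustrative only):
+#
+#     self._assert_fk_diff(
+#         diffs[0], "add_fk", "user", ["tid"], "some_table", ["id"],
+#         name="fk_user_tid",
+#     )
+#
+# where each diff is a ("add_fk" | "remove_fk", ForeignKeyConstraint)
+# tuple produced by autogenerate.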
+
+class AutogenTest(_ComparesFKs):
+ def _flatten_diffs(self, diffs):
+ for d in diffs:
+ if isinstance(d, list):
+ yield from self._flatten_diffs(d)
+ else:
+ yield d
+
+ @classmethod
+ def _get_bind(cls):
+ return config.db
+
+ configure_opts: Dict[Any, Any] = {}
+
+ @classmethod
+ def setup_class(cls):
+ staging_env()
+ cls.bind = cls._get_bind()
+ cls.m1 = cls._get_db_schema()
+ cls.m1.create_all(cls.bind)
+ cls.m2 = cls._get_model_schema()
+
+ @classmethod
+ def teardown_class(cls):
+ cls.m1.drop_all(cls.bind)
+ clear_staging_env()
+
+ def setUp(self):
+ self.conn = conn = self.bind.connect()
+ ctx_opts = {
+ "compare_type": True,
+ "compare_server_default": True,
+ "target_metadata": self.m2,
+ "upgrade_token": "upgrades",
+ "downgrade_token": "downgrades",
+ "alembic_module_prefix": "op.",
+ "sqlalchemy_module_prefix": "sa.",
+ "include_object": _default_object_filters,
+ "include_name": _default_name_filters,
+ }
+ if self.configure_opts:
+ ctx_opts.update(self.configure_opts)
+ self.context = context = MigrationContext.configure(
+ connection=conn, opts=ctx_opts
+ )
+
+ self.autogen_context = api.AutogenContext(context, self.m2)
+
+ def tearDown(self):
+ self.conn.close()
+
+ def _update_context(
+ self, object_filters=None, name_filters=None, include_schemas=None
+ ):
+ if include_schemas is not None:
+ self.autogen_context.opts["include_schemas"] = include_schemas
+ if object_filters is not None:
+ self.autogen_context._object_filters = [object_filters]
+ if name_filters is not None:
+ self.autogen_context._name_filters = [name_filters]
+ return self.autogen_context
+
+
+class AutogenFixtureTest(_ComparesFKs):
+ def _fixture(
+ self,
+ m1,
+ m2,
+ include_schemas=False,
+ opts=None,
+ object_filters=_default_object_filters,
+ name_filters=_default_name_filters,
+ return_ops=False,
+ max_identifier_length=None,
+ ):
+ if max_identifier_length:
+ dialect = self.bind.dialect
+ existing_length = dialect.max_identifier_length
+ dialect.max_identifier_length = (
+ dialect._user_defined_max_identifier_length
+ ) = max_identifier_length
+ try:
+ self._alembic_metadata, model_metadata = m1, m2
+ for m in util.to_list(self._alembic_metadata):
+ m.create_all(self.bind)
+
+ with self.bind.connect() as conn:
+ ctx_opts = {
+ "compare_type": True,
+ "compare_server_default": True,
+ "target_metadata": model_metadata,
+ "upgrade_token": "upgrades",
+ "downgrade_token": "downgrades",
+ "alembic_module_prefix": "op.",
+ "sqlalchemy_module_prefix": "sa.",
+ "include_object": object_filters,
+ "include_name": name_filters,
+ "include_schemas": include_schemas,
+ }
+ if opts:
+ ctx_opts.update(opts)
+ self.context = context = MigrationContext.configure(
+ connection=conn, opts=ctx_opts
+ )
+
+ autogen_context = api.AutogenContext(context, model_metadata)
+ uo = ops.UpgradeOps(ops=[])
+ autogenerate._produce_net_changes(autogen_context, uo)
+
+ if return_ops:
+ return uo
+ else:
+ return uo.as_diffs()
+ finally:
+ if max_identifier_length:
+ dialect = self.bind.dialect
+ dialect.max_identifier_length = (
+ dialect._user_defined_max_identifier_length
+ ) = existing_length
+
+ def setUp(self):
+ staging_env()
+ self.bind = config.db
+
+ def tearDown(self):
+ if hasattr(self, "_alembic_metadata"):
+ for m in util.to_list(self._alembic_metadata):
+ m.drop_all(self.bind)
+ clear_staging_env()
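+
+
+# Typical use by a subclass (sketch; the table and column names here are
+# illustrative only):
+#
+#     m1, m2 = MetaData(), MetaData()
+#     Table("t", m1, Column("x", Integer))
+#     Table("t", m2, Column("x", BigInteger))
+#     diffs = self._fixture(m1, m2)  # m1 is created in the DB, m2 is the model
+#
+# ``diffs`` is a list of autogenerate diff tuples such as ("add_column", ...)
+# or nested "modify_*" entries.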
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/testing/suite/test_autogen_comments.py b/Backend/venv/lib/python3.12/site-packages/alembic/testing/suite/test_autogen_comments.py
new file mode 100644
index 00000000..7ef074f5
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/alembic/testing/suite/test_autogen_comments.py
@@ -0,0 +1,242 @@
+from sqlalchemy import Column
+from sqlalchemy import Float
+from sqlalchemy import MetaData
+from sqlalchemy import String
+from sqlalchemy import Table
+
+from ._autogen_fixtures import AutogenFixtureTest
+from ...testing import eq_
+from ...testing import mock
+from ...testing import TestBase
+
+
+class AutogenerateCommentsTest(AutogenFixtureTest, TestBase):
+ __backend__ = True
+
+ __requires__ = ("comments",)
+
+ def test_existing_table_comment_no_change(self):
+ m1 = MetaData()
+ m2 = MetaData()
+
+ Table(
+ "some_table",
+ m1,
+ Column("test", String(10), primary_key=True),
+ comment="this is some table",
+ )
+
+ Table(
+ "some_table",
+ m2,
+ Column("test", String(10), primary_key=True),
+ comment="this is some table",
+ )
+
+ diffs = self._fixture(m1, m2)
+
+ eq_(diffs, [])
+
+ def test_add_table_comment(self):
+ m1 = MetaData()
+ m2 = MetaData()
+
+ Table("some_table", m1, Column("test", String(10), primary_key=True))
+
+ Table(
+ "some_table",
+ m2,
+ Column("test", String(10), primary_key=True),
+ comment="this is some table",
+ )
+
+ diffs = self._fixture(m1, m2)
+
+ eq_(diffs[0][0], "add_table_comment")
+ eq_(diffs[0][1].comment, "this is some table")
+ eq_(diffs[0][2], None)
+
+ def test_remove_table_comment(self):
+ m1 = MetaData()
+ m2 = MetaData()
+
+ Table(
+ "some_table",
+ m1,
+ Column("test", String(10), primary_key=True),
+ comment="this is some table",
+ )
+
+ Table("some_table", m2, Column("test", String(10), primary_key=True))
+
+ diffs = self._fixture(m1, m2)
+
+ eq_(diffs[0][0], "remove_table_comment")
+ eq_(diffs[0][1].comment, None)
+
+ def test_alter_table_comment(self):
+ m1 = MetaData()
+ m2 = MetaData()
+
+ Table(
+ "some_table",
+ m1,
+ Column("test", String(10), primary_key=True),
+ comment="this is some table",
+ )
+
+ Table(
+ "some_table",
+ m2,
+ Column("test", String(10), primary_key=True),
+ comment="this is also some table",
+ )
+
+ diffs = self._fixture(m1, m2)
+
+ eq_(diffs[0][0], "add_table_comment")
+ eq_(diffs[0][1].comment, "this is also some table")
+ eq_(diffs[0][2], "this is some table")
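+        # note: an altered comment is emitted as "add_table_comment", with
+        # the previous comment carried in the third tuple element.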
+
+ def test_existing_column_comment_no_change(self):
+ m1 = MetaData()
+ m2 = MetaData()
+
+ Table(
+ "some_table",
+ m1,
+ Column("test", String(10), primary_key=True),
+ Column("amount", Float, comment="the amount"),
+ )
+
+ Table(
+ "some_table",
+ m2,
+ Column("test", String(10), primary_key=True),
+ Column("amount", Float, comment="the amount"),
+ )
+
+ diffs = self._fixture(m1, m2)
+
+ eq_(diffs, [])
+
+ def test_add_column_comment(self):
+ m1 = MetaData()
+ m2 = MetaData()
+
+ Table(
+ "some_table",
+ m1,
+ Column("test", String(10), primary_key=True),
+ Column("amount", Float),
+ )
+
+ Table(
+ "some_table",
+ m2,
+ Column("test", String(10), primary_key=True),
+ Column("amount", Float, comment="the amount"),
+ )
+
+ diffs = self._fixture(m1, m2)
+ eq_(
+ diffs,
+ [
+ [
+ (
+ "modify_comment",
+ None,
+ "some_table",
+ "amount",
+ {
+ "existing_nullable": True,
+ "existing_type": mock.ANY,
+ "existing_server_default": False,
+ },
+ None,
+ "the amount",
+ )
+ ]
+ ],
+ )
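+        # The "modify_comment" tuple unpacks as (op, schema, table, column,
+        # dict of existing state, old comment, new comment): here the old
+        # comment is None and the new one is "the amount".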
+
+ def test_remove_column_comment(self):
+ m1 = MetaData()
+ m2 = MetaData()
+
+ Table(
+ "some_table",
+ m1,
+ Column("test", String(10), primary_key=True),
+ Column("amount", Float, comment="the amount"),
+ )
+
+ Table(
+ "some_table",
+ m2,
+ Column("test", String(10), primary_key=True),
+ Column("amount", Float),
+ )
+
+ diffs = self._fixture(m1, m2)
+ eq_(
+ diffs,
+ [
+ [
+ (
+ "modify_comment",
+ None,
+ "some_table",
+ "amount",
+ {
+ "existing_nullable": True,
+ "existing_type": mock.ANY,
+ "existing_server_default": False,
+ },
+ "the amount",
+ None,
+ )
+ ]
+ ],
+ )
+
+ def test_alter_column_comment(self):
+ m1 = MetaData()
+ m2 = MetaData()
+
+ Table(
+ "some_table",
+ m1,
+ Column("test", String(10), primary_key=True),
+ Column("amount", Float, comment="the amount"),
+ )
+
+ Table(
+ "some_table",
+ m2,
+ Column("test", String(10), primary_key=True),
+ Column("amount", Float, comment="the adjusted amount"),
+ )
+
+ diffs = self._fixture(m1, m2)
+
+ eq_(
+ diffs,
+ [
+ [
+ (
+ "modify_comment",
+ None,
+ "some_table",
+ "amount",
+ {
+ "existing_nullable": True,
+ "existing_type": mock.ANY,
+ "existing_server_default": False,
+ },
+ "the amount",
+ "the adjusted amount",
+ )
+ ]
+ ],
+ )
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/testing/suite/test_autogen_computed.py b/Backend/venv/lib/python3.12/site-packages/alembic/testing/suite/test_autogen_computed.py
new file mode 100644
index 00000000..01a89a1f
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/alembic/testing/suite/test_autogen_computed.py
@@ -0,0 +1,203 @@
+import sqlalchemy as sa
+from sqlalchemy import Column
+from sqlalchemy import Integer
+from sqlalchemy import MetaData
+from sqlalchemy import Table
+
+from ._autogen_fixtures import AutogenFixtureTest
+from ... import testing
+from ...testing import config
+from ...testing import eq_
+from ...testing import exclusions
+from ...testing import is_
+from ...testing import is_true
+from ...testing import mock
+from ...testing import TestBase
+
+
+class AutogenerateComputedTest(AutogenFixtureTest, TestBase):
+ __requires__ = ("computed_columns",)
+ __backend__ = True
+
+ def test_add_computed_column(self):
+ m1 = MetaData()
+ m2 = MetaData()
+
+ Table("user", m1, Column("id", Integer, primary_key=True))
+
+ Table(
+ "user",
+ m2,
+ Column("id", Integer, primary_key=True),
+ Column("foo", Integer, sa.Computed("5")),
+ )
+
+ diffs = self._fixture(m1, m2)
+
+ eq_(diffs[0][0], "add_column")
+ eq_(diffs[0][2], "user")
+ eq_(diffs[0][3].name, "foo")
+ c = diffs[0][3].computed
+
+ is_true(isinstance(c, sa.Computed))
+ is_(c.persisted, None)
+ eq_(str(c.sqltext), "5")
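+        # persisted=None means the Computed construct did not specify
+        # STORED vs. VIRTUAL, not that persistence is disabled.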
+
+ def test_remove_computed_column(self):
+ m1 = MetaData()
+ m2 = MetaData()
+
+ Table(
+ "user",
+ m1,
+ Column("id", Integer, primary_key=True),
+ Column("foo", Integer, sa.Computed("5")),
+ )
+
+ Table("user", m2, Column("id", Integer, primary_key=True))
+
+ diffs = self._fixture(m1, m2)
+
+ eq_(diffs[0][0], "remove_column")
+ eq_(diffs[0][2], "user")
+ c = diffs[0][3]
+ eq_(c.name, "foo")
+
+ if config.requirements.computed_reflects_normally.enabled:
+ is_true(isinstance(c.computed, sa.Computed))
+ else:
+ is_(c.computed, None)
+
+ if config.requirements.computed_reflects_as_server_default.enabled:
+ is_true(isinstance(c.server_default, sa.DefaultClause))
+ eq_(str(c.server_default.arg.text), "5")
+ elif config.requirements.computed_reflects_normally.enabled:
+ is_true(isinstance(c.computed, sa.Computed))
+ else:
+ is_(c.computed, None)
+
+ @testing.combinations(
+ lambda: (None, sa.Computed("bar*5")),
+ (lambda: (sa.Computed("bar*5"), None)),
+ lambda: (
+ sa.Computed("bar*5"),
+ sa.Computed("bar * 42", persisted=True),
+ ),
+ lambda: (sa.Computed("bar*5"), sa.Computed("bar * 42")),
+ )
+ @config.requirements.computed_reflects_normally
+ def test_cant_change_computed_warning(self, test_case):
+ arg_before, arg_after = testing.resolve_lambda(test_case, **locals())
+ m1 = MetaData()
+ m2 = MetaData()
+
+ arg_before = [] if arg_before is None else [arg_before]
+ arg_after = [] if arg_after is None else [arg_after]
+
+ Table(
+ "user",
+ m1,
+ Column("id", Integer, primary_key=True),
+ Column("bar", Integer),
+ Column("foo", Integer, *arg_before),
+ )
+
+ Table(
+ "user",
+ m2,
+ Column("id", Integer, primary_key=True),
+ Column("bar", Integer),
+ Column("foo", Integer, *arg_after),
+ )
+
+ with mock.patch("alembic.util.warn") as mock_warn:
+ diffs = self._fixture(m1, m2)
+
+ eq_(
+ mock_warn.mock_calls,
+ [mock.call("Computed default on user.foo cannot be modified")],
+ )
+
+ eq_(list(diffs), [])
+
+ @testing.combinations(
+ lambda: (None, None),
+ lambda: (sa.Computed("5"), sa.Computed("5")),
+ lambda: (sa.Computed("bar*5"), sa.Computed("bar*5")),
+ (
+ lambda: (sa.Computed("bar*5"), None),
+ config.requirements.computed_doesnt_reflect_as_server_default,
+ ),
+ )
+ def test_computed_unchanged(self, test_case):
+ arg_before, arg_after = testing.resolve_lambda(test_case, **locals())
+ m1 = MetaData()
+ m2 = MetaData()
+
+ arg_before = [] if arg_before is None else [arg_before]
+ arg_after = [] if arg_after is None else [arg_after]
+
+ Table(
+ "user",
+ m1,
+ Column("id", Integer, primary_key=True),
+ Column("bar", Integer),
+ Column("foo", Integer, *arg_before),
+ )
+
+ Table(
+ "user",
+ m2,
+ Column("id", Integer, primary_key=True),
+ Column("bar", Integer),
+ Column("foo", Integer, *arg_after),
+ )
+
+ with mock.patch("alembic.util.warn") as mock_warn:
+ diffs = self._fixture(m1, m2)
+ eq_(mock_warn.mock_calls, [])
+
+ eq_(list(diffs), [])
+
+ @config.requirements.computed_reflects_as_server_default
+ def test_remove_computed_default_on_computed(self):
+        """Assert the current behavior: on PG and Oracle, the
+        GENERATED ALWAYS AS clause is reflected as a server default that
+        we can't tell is actually "computed", so these changes come out
+        as a modification to the server default.
+
+ """
+ m1 = MetaData()
+ m2 = MetaData()
+
+ Table(
+ "user",
+ m1,
+ Column("id", Integer, primary_key=True),
+ Column("bar", Integer),
+ Column("foo", Integer, sa.Computed("bar + 42")),
+ )
+
+ Table(
+ "user",
+ m2,
+ Column("id", Integer, primary_key=True),
+ Column("bar", Integer),
+ Column("foo", Integer),
+ )
+
+ diffs = self._fixture(m1, m2)
+
+ eq_(diffs[0][0][0], "modify_default")
+ eq_(diffs[0][0][2], "user")
+ eq_(diffs[0][0][3], "foo")
+ old = diffs[0][0][-2]
+ new = diffs[0][0][-1]
+
+ is_(new, None)
+ is_true(isinstance(old, sa.DefaultClause))
+
+ if exclusions.against(config, "postgresql"):
+ eq_(str(old.arg.text), "(bar + 42)")
+ elif exclusions.against(config, "oracle"):
+ eq_(str(old.arg.text), '"BAR"+42')
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/testing/suite/test_autogen_diffs.py b/Backend/venv/lib/python3.12/site-packages/alembic/testing/suite/test_autogen_diffs.py
new file mode 100644
index 00000000..75bcd37a
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/alembic/testing/suite/test_autogen_diffs.py
@@ -0,0 +1,273 @@
+from sqlalchemy import BigInteger
+from sqlalchemy import Column
+from sqlalchemy import Integer
+from sqlalchemy import MetaData
+from sqlalchemy import Table
+from sqlalchemy.testing import in_
+
+from ._autogen_fixtures import AutogenFixtureTest
+from ... import testing
+from ...testing import config
+from ...testing import eq_
+from ...testing import is_
+from ...testing import TestBase
+
+
+class AlterColumnTest(AutogenFixtureTest, TestBase):
+ __backend__ = True
+
+ @testing.combinations((True,), (False,))
+ @config.requirements.comments
+ def test_all_existings_filled(self, pk):
+ m1 = MetaData()
+ m2 = MetaData()
+
+ Table("a", m1, Column("x", Integer, primary_key=pk))
+ Table("a", m2, Column("x", Integer, comment="x", primary_key=pk))
+
+ alter_col = self._assert_alter_col(m1, m2, pk)
+ eq_(alter_col.modify_comment, "x")
+
+ @testing.combinations((True,), (False,))
+ @config.requirements.comments
+ def test_all_existings_filled_in_notnull(self, pk):
+ m1 = MetaData()
+ m2 = MetaData()
+
+ Table("a", m1, Column("x", Integer, nullable=False, primary_key=pk))
+ Table(
+ "a",
+ m2,
+ Column("x", Integer, nullable=False, comment="x", primary_key=pk),
+ )
+
+ self._assert_alter_col(m1, m2, pk, nullable=False)
+
+ @testing.combinations((True,), (False,))
+ @config.requirements.comments
+ def test_all_existings_filled_in_comment(self, pk):
+ m1 = MetaData()
+ m2 = MetaData()
+
+ Table("a", m1, Column("x", Integer, comment="old", primary_key=pk))
+ Table("a", m2, Column("x", Integer, comment="new", primary_key=pk))
+
+ alter_col = self._assert_alter_col(m1, m2, pk)
+ eq_(alter_col.existing_comment, "old")
+
+ @testing.combinations((True,), (False,))
+ @config.requirements.comments
+ def test_all_existings_filled_in_server_default(self, pk):
+ m1 = MetaData()
+ m2 = MetaData()
+
+ Table(
+ "a", m1, Column("x", Integer, server_default="5", primary_key=pk)
+ )
+ Table(
+ "a",
+ m2,
+ Column(
+ "x", Integer, server_default="5", comment="new", primary_key=pk
+ ),
+ )
+
+ alter_col = self._assert_alter_col(m1, m2, pk)
+ in_("5", alter_col.existing_server_default.arg.text)
+
+ def _assert_alter_col(self, m1, m2, pk, nullable=None):
+ ops = self._fixture(m1, m2, return_ops=True)
+ modify_table = ops.ops[-1]
+ alter_col = modify_table.ops[0]
+
+ if nullable is None:
+ eq_(alter_col.existing_nullable, not pk)
+ else:
+ eq_(alter_col.existing_nullable, nullable)
+ assert alter_col.existing_type._compare_type_affinity(Integer())
+ return alter_col
+
+
+class AutoincrementTest(AutogenFixtureTest, TestBase):
+ __backend__ = True
+ __requires__ = ("integer_subtype_comparisons",)
+
+ def test_alter_column_autoincrement_none(self):
+ m1 = MetaData()
+ m2 = MetaData()
+
+ Table("a", m1, Column("x", Integer, nullable=False))
+ Table("a", m2, Column("x", Integer, nullable=True))
+
+ ops = self._fixture(m1, m2, return_ops=True)
+ assert "autoincrement" not in ops.ops[0].ops[0].kw
+
+ def test_alter_column_autoincrement_pk_false(self):
+ m1 = MetaData()
+ m2 = MetaData()
+
+ Table(
+ "a",
+ m1,
+ Column("x", Integer, primary_key=True, autoincrement=False),
+ )
+ Table(
+ "a",
+ m2,
+ Column("x", BigInteger, primary_key=True, autoincrement=False),
+ )
+
+ ops = self._fixture(m1, m2, return_ops=True)
+ is_(ops.ops[0].ops[0].kw["autoincrement"], False)
+
+ def test_alter_column_autoincrement_pk_implicit_true(self):
+ m1 = MetaData()
+ m2 = MetaData()
+
+ Table("a", m1, Column("x", Integer, primary_key=True))
+ Table("a", m2, Column("x", BigInteger, primary_key=True))
+
+ ops = self._fixture(m1, m2, return_ops=True)
+ is_(ops.ops[0].ops[0].kw["autoincrement"], True)
+
+ def test_alter_column_autoincrement_pk_explicit_true(self):
+ m1 = MetaData()
+ m2 = MetaData()
+
+ Table(
+ "a", m1, Column("x", Integer, primary_key=True, autoincrement=True)
+ )
+ Table(
+ "a",
+ m2,
+ Column("x", BigInteger, primary_key=True, autoincrement=True),
+ )
+
+ ops = self._fixture(m1, m2, return_ops=True)
+ is_(ops.ops[0].ops[0].kw["autoincrement"], True)
+
+ def test_alter_column_autoincrement_nonpk_false(self):
+ m1 = MetaData()
+ m2 = MetaData()
+
+ Table(
+ "a",
+ m1,
+ Column("id", Integer, primary_key=True),
+ Column("x", Integer, autoincrement=False),
+ )
+ Table(
+ "a",
+ m2,
+ Column("id", Integer, primary_key=True),
+ Column("x", BigInteger, autoincrement=False),
+ )
+
+ ops = self._fixture(m1, m2, return_ops=True)
+ is_(ops.ops[0].ops[0].kw["autoincrement"], False)
+
+ def test_alter_column_autoincrement_nonpk_implicit_false(self):
+ m1 = MetaData()
+ m2 = MetaData()
+
+ Table(
+ "a",
+ m1,
+ Column("id", Integer, primary_key=True),
+ Column("x", Integer),
+ )
+ Table(
+ "a",
+ m2,
+ Column("id", Integer, primary_key=True),
+ Column("x", BigInteger),
+ )
+
+ ops = self._fixture(m1, m2, return_ops=True)
+ assert "autoincrement" not in ops.ops[0].ops[0].kw
+
+ def test_alter_column_autoincrement_nonpk_explicit_true(self):
+ m1 = MetaData()
+ m2 = MetaData()
+
+ Table(
+ "a",
+ m1,
+ Column("id", Integer, primary_key=True, autoincrement=False),
+ Column("x", Integer, autoincrement=True),
+ )
+ Table(
+ "a",
+ m2,
+ Column("id", Integer, primary_key=True, autoincrement=False),
+ Column("x", BigInteger, autoincrement=True),
+ )
+
+ ops = self._fixture(m1, m2, return_ops=True)
+ is_(ops.ops[0].ops[0].kw["autoincrement"], True)
+
+ def test_alter_column_autoincrement_compositepk_false(self):
+ m1 = MetaData()
+ m2 = MetaData()
+
+ Table(
+ "a",
+ m1,
+ Column("id", Integer, primary_key=True),
+ Column("x", Integer, primary_key=True, autoincrement=False),
+ )
+ Table(
+ "a",
+ m2,
+ Column("id", Integer, primary_key=True),
+ Column("x", BigInteger, primary_key=True, autoincrement=False),
+ )
+
+ ops = self._fixture(m1, m2, return_ops=True)
+ is_(ops.ops[0].ops[0].kw["autoincrement"], False)
+
+ def test_alter_column_autoincrement_compositepk_implicit_false(self):
+ m1 = MetaData()
+ m2 = MetaData()
+
+ Table(
+ "a",
+ m1,
+ Column("id", Integer, primary_key=True),
+ Column("x", Integer, primary_key=True),
+ )
+ Table(
+ "a",
+ m2,
+ Column("id", Integer, primary_key=True),
+ Column("x", BigInteger, primary_key=True),
+ )
+
+ ops = self._fixture(m1, m2, return_ops=True)
+ assert "autoincrement" not in ops.ops[0].ops[0].kw
+
+ @config.requirements.autoincrement_on_composite_pk
+ def test_alter_column_autoincrement_compositepk_explicit_true(self):
+ m1 = MetaData()
+ m2 = MetaData()
+
+ Table(
+ "a",
+ m1,
+ Column("id", Integer, primary_key=True, autoincrement=False),
+ Column("x", Integer, primary_key=True, autoincrement=True),
+            # on SQLA 1.0 and earlier, having this present
+            # trips the "add KEY for the primary key" logic so that the
+            # AUTO_INCREMENT keyword is accepted by MySQL; on SQLA 1.1 and
+            # greater the columns are simply reorganized.
+ mysql_engine="InnoDB",
+ )
+ Table(
+ "a",
+ m2,
+ Column("id", Integer, primary_key=True, autoincrement=False),
+ Column("x", BigInteger, primary_key=True, autoincrement=True),
+ )
+
+ ops = self._fixture(m1, m2, return_ops=True)
+ is_(ops.ops[0].ops[0].kw["autoincrement"], True)
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/testing/suite/test_autogen_fks.py b/Backend/venv/lib/python3.12/site-packages/alembic/testing/suite/test_autogen_fks.py
new file mode 100644
index 00000000..0240b98d
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/alembic/testing/suite/test_autogen_fks.py
@@ -0,0 +1,1190 @@
+from sqlalchemy import Column
+from sqlalchemy import ForeignKeyConstraint
+from sqlalchemy import Integer
+from sqlalchemy import MetaData
+from sqlalchemy import String
+from sqlalchemy import Table
+
+from ._autogen_fixtures import AutogenFixtureTest
+from ...testing import combinations
+from ...testing import config
+from ...testing import eq_
+from ...testing import mock
+from ...testing import TestBase
+
+
+class AutogenerateForeignKeysTest(AutogenFixtureTest, TestBase):
+ __backend__ = True
+ __requires__ = ("foreign_key_constraint_reflection",)
+
+ def test_remove_fk(self):
+ m1 = MetaData()
+ m2 = MetaData()
+
+ Table(
+ "some_table",
+ m1,
+ Column("test", String(10), primary_key=True),
+ )
+
+ Table(
+ "user",
+ m1,
+ Column("id", Integer, primary_key=True),
+ Column("name", String(50), nullable=False),
+ Column("a1", String(10), server_default="x"),
+ Column("test2", String(10)),
+ ForeignKeyConstraint(["test2"], ["some_table.test"]),
+ )
+
+ Table(
+ "some_table",
+ m2,
+ Column("test", String(10), primary_key=True),
+ )
+
+ Table(
+ "user",
+ m2,
+ Column("id", Integer, primary_key=True),
+ Column("name", String(50), nullable=False),
+ Column("a1", String(10), server_default="x"),
+ Column("test2", String(10)),
+ )
+
+ diffs = self._fixture(m1, m2)
+
+ self._assert_fk_diff(
+ diffs[0],
+ "remove_fk",
+ "user",
+ ["test2"],
+ "some_table",
+ ["test"],
+ conditional_name="servergenerated",
+ )
+
+ def test_add_fk(self):
+ m1 = MetaData()
+ m2 = MetaData()
+
+ Table(
+ "some_table",
+ m1,
+ Column("id", Integer, primary_key=True),
+ Column("test", String(10)),
+ )
+
+ Table(
+ "user",
+ m1,
+ Column("id", Integer, primary_key=True),
+ Column("name", String(50), nullable=False),
+ Column("a1", String(10), server_default="x"),
+ Column("test2", String(10)),
+ )
+
+ Table(
+ "some_table",
+ m2,
+ Column("id", Integer, primary_key=True),
+ Column("test", String(10)),
+ )
+
+ Table(
+ "user",
+ m2,
+ Column("id", Integer, primary_key=True),
+ Column("name", String(50), nullable=False),
+ Column("a1", String(10), server_default="x"),
+ Column("test2", String(10)),
+ ForeignKeyConstraint(["test2"], ["some_table.test"]),
+ )
+
+ diffs = self._fixture(m1, m2)
+
+ self._assert_fk_diff(
+ diffs[0], "add_fk", "user", ["test2"], "some_table", ["test"]
+ )
+
+ def test_no_change(self):
+ m1 = MetaData()
+ m2 = MetaData()
+
+ Table(
+ "some_table",
+ m1,
+ Column("id", Integer, primary_key=True),
+ Column("test", String(10)),
+ )
+
+ Table(
+ "user",
+ m1,
+ Column("id", Integer, primary_key=True),
+ Column("name", String(50), nullable=False),
+ Column("a1", String(10), server_default="x"),
+ Column("test2", Integer),
+ ForeignKeyConstraint(["test2"], ["some_table.id"]),
+ )
+
+ Table(
+ "some_table",
+ m2,
+ Column("id", Integer, primary_key=True),
+ Column("test", String(10)),
+ )
+
+ Table(
+ "user",
+ m2,
+ Column("id", Integer, primary_key=True),
+ Column("name", String(50), nullable=False),
+ Column("a1", String(10), server_default="x"),
+ Column("test2", Integer),
+ ForeignKeyConstraint(["test2"], ["some_table.id"]),
+ )
+
+ diffs = self._fixture(m1, m2)
+
+ eq_(diffs, [])
+
+ def test_no_change_composite_fk(self):
+ m1 = MetaData()
+ m2 = MetaData()
+
+ Table(
+ "some_table",
+ m1,
+ Column("id_1", String(10), primary_key=True),
+ Column("id_2", String(10), primary_key=True),
+ )
+
+ Table(
+ "user",
+ m1,
+ Column("id", Integer, primary_key=True),
+ Column("name", String(50), nullable=False),
+ Column("a1", String(10), server_default="x"),
+ Column("other_id_1", String(10)),
+ Column("other_id_2", String(10)),
+ ForeignKeyConstraint(
+ ["other_id_1", "other_id_2"],
+ ["some_table.id_1", "some_table.id_2"],
+ ),
+ )
+
+ Table(
+ "some_table",
+ m2,
+ Column("id_1", String(10), primary_key=True),
+ Column("id_2", String(10), primary_key=True),
+ )
+
+ Table(
+ "user",
+ m2,
+ Column("id", Integer, primary_key=True),
+ Column("name", String(50), nullable=False),
+ Column("a1", String(10), server_default="x"),
+ Column("other_id_1", String(10)),
+ Column("other_id_2", String(10)),
+ ForeignKeyConstraint(
+ ["other_id_1", "other_id_2"],
+ ["some_table.id_1", "some_table.id_2"],
+ ),
+ )
+
+ diffs = self._fixture(m1, m2)
+
+ eq_(diffs, [])
+
+ def test_casing_convention_changed_so_put_drops_first(self):
+ m1 = MetaData()
+ m2 = MetaData()
+
+ Table(
+ "some_table",
+ m1,
+ Column("test", String(10), primary_key=True),
+ )
+
+ Table(
+ "user",
+ m1,
+ Column("id", Integer, primary_key=True),
+ Column("name", String(50), nullable=False),
+ Column("a1", String(10), server_default="x"),
+ Column("test2", String(10)),
+ ForeignKeyConstraint(["test2"], ["some_table.test"], name="MyFK"),
+ )
+
+ Table(
+ "some_table",
+ m2,
+ Column("test", String(10), primary_key=True),
+ )
+
+ # foreign key autogen currently does not take "name" into account,
+ # so change the def just for the purposes of testing the
+ # add/drop order for now.
+ Table(
+ "user",
+ m2,
+ Column("id", Integer, primary_key=True),
+ Column("name", String(50), nullable=False),
+ Column("a1", String(10), server_default="x"),
+ Column("test2", String(10)),
+ ForeignKeyConstraint(["a1"], ["some_table.test"], name="myfk"),
+ )
+
+ diffs = self._fixture(m1, m2)
+
+ self._assert_fk_diff(
+ diffs[0],
+ "remove_fk",
+ "user",
+ ["test2"],
+ "some_table",
+ ["test"],
+ name="MyFK" if config.requirements.fk_names.enabled else None,
+ )
+
+ self._assert_fk_diff(
+ diffs[1],
+ "add_fk",
+ "user",
+ ["a1"],
+ "some_table",
+ ["test"],
+ name="myfk",
+ )
+
+ def test_add_composite_fk_with_name(self):
+ m1 = MetaData()
+ m2 = MetaData()
+
+ Table(
+ "some_table",
+ m1,
+ Column("id_1", String(10), primary_key=True),
+ Column("id_2", String(10), primary_key=True),
+ )
+
+ Table(
+ "user",
+ m1,
+ Column("id", Integer, primary_key=True),
+ Column("name", String(50), nullable=False),
+ Column("a1", String(10), server_default="x"),
+ Column("other_id_1", String(10)),
+ Column("other_id_2", String(10)),
+ )
+
+ Table(
+ "some_table",
+ m2,
+ Column("id_1", String(10), primary_key=True),
+ Column("id_2", String(10), primary_key=True),
+ )
+
+ Table(
+ "user",
+ m2,
+ Column("id", Integer, primary_key=True),
+ Column("name", String(50), nullable=False),
+ Column("a1", String(10), server_default="x"),
+ Column("other_id_1", String(10)),
+ Column("other_id_2", String(10)),
+ ForeignKeyConstraint(
+ ["other_id_1", "other_id_2"],
+ ["some_table.id_1", "some_table.id_2"],
+ name="fk_test_name",
+ ),
+ )
+
+ diffs = self._fixture(m1, m2)
+ self._assert_fk_diff(
+ diffs[0],
+ "add_fk",
+ "user",
+ ["other_id_1", "other_id_2"],
+ "some_table",
+ ["id_1", "id_2"],
+ name="fk_test_name",
+ )
+
+ @config.requirements.no_name_normalize
+ def test_remove_composite_fk(self):
+ m1 = MetaData()
+ m2 = MetaData()
+
+ Table(
+ "some_table",
+ m1,
+ Column("id_1", String(10), primary_key=True),
+ Column("id_2", String(10), primary_key=True),
+ )
+
+ Table(
+ "user",
+ m1,
+ Column("id", Integer, primary_key=True),
+ Column("name", String(50), nullable=False),
+ Column("a1", String(10), server_default="x"),
+ Column("other_id_1", String(10)),
+ Column("other_id_2", String(10)),
+ ForeignKeyConstraint(
+ ["other_id_1", "other_id_2"],
+ ["some_table.id_1", "some_table.id_2"],
+ name="fk_test_name",
+ ),
+ )
+
+ Table(
+ "some_table",
+ m2,
+ Column("id_1", String(10), primary_key=True),
+ Column("id_2", String(10), primary_key=True),
+ )
+
+ Table(
+ "user",
+ m2,
+ Column("id", Integer, primary_key=True),
+ Column("name", String(50), nullable=False),
+ Column("a1", String(10), server_default="x"),
+ Column("other_id_1", String(10)),
+ Column("other_id_2", String(10)),
+ )
+
+ diffs = self._fixture(m1, m2)
+
+ self._assert_fk_diff(
+ diffs[0],
+ "remove_fk",
+ "user",
+ ["other_id_1", "other_id_2"],
+ "some_table",
+ ["id_1", "id_2"],
+ conditional_name="fk_test_name",
+ )
+
+ def test_add_fk_colkeys(self):
+ m1 = MetaData()
+ m2 = MetaData()
+
+ Table(
+ "some_table",
+ m1,
+ Column("id_1", String(10), primary_key=True),
+ Column("id_2", String(10), primary_key=True),
+ )
+
+ Table(
+ "user",
+ m1,
+ Column("id", Integer, primary_key=True),
+ Column("other_id_1", String(10)),
+ Column("other_id_2", String(10)),
+ )
+
+ Table(
+ "some_table",
+ m2,
+ Column("id_1", String(10), key="tid1", primary_key=True),
+ Column("id_2", String(10), key="tid2", primary_key=True),
+ )
+
+ Table(
+ "user",
+ m2,
+ Column("id", Integer, primary_key=True),
+ Column("other_id_1", String(10), key="oid1"),
+ Column("other_id_2", String(10), key="oid2"),
+ ForeignKeyConstraint(
+ ["oid1", "oid2"],
+ ["some_table.tid1", "some_table.tid2"],
+ name="fk_test_name",
+ ),
+ )
+
+ diffs = self._fixture(m1, m2)
+
+ self._assert_fk_diff(
+ diffs[0],
+ "add_fk",
+ "user",
+ ["other_id_1", "other_id_2"],
+ "some_table",
+ ["id_1", "id_2"],
+ name="fk_test_name",
+ )
+
+ def test_no_change_colkeys(self):
+ m1 = MetaData()
+ m2 = MetaData()
+
+ Table(
+ "some_table",
+ m1,
+ Column("id_1", String(10), primary_key=True),
+ Column("id_2", String(10), primary_key=True),
+ )
+
+ Table(
+ "user",
+ m1,
+ Column("id", Integer, primary_key=True),
+ Column("other_id_1", String(10)),
+ Column("other_id_2", String(10)),
+ ForeignKeyConstraint(
+ ["other_id_1", "other_id_2"],
+ ["some_table.id_1", "some_table.id_2"],
+ ),
+ )
+
+ Table(
+ "some_table",
+ m2,
+ Column("id_1", String(10), key="tid1", primary_key=True),
+ Column("id_2", String(10), key="tid2", primary_key=True),
+ )
+
+ Table(
+ "user",
+ m2,
+ Column("id", Integer, primary_key=True),
+ Column("other_id_1", String(10), key="oid1"),
+ Column("other_id_2", String(10), key="oid2"),
+ ForeignKeyConstraint(
+ ["oid1", "oid2"], ["some_table.tid1", "some_table.tid2"]
+ ),
+ )
+
+ diffs = self._fixture(m1, m2)
+
+ eq_(diffs, [])
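+        # No diff is expected: autogenerate compares by database column
+        # name, so the Python-side ``key`` aliases are irrelevant here.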
+
+
+class IncludeHooksTest(AutogenFixtureTest, TestBase):
+ __backend__ = True
+ __requires__ = ("fk_names",)
+
+ @combinations(("object",), ("name",))
+ @config.requirements.no_name_normalize
+ def test_remove_connection_fk(self, hook_type):
+ m1 = MetaData()
+ m2 = MetaData()
+
+ ref = Table(
+ "ref",
+ m1,
+ Column("id", Integer, primary_key=True),
+ )
+ t1 = Table(
+ "t",
+ m1,
+ Column("x", Integer),
+ Column("y", Integer),
+ )
+ t1.append_constraint(
+ ForeignKeyConstraint([t1.c.x], [ref.c.id], name="fk1")
+ )
+ t1.append_constraint(
+ ForeignKeyConstraint([t1.c.y], [ref.c.id], name="fk2")
+ )
+
+ ref = Table(
+ "ref",
+ m2,
+ Column("id", Integer, primary_key=True),
+ )
+ Table(
+ "t",
+ m2,
+ Column("x", Integer),
+ Column("y", Integer),
+ )
+
+ if hook_type == "object":
+
+ def include_object(object_, name, type_, reflected, compare_to):
+ return not (
+ isinstance(object_, ForeignKeyConstraint)
+ and type_ == "foreign_key_constraint"
+ and reflected
+ and name == "fk1"
+ )
+
+ diffs = self._fixture(m1, m2, object_filters=include_object)
+ elif hook_type == "name":
+
+ def include_name(name, type_, parent_names):
+ if name == "fk1":
+                    if type_ == "index":  # MariaDB also reflects the FK's index
+ return True
+ eq_(type_, "foreign_key_constraint")
+ eq_(
+ parent_names,
+ {
+ "schema_name": None,
+ "table_name": "t",
+ "schema_qualified_table_name": "t",
+ },
+ )
+ return False
+ else:
+ return True
+
+ diffs = self._fixture(m1, m2, name_filters=include_name)
+
+ self._assert_fk_diff(
+ diffs[0],
+ "remove_fk",
+ "t",
+ ["y"],
+ "ref",
+ ["id"],
+ conditional_name="fk2",
+ )
+ eq_(len(diffs), 1)
+
+ def test_add_metadata_fk(self):
+ m1 = MetaData()
+ m2 = MetaData()
+
+ Table(
+ "ref",
+ m1,
+ Column("id", Integer, primary_key=True),
+ )
+ Table(
+ "t",
+ m1,
+ Column("x", Integer),
+ Column("y", Integer),
+ )
+
+ ref = Table(
+ "ref",
+ m2,
+ Column("id", Integer, primary_key=True),
+ )
+ t2 = Table(
+ "t",
+ m2,
+ Column("x", Integer),
+ Column("y", Integer),
+ )
+ t2.append_constraint(
+ ForeignKeyConstraint([t2.c.x], [ref.c.id], name="fk1")
+ )
+ t2.append_constraint(
+ ForeignKeyConstraint([t2.c.y], [ref.c.id], name="fk2")
+ )
+
+ def include_object(object_, name, type_, reflected, compare_to):
+ return not (
+ isinstance(object_, ForeignKeyConstraint)
+ and type_ == "foreign_key_constraint"
+ and not reflected
+ and name == "fk1"
+ )
+
+ diffs = self._fixture(m1, m2, object_filters=include_object)
+
+ self._assert_fk_diff(
+ diffs[0], "add_fk", "t", ["y"], "ref", ["id"], name="fk2"
+ )
+ eq_(len(diffs), 1)
+
+ @combinations(("object",), ("name",))
+ @config.requirements.no_name_normalize
+ def test_change_fk(self, hook_type):
+ m1 = MetaData()
+ m2 = MetaData()
+
+ r1a = Table(
+ "ref_a",
+ m1,
+ Column("a", Integer, primary_key=True),
+ )
+ Table(
+ "ref_b",
+ m1,
+ Column("a", Integer, primary_key=True),
+ Column("b", Integer, primary_key=True),
+ )
+ t1 = Table(
+ "t",
+ m1,
+ Column("x", Integer),
+ Column("y", Integer),
+ Column("z", Integer),
+ )
+ t1.append_constraint(
+ ForeignKeyConstraint([t1.c.x], [r1a.c.a], name="fk1")
+ )
+ t1.append_constraint(
+ ForeignKeyConstraint([t1.c.y], [r1a.c.a], name="fk2")
+ )
+
+ Table(
+ "ref_a",
+ m2,
+ Column("a", Integer, primary_key=True),
+ )
+ r2b = Table(
+ "ref_b",
+ m2,
+ Column("a", Integer, primary_key=True),
+ Column("b", Integer, primary_key=True),
+ )
+ t2 = Table(
+ "t",
+ m2,
+ Column("x", Integer),
+ Column("y", Integer),
+ Column("z", Integer),
+ )
+ t2.append_constraint(
+ ForeignKeyConstraint(
+ [t2.c.x, t2.c.z], [r2b.c.a, r2b.c.b], name="fk1"
+ )
+ )
+ t2.append_constraint(
+ ForeignKeyConstraint(
+ [t2.c.y, t2.c.z], [r2b.c.a, r2b.c.b], name="fk2"
+ )
+ )
+
+ if hook_type == "object":
+
+ def include_object(object_, name, type_, reflected, compare_to):
+ return not (
+ isinstance(object_, ForeignKeyConstraint)
+ and type_ == "foreign_key_constraint"
+ and name == "fk1"
+ )
+
+ diffs = self._fixture(m1, m2, object_filters=include_object)
+ elif hook_type == "name":
+
+ def include_name(name, type_, parent_names):
+ if type_ == "index":
+                    return True  # MariaDB also reflects the FK's index
+
+ if name == "fk1":
+ eq_(type_, "foreign_key_constraint")
+ eq_(
+ parent_names,
+ {
+ "schema_name": None,
+ "table_name": "t",
+ "schema_qualified_table_name": "t",
+ },
+ )
+ return False
+ else:
+ return True
+
+ diffs = self._fixture(m1, m2, name_filters=include_name)
+
+ if hook_type == "object":
+ self._assert_fk_diff(
+ diffs[0], "remove_fk", "t", ["y"], "ref_a", ["a"], name="fk2"
+ )
+ self._assert_fk_diff(
+ diffs[1],
+ "add_fk",
+ "t",
+ ["y", "z"],
+ "ref_b",
+ ["a", "b"],
+ name="fk2",
+ )
+ eq_(len(diffs), 2)
+ elif hook_type == "name":
+ eq_(
+ {(d[0], d[1].name) for d in diffs},
+ {("add_fk", "fk2"), ("add_fk", "fk1"), ("remove_fk", "fk2")},
+ )
+
+
+class AutogenerateFKOptionsTest(AutogenFixtureTest, TestBase):
+ __backend__ = True
+
+ def _fk_opts_fixture(self, old_opts, new_opts):
+ m1 = MetaData()
+ m2 = MetaData()
+
+ Table(
+ "some_table",
+ m1,
+ Column("id", Integer, primary_key=True),
+ Column("test", String(10)),
+ )
+
+ Table(
+ "user",
+ m1,
+ Column("id", Integer, primary_key=True),
+ Column("name", String(50), nullable=False),
+ Column("tid", Integer),
+ ForeignKeyConstraint(["tid"], ["some_table.id"], **old_opts),
+ )
+
+ Table(
+ "some_table",
+ m2,
+ Column("id", Integer, primary_key=True),
+ Column("test", String(10)),
+ )
+
+ Table(
+ "user",
+ m2,
+ Column("id", Integer, primary_key=True),
+ Column("name", String(50), nullable=False),
+ Column("tid", Integer),
+ ForeignKeyConstraint(["tid"], ["some_table.id"], **new_opts),
+ )
+
+ return self._fixture(m1, m2)
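+
+    # Each test below runs old/new FK keyword arguments through this fixture
+    # and asserts either an empty diff or a remove_fk/add_fk pair.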
+
+ @config.requirements.fk_ondelete_is_reflected
+ def test_add_ondelete(self):
+ diffs = self._fk_opts_fixture({}, {"ondelete": "cascade"})
+
+ self._assert_fk_diff(
+ diffs[0],
+ "remove_fk",
+ "user",
+ ["tid"],
+ "some_table",
+ ["id"],
+ ondelete=None,
+ conditional_name="servergenerated",
+ )
+
+ self._assert_fk_diff(
+ diffs[1],
+ "add_fk",
+ "user",
+ ["tid"],
+ "some_table",
+ ["id"],
+ ondelete="cascade",
+ )
+
+ @config.requirements.fk_ondelete_is_reflected
+ def test_remove_ondelete(self):
+ diffs = self._fk_opts_fixture({"ondelete": "CASCADE"}, {})
+
+ self._assert_fk_diff(
+ diffs[0],
+ "remove_fk",
+ "user",
+ ["tid"],
+ "some_table",
+ ["id"],
+ ondelete="CASCADE",
+ conditional_name="servergenerated",
+ )
+
+ self._assert_fk_diff(
+ diffs[1],
+ "add_fk",
+ "user",
+ ["tid"],
+ "some_table",
+ ["id"],
+ ondelete=None,
+ )
+
+ def test_nochange_ondelete(self):
+ """test case sensitivity"""
+ diffs = self._fk_opts_fixture(
+ {"ondelete": "caSCAde"}, {"ondelete": "CasCade"}
+ )
+ eq_(diffs, [])
+
+ @config.requirements.fk_onupdate_is_reflected
+ def test_add_onupdate(self):
+ diffs = self._fk_opts_fixture({}, {"onupdate": "cascade"})
+
+ self._assert_fk_diff(
+ diffs[0],
+ "remove_fk",
+ "user",
+ ["tid"],
+ "some_table",
+ ["id"],
+ onupdate=None,
+ conditional_name="servergenerated",
+ )
+
+ self._assert_fk_diff(
+ diffs[1],
+ "add_fk",
+ "user",
+ ["tid"],
+ "some_table",
+ ["id"],
+ onupdate="cascade",
+ )
+
+ @config.requirements.fk_onupdate_is_reflected
+ def test_remove_onupdate(self):
+ diffs = self._fk_opts_fixture({"onupdate": "CASCADE"}, {})
+
+ self._assert_fk_diff(
+ diffs[0],
+ "remove_fk",
+ "user",
+ ["tid"],
+ "some_table",
+ ["id"],
+ onupdate="CASCADE",
+ conditional_name="servergenerated",
+ )
+
+ self._assert_fk_diff(
+ diffs[1],
+ "add_fk",
+ "user",
+ ["tid"],
+ "some_table",
+ ["id"],
+ onupdate=None,
+ )
+
+ @config.requirements.fk_onupdate
+ def test_nochange_onupdate(self):
+ """test case sensitivity"""
+ diffs = self._fk_opts_fixture(
+ {"onupdate": "caSCAde"}, {"onupdate": "CasCade"}
+ )
+ eq_(diffs, [])
+
+ @config.requirements.fk_ondelete_restrict
+ def test_nochange_ondelete_restrict(self):
+ """test the RESTRICT option which MySQL doesn't report on"""
+
+ diffs = self._fk_opts_fixture(
+ {"ondelete": "restrict"}, {"ondelete": "restrict"}
+ )
+ eq_(diffs, [])
+
+ @config.requirements.fk_onupdate_restrict
+ def test_nochange_onupdate_restrict(self):
+ """test the RESTRICT option which MySQL doesn't report on"""
+
+ diffs = self._fk_opts_fixture(
+ {"onupdate": "restrict"}, {"onupdate": "restrict"}
+ )
+ eq_(diffs, [])
+
+ @config.requirements.fk_ondelete_noaction
+ def test_nochange_ondelete_noaction(self):
+ """test the NO ACTION option which generally comes back as None"""
+
+ diffs = self._fk_opts_fixture(
+ {"ondelete": "no action"}, {"ondelete": "no action"}
+ )
+ eq_(diffs, [])
+
+ @config.requirements.fk_onupdate
+ def test_nochange_onupdate_noaction(self):
+ """test the NO ACTION option which generally comes back as None"""
+
+ diffs = self._fk_opts_fixture(
+ {"onupdate": "no action"}, {"onupdate": "no action"}
+ )
+ eq_(diffs, [])
+
+ @config.requirements.fk_ondelete_restrict
+ def test_change_ondelete_from_restrict(self):
+ """test the RESTRICT option which MySQL doesn't report on"""
+
+ # note that this is impossible to detect if we change
+ # from RESTRICT to NO ACTION on MySQL.
+ diffs = self._fk_opts_fixture(
+ {"ondelete": "restrict"}, {"ondelete": "cascade"}
+ )
+ self._assert_fk_diff(
+ diffs[0],
+ "remove_fk",
+ "user",
+ ["tid"],
+ "some_table",
+ ["id"],
+ onupdate=None,
+ ondelete=mock.ANY, # MySQL reports None, PG reports RESTRICT
+ conditional_name="servergenerated",
+ )
+
+ self._assert_fk_diff(
+ diffs[1],
+ "add_fk",
+ "user",
+ ["tid"],
+ "some_table",
+ ["id"],
+ onupdate=None,
+ ondelete="cascade",
+ )
+
+ @config.requirements.fk_ondelete_restrict
+ def test_change_onupdate_from_restrict(self):
+ """test the RESTRICT option which MySQL doesn't report on"""
+
+ # note that this is impossible to detect if we change
+ # from RESTRICT to NO ACTION on MySQL.
+ diffs = self._fk_opts_fixture(
+ {"onupdate": "restrict"}, {"onupdate": "cascade"}
+ )
+ self._assert_fk_diff(
+ diffs[0],
+ "remove_fk",
+ "user",
+ ["tid"],
+ "some_table",
+ ["id"],
+ onupdate=mock.ANY, # MySQL reports None, PG reports RESTRICT
+ ondelete=None,
+ conditional_name="servergenerated",
+ )
+
+ self._assert_fk_diff(
+ diffs[1],
+ "add_fk",
+ "user",
+ ["tid"],
+ "some_table",
+ ["id"],
+ onupdate="cascade",
+ ondelete=None,
+ )
+
+ @config.requirements.fk_ondelete_is_reflected
+ @config.requirements.fk_onupdate_is_reflected
+ def test_ondelete_onupdate_combo(self):
+ diffs = self._fk_opts_fixture(
+ {"onupdate": "CASCADE", "ondelete": "SET NULL"},
+ {"onupdate": "RESTRICT", "ondelete": "RESTRICT"},
+ )
+
+ self._assert_fk_diff(
+ diffs[0],
+ "remove_fk",
+ "user",
+ ["tid"],
+ "some_table",
+ ["id"],
+ onupdate="CASCADE",
+ ondelete="SET NULL",
+ conditional_name="servergenerated",
+ )
+
+ self._assert_fk_diff(
+ diffs[1],
+ "add_fk",
+ "user",
+ ["tid"],
+ "some_table",
+ ["id"],
+ onupdate="RESTRICT",
+ ondelete="RESTRICT",
+ )
+
+ @config.requirements.fk_initially
+ def test_add_initially_deferred(self):
+ diffs = self._fk_opts_fixture({}, {"initially": "deferred"})
+
+ self._assert_fk_diff(
+ diffs[0],
+ "remove_fk",
+ "user",
+ ["tid"],
+ "some_table",
+ ["id"],
+ initially=None,
+ conditional_name="servergenerated",
+ )
+
+ self._assert_fk_diff(
+ diffs[1],
+ "add_fk",
+ "user",
+ ["tid"],
+ "some_table",
+ ["id"],
+ initially="deferred",
+ )
+
+ @config.requirements.fk_initially
+ def test_remove_initially_deferred(self):
+ diffs = self._fk_opts_fixture({"initially": "deferred"}, {})
+
+ self._assert_fk_diff(
+ diffs[0],
+ "remove_fk",
+ "user",
+ ["tid"],
+ "some_table",
+ ["id"],
+ initially="DEFERRED",
+ deferrable=True,
+ conditional_name="servergenerated",
+ )
+
+ self._assert_fk_diff(
+ diffs[1],
+ "add_fk",
+ "user",
+ ["tid"],
+ "some_table",
+ ["id"],
+ initially=None,
+ )
+
+ @config.requirements.fk_deferrable
+ @config.requirements.fk_initially
+ def test_add_initially_immediate_plus_deferrable(self):
+ diffs = self._fk_opts_fixture(
+ {}, {"initially": "immediate", "deferrable": True}
+ )
+
+ self._assert_fk_diff(
+ diffs[0],
+ "remove_fk",
+ "user",
+ ["tid"],
+ "some_table",
+ ["id"],
+ initially=None,
+ conditional_name="servergenerated",
+ )
+
+ self._assert_fk_diff(
+ diffs[1],
+ "add_fk",
+ "user",
+ ["tid"],
+ "some_table",
+ ["id"],
+ initially="immediate",
+ deferrable=True,
+ )
+
+ @config.requirements.fk_deferrable
+ @config.requirements.fk_initially
+ def test_remove_initially_immediate_plus_deferrable(self):
+ diffs = self._fk_opts_fixture(
+ {"initially": "immediate", "deferrable": True}, {}
+ )
+
+ self._assert_fk_diff(
+ diffs[0],
+ "remove_fk",
+ "user",
+ ["tid"],
+ "some_table",
+ ["id"],
+ initially=None, # immediate is the default
+ deferrable=True,
+ conditional_name="servergenerated",
+ )
+
+ self._assert_fk_diff(
+ diffs[1],
+ "add_fk",
+ "user",
+ ["tid"],
+ "some_table",
+ ["id"],
+ initially=None,
+ deferrable=None,
+ )
+
+ @config.requirements.fk_initially
+ @config.requirements.fk_deferrable
+ def test_add_initially_deferrable_nochange_one(self):
+ diffs = self._fk_opts_fixture(
+ {"deferrable": True, "initially": "immediate"},
+ {"deferrable": True, "initially": "immediate"},
+ )
+
+ eq_(diffs, [])
+
+ @config.requirements.fk_initially
+ @config.requirements.fk_deferrable
+ def test_add_initially_deferrable_nochange_two(self):
+ diffs = self._fk_opts_fixture(
+ {"deferrable": True, "initially": "deferred"},
+ {"deferrable": True, "initially": "deferred"},
+ )
+
+ eq_(diffs, [])
+
+ @config.requirements.fk_initially
+ @config.requirements.fk_deferrable
+ def test_add_initially_deferrable_nochange_three(self):
+ diffs = self._fk_opts_fixture(
+ {"deferrable": None, "initially": "deferred"},
+ {"deferrable": None, "initially": "deferred"},
+ )
+
+ eq_(diffs, [])
+
+ @config.requirements.fk_deferrable
+ def test_add_deferrable(self):
+ diffs = self._fk_opts_fixture({}, {"deferrable": True})
+
+ self._assert_fk_diff(
+ diffs[0],
+ "remove_fk",
+ "user",
+ ["tid"],
+ "some_table",
+ ["id"],
+ deferrable=None,
+ conditional_name="servergenerated",
+ )
+
+ self._assert_fk_diff(
+ diffs[1],
+ "add_fk",
+ "user",
+ ["tid"],
+ "some_table",
+ ["id"],
+ deferrable=True,
+ )
+
+ @config.requirements.fk_deferrable_is_reflected
+ def test_remove_deferrable(self):
+ diffs = self._fk_opts_fixture({"deferrable": True}, {})
+
+ self._assert_fk_diff(
+ diffs[0],
+ "remove_fk",
+ "user",
+ ["tid"],
+ "some_table",
+ ["id"],
+ deferrable=True,
+ conditional_name="servergenerated",
+ )
+
+ self._assert_fk_diff(
+ diffs[1],
+ "add_fk",
+ "user",
+ ["tid"],
+ "some_table",
+ ["id"],
+ deferrable=None,
+ )
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/testing/suite/test_autogen_identity.py b/Backend/venv/lib/python3.12/site-packages/alembic/testing/suite/test_autogen_identity.py
new file mode 100644
index 00000000..3dee9fc9
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/alembic/testing/suite/test_autogen_identity.py
@@ -0,0 +1,226 @@
+import sqlalchemy as sa
+from sqlalchemy import Column
+from sqlalchemy import Integer
+from sqlalchemy import MetaData
+from sqlalchemy import Table
+
+from alembic.util import sqla_compat
+from ._autogen_fixtures import AutogenFixtureTest
+from ... import testing
+from ...testing import config
+from ...testing import eq_
+from ...testing import is_true
+from ...testing import TestBase
+
+
+class AutogenerateIdentityTest(AutogenFixtureTest, TestBase):
+ __requires__ = ("identity_columns",)
+ __backend__ = True
+
+ def test_add_identity_column(self):
+ m1 = MetaData()
+ m2 = MetaData()
+
+ Table("user", m1, Column("other", sa.Text))
+
+ Table(
+ "user",
+ m2,
+ Column("other", sa.Text),
+ Column(
+ "id",
+ Integer,
+ sa.Identity(start=5, increment=7),
+ primary_key=True,
+ ),
+ )
+
+ diffs = self._fixture(m1, m2)
+
+ eq_(diffs[0][0], "add_column")
+ eq_(diffs[0][2], "user")
+ eq_(diffs[0][3].name, "id")
+ i = diffs[0][3].identity
+
+ is_true(isinstance(i, sa.Identity))
+ eq_(i.start, 5)
+ eq_(i.increment, 7)
+
+ def test_remove_identity_column(self):
+ m1 = MetaData()
+ m2 = MetaData()
+
+ Table(
+ "user",
+ m1,
+ Column(
+ "id",
+ Integer,
+ sa.Identity(start=2, increment=3),
+ primary_key=True,
+ ),
+ )
+
+ Table("user", m2)
+
+ diffs = self._fixture(m1, m2)
+
+ eq_(diffs[0][0], "remove_column")
+ eq_(diffs[0][2], "user")
+ c = diffs[0][3]
+ eq_(c.name, "id")
+
+ is_true(isinstance(c.identity, sa.Identity))
+ eq_(c.identity.start, 2)
+ eq_(c.identity.increment, 3)
+
+ def test_no_change_identity_column(self):
+ m1 = MetaData()
+ m2 = MetaData()
+
+ for m in (m1, m2):
+ id_ = sa.Identity(start=2)
+ Table("user", m, Column("id", Integer, id_))
+
+ diffs = self._fixture(m1, m2)
+
+ eq_(diffs, [])
+
+ def test_dialect_kwargs_changes(self):
+ m1 = MetaData()
+ m2 = MetaData()
+
+ if sqla_compat.identity_has_dialect_kwargs:
+ args = {"oracle_on_null": True, "oracle_order": True}
+ else:
+ args = {"on_null": True, "order": True}
+
+ Table("user", m1, Column("id", Integer, sa.Identity(start=2)))
+ id_ = sa.Identity(start=2, **args)
+ Table("user", m2, Column("id", Integer, id_))
+
+ diffs = self._fixture(m1, m2)
+ if config.db.name == "oracle":
+            eq_(len(diffs), 1)
+ eq_(diffs[0][0][0], "modify_default")
+ else:
+ eq_(diffs, [])
+
+ @testing.combinations(
+ (None, dict(start=2)),
+ (dict(start=2), None),
+ (dict(start=2), dict(start=2, increment=7)),
+ (dict(always=False), dict(always=True)),
+ (
+ dict(start=1, minvalue=0, maxvalue=100, cycle=True),
+ dict(start=1, minvalue=0, maxvalue=100, cycle=False),
+ ),
+ (
+ dict(start=10, increment=3, maxvalue=9999),
+ dict(start=10, increment=1, maxvalue=3333),
+ ),
+ )
+ @config.requirements.identity_columns_alter
+ def test_change_identity(self, before, after):
+ arg_before = (sa.Identity(**before),) if before else ()
+ arg_after = (sa.Identity(**after),) if after else ()
+
+ m1 = MetaData()
+ m2 = MetaData()
+
+ Table(
+ "user",
+ m1,
+ Column("id", Integer, *arg_before),
+ Column("other", sa.Text),
+ )
+
+ Table(
+ "user",
+ m2,
+ Column("id", Integer, *arg_after),
+ Column("other", sa.Text),
+ )
+
+ diffs = self._fixture(m1, m2)
+
+ eq_(len(diffs[0]), 1)
+ diffs = diffs[0][0]
+ eq_(diffs[0], "modify_default")
+ eq_(diffs[2], "user")
+ eq_(diffs[3], "id")
+ old = diffs[5]
+ new = diffs[6]
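+        # Index layout of a "modify_default" entry:
+        # (op, schema, table, column, kwargs, old_default, new_default).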
+
+ def check(kw, idt):
+ if kw:
+ is_true(isinstance(idt, sa.Identity))
+ for k, v in kw.items():
+ eq_(getattr(idt, k), v)
+ else:
+ is_true(idt in (None, False))
+
+ check(before, old)
+ check(after, new)
+
+ def test_add_identity_to_column(self):
+ m1 = MetaData()
+ m2 = MetaData()
+
+ Table(
+ "user",
+ m1,
+ Column("id", Integer),
+ Column("other", sa.Text),
+ )
+
+ Table(
+ "user",
+ m2,
+ Column("id", Integer, sa.Identity(start=2, maxvalue=1000)),
+ Column("other", sa.Text),
+ )
+
+ diffs = self._fixture(m1, m2)
+
+ eq_(len(diffs[0]), 1)
+ diffs = diffs[0][0]
+ eq_(diffs[0], "modify_default")
+ eq_(diffs[2], "user")
+ eq_(diffs[3], "id")
+ eq_(diffs[5], None)
+ added = diffs[6]
+
+ is_true(isinstance(added, sa.Identity))
+ eq_(added.start, 2)
+ eq_(added.maxvalue, 1000)
+
+ def test_remove_identity_from_column(self):
+ m1 = MetaData()
+ m2 = MetaData()
+
+ Table(
+ "user",
+ m1,
+ Column("id", Integer, sa.Identity(start=2, maxvalue=1000)),
+ Column("other", sa.Text),
+ )
+
+ Table(
+ "user",
+ m2,
+ Column("id", Integer),
+ Column("other", sa.Text),
+ )
+
+ diffs = self._fixture(m1, m2)
+
+ eq_(len(diffs[0]), 1)
+ diffs = diffs[0][0]
+ eq_(diffs[0], "modify_default")
+ eq_(diffs[2], "user")
+ eq_(diffs[3], "id")
+ eq_(diffs[6], None)
+ removed = diffs[5]
+
+ is_true(isinstance(removed, sa.Identity))
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/testing/suite/test_environment.py b/Backend/venv/lib/python3.12/site-packages/alembic/testing/suite/test_environment.py
new file mode 100644
index 00000000..8c86859a
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/alembic/testing/suite/test_environment.py
@@ -0,0 +1,364 @@
+import io
+
+from ...migration import MigrationContext
+from ...testing import assert_raises
+from ...testing import config
+from ...testing import eq_
+from ...testing import is_
+from ...testing import is_false
+from ...testing import is_not_
+from ...testing import is_true
+from ...testing import ne_
+from ...testing.fixtures import TestBase
+
+
+class MigrationTransactionTest(TestBase):
+ __backend__ = True
+
+ conn = None
+
+ def _fixture(self, opts):
+ self.conn = conn = config.db.connect()
+
+ if opts.get("as_sql", False):
+ self.context = MigrationContext.configure(
+ dialect=conn.dialect, opts=opts
+ )
+ self.context.output_buffer = (
+ self.context.impl.output_buffer
+ ) = io.StringIO()
+ else:
+ self.context = MigrationContext.configure(
+ connection=conn, opts=opts
+ )
+ return self.context
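+
+    # Note: in "as_sql" mode the context writes rendered statements to an
+    # in-memory buffer rather than executing them; _assert_impl_steps()
+    # below compares emitted steps against that buffer.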
+
+ def teardown_method(self):
+ if self.conn:
+ self.conn.close()
+
+ def test_proxy_transaction_rollback(self):
+ context = self._fixture(
+ {"transaction_per_migration": True, "transactional_ddl": True}
+ )
+
+ is_false(self.conn.in_transaction())
+ proxy = context.begin_transaction(_per_migration=True)
+ is_true(self.conn.in_transaction())
+ proxy.rollback()
+ is_false(self.conn.in_transaction())
+
+ def test_proxy_transaction_commit(self):
+ context = self._fixture(
+ {"transaction_per_migration": True, "transactional_ddl": True}
+ )
+ proxy = context.begin_transaction(_per_migration=True)
+ is_true(self.conn.in_transaction())
+ proxy.commit()
+ is_false(self.conn.in_transaction())
+
+ def test_proxy_transaction_contextmanager_commit(self):
+ context = self._fixture(
+ {"transaction_per_migration": True, "transactional_ddl": True}
+ )
+ proxy = context.begin_transaction(_per_migration=True)
+ is_true(self.conn.in_transaction())
+ with proxy:
+ pass
+ is_false(self.conn.in_transaction())
+
+ def test_proxy_transaction_contextmanager_rollback(self):
+ context = self._fixture(
+ {"transaction_per_migration": True, "transactional_ddl": True}
+ )
+ proxy = context.begin_transaction(_per_migration=True)
+ is_true(self.conn.in_transaction())
+
+ def go():
+ with proxy:
+ raise Exception("hi")
+
+ assert_raises(Exception, go)
+ is_false(self.conn.in_transaction())
+
+ def test_proxy_transaction_contextmanager_explicit_rollback(self):
+ context = self._fixture(
+ {"transaction_per_migration": True, "transactional_ddl": True}
+ )
+ proxy = context.begin_transaction(_per_migration=True)
+ is_true(self.conn.in_transaction())
+
+ with proxy:
+ is_true(self.conn.in_transaction())
+ proxy.rollback()
+ is_false(self.conn.in_transaction())
+
+ is_false(self.conn.in_transaction())
+
+ def test_proxy_transaction_contextmanager_explicit_commit(self):
+ context = self._fixture(
+ {"transaction_per_migration": True, "transactional_ddl": True}
+ )
+ proxy = context.begin_transaction(_per_migration=True)
+ is_true(self.conn.in_transaction())
+
+ with proxy:
+ is_true(self.conn.in_transaction())
+ proxy.commit()
+ is_false(self.conn.in_transaction())
+
+ is_false(self.conn.in_transaction())
+
+ def test_transaction_per_migration_transactional_ddl(self):
+ context = self._fixture(
+ {"transaction_per_migration": True, "transactional_ddl": True}
+ )
+
+ is_false(self.conn.in_transaction())
+
+ with context.begin_transaction():
+ is_false(self.conn.in_transaction())
+ with context.begin_transaction(_per_migration=True):
+ is_true(self.conn.in_transaction())
+
+ is_false(self.conn.in_transaction())
+ is_false(self.conn.in_transaction())
+
+ def test_transaction_per_migration_non_transactional_ddl(self):
+ context = self._fixture(
+ {"transaction_per_migration": True, "transactional_ddl": False}
+ )
+
+ is_false(self.conn.in_transaction())
+
+ with context.begin_transaction():
+ is_false(self.conn.in_transaction())
+ with context.begin_transaction(_per_migration=True):
+ is_true(self.conn.in_transaction())
+
+ is_false(self.conn.in_transaction())
+ is_false(self.conn.in_transaction())
+
+ def test_transaction_per_all_transactional_ddl(self):
+ context = self._fixture({"transactional_ddl": True})
+
+ is_false(self.conn.in_transaction())
+
+ with context.begin_transaction():
+ is_true(self.conn.in_transaction())
+ with context.begin_transaction(_per_migration=True):
+ is_true(self.conn.in_transaction())
+
+ is_true(self.conn.in_transaction())
+ is_false(self.conn.in_transaction())
+
+ def test_transaction_per_all_non_transactional_ddl(self):
+ context = self._fixture({"transactional_ddl": False})
+
+ is_false(self.conn.in_transaction())
+
+ with context.begin_transaction():
+ is_false(self.conn.in_transaction())
+ with context.begin_transaction(_per_migration=True):
+ is_true(self.conn.in_transaction())
+
+ is_false(self.conn.in_transaction())
+ is_false(self.conn.in_transaction())
+
+ def test_transaction_per_all_sqlmode(self):
+ context = self._fixture({"as_sql": True})
+
+ context.execute("step 1")
+ with context.begin_transaction():
+ context.execute("step 2")
+ with context.begin_transaction(_per_migration=True):
+ context.execute("step 3")
+
+ context.execute("step 4")
+ context.execute("step 5")
+
+ if context.impl.transactional_ddl:
+ self._assert_impl_steps(
+ "step 1",
+ "BEGIN",
+ "step 2",
+ "step 3",
+ "step 4",
+ "COMMIT",
+ "step 5",
+ )
+ else:
+ self._assert_impl_steps(
+ "step 1", "step 2", "step 3", "step 4", "step 5"
+ )
+
+ def test_transaction_per_migration_sqlmode(self):
+ context = self._fixture(
+ {"as_sql": True, "transaction_per_migration": True}
+ )
+
+ context.execute("step 1")
+ with context.begin_transaction():
+ context.execute("step 2")
+ with context.begin_transaction(_per_migration=True):
+ context.execute("step 3")
+
+ context.execute("step 4")
+ context.execute("step 5")
+
+ if context.impl.transactional_ddl:
+ self._assert_impl_steps(
+ "step 1",
+ "step 2",
+ "BEGIN",
+ "step 3",
+ "COMMIT",
+ "step 4",
+ "step 5",
+ )
+ else:
+ self._assert_impl_steps(
+ "step 1", "step 2", "step 3", "step 4", "step 5"
+ )
+
+ @config.requirements.autocommit_isolation
+ def test_autocommit_block(self):
+ context = self._fixture({"transaction_per_migration": True})
+
+ is_false(self.conn.in_transaction())
+
+ with context.begin_transaction():
+ is_false(self.conn.in_transaction())
+ with context.begin_transaction(_per_migration=True):
+ is_true(self.conn.in_transaction())
+
+ with context.autocommit_block():
+ # in 1.x, self.conn is separate due to the
+ # execution_options call. however for future they are the
+ # same connection and there is a "transaction" block
+ # despite autocommit
+ if self.is_sqlalchemy_future:
+ is_(context.connection, self.conn)
+ else:
+ is_not_(context.connection, self.conn)
+ is_false(self.conn.in_transaction())
+
+ eq_(
+ context.connection._execution_options[
+ "isolation_level"
+ ],
+ "AUTOCOMMIT",
+ )
+
+ ne_(
+ context.connection._execution_options.get(
+ "isolation_level", None
+ ),
+ "AUTOCOMMIT",
+ )
+ is_true(self.conn.in_transaction())
+
+ is_false(self.conn.in_transaction())
+ is_false(self.conn.in_transaction())
+
+ @config.requirements.autocommit_isolation
+ def test_autocommit_block_no_transaction(self):
+ context = self._fixture({"transaction_per_migration": True})
+
+ is_false(self.conn.in_transaction())
+
+ with context.autocommit_block():
+ is_true(context.connection.in_transaction())
+
+ # in 1.x, self.conn is separate due to the execution_options
+ # call. however for future they are the same connection and there
+ # is a "transaction" block despite autocommit
+ if self.is_sqlalchemy_future:
+ is_(context.connection, self.conn)
+ else:
+ is_not_(context.connection, self.conn)
+ is_false(self.conn.in_transaction())
+
+ eq_(
+ context.connection._execution_options["isolation_level"],
+ "AUTOCOMMIT",
+ )
+
+ ne_(
+ context.connection._execution_options.get("isolation_level", None),
+ "AUTOCOMMIT",
+ )
+
+ is_false(self.conn.in_transaction())
+
+ def test_autocommit_block_transactional_ddl_sqlmode(self):
+ context = self._fixture(
+ {
+ "transaction_per_migration": True,
+ "transactional_ddl": True,
+ "as_sql": True,
+ }
+ )
+
+ with context.begin_transaction():
+ context.execute("step 1")
+ with context.begin_transaction(_per_migration=True):
+ context.execute("step 2")
+
+ with context.autocommit_block():
+ context.execute("step 3")
+
+ context.execute("step 4")
+
+ context.execute("step 5")
+
+ self._assert_impl_steps(
+ "step 1",
+ "BEGIN",
+ "step 2",
+ "COMMIT",
+ "step 3",
+ "BEGIN",
+ "step 4",
+ "COMMIT",
+ "step 5",
+ )
+
+ def test_autocommit_block_nontransactional_ddl_sqlmode(self):
+ context = self._fixture(
+ {
+ "transaction_per_migration": True,
+ "transactional_ddl": False,
+ "as_sql": True,
+ }
+ )
+
+ with context.begin_transaction():
+ context.execute("step 1")
+ with context.begin_transaction(_per_migration=True):
+ context.execute("step 2")
+
+ with context.autocommit_block():
+ context.execute("step 3")
+
+ context.execute("step 4")
+
+ context.execute("step 5")
+
+ self._assert_impl_steps(
+ "step 1", "step 2", "step 3", "step 4", "step 5"
+ )
+
+ def _assert_impl_steps(self, *steps):
+ to_check = self.context.output_buffer.getvalue()
+
+ self.context.impl.output_buffer = buf = io.StringIO()
+ for step in steps:
+ if step == "BEGIN":
+ self.context.impl.emit_begin()
+ elif step == "COMMIT":
+ self.context.impl.emit_commit()
+ else:
+ self.context.impl._exec(step)
+
+ eq_(to_check, buf.getvalue())
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/testing/suite/test_op.py b/Backend/venv/lib/python3.12/site-packages/alembic/testing/suite/test_op.py
new file mode 100644
index 00000000..a63b3f2f
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/alembic/testing/suite/test_op.py
@@ -0,0 +1,42 @@
+"""Test against the builders in the op.* module."""
+
+from sqlalchemy import Column
+from sqlalchemy import event
+from sqlalchemy import Integer
+from sqlalchemy import String
+from sqlalchemy import Table
+from sqlalchemy.sql import text
+
+from ...testing.fixtures import AlterColRoundTripFixture
+from ...testing.fixtures import TestBase
+
+
+@event.listens_for(Table, "after_parent_attach")
+def _add_cols(table, metadata):
+ if table.name == "tbl_with_auto_appended_column":
+ table.append_column(Column("bat", Integer))
+
+
+class BackendAlterColumnTest(AlterColRoundTripFixture, TestBase):
+ __backend__ = True
+
+ def test_rename_column(self):
+ self._run_alter_col({}, {"name": "newname"})
+
+ def test_modify_type_int_str(self):
+ self._run_alter_col({"type": Integer()}, {"type": String(50)})
+
+ def test_add_server_default_int(self):
+ self._run_alter_col({"type": Integer}, {"server_default": text("5")})
+
+ def test_modify_server_default_int(self):
+ self._run_alter_col(
+ {"type": Integer, "server_default": text("2")},
+ {"server_default": text("5")},
+ )
+
+ def test_modify_nullable_to_non(self):
+ self._run_alter_col({}, {"nullable": False})
+
+ def test_modify_non_nullable_to_nullable(self):
+ self._run_alter_col({"nullable": False}, {"nullable": True})
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/testing/util.py b/Backend/venv/lib/python3.12/site-packages/alembic/testing/util.py
new file mode 100644
index 00000000..4517a69f
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/alembic/testing/util.py
@@ -0,0 +1,126 @@
+# testing/util.py
+# Copyright (C) 2005-2019 the SQLAlchemy authors and contributors
+#
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+from __future__ import annotations
+
+import types
+from typing import Union
+
+from sqlalchemy.util import inspect_getfullargspec
+
+from ..util import sqla_2
+
+
+def flag_combinations(*combinations):
+ """A facade around @testing.combinations() oriented towards boolean
+ keyword-based arguments.
+
+ Basically generates a nice looking identifier based on the keywords
+ and also sets up the argument names.
+
+ E.g.::
+
+ @testing.flag_combinations(
+ dict(lazy=False, passive=False),
+ dict(lazy=True, passive=False),
+ dict(lazy=False, passive=True),
+ dict(lazy=False, passive=True, raiseload=True),
+ )
+
+
+ would result in::
+
+ @testing.combinations(
+ ('', False, False, False),
+ ('lazy', True, False, False),
+            ('passive', False, True, False),
+            ('passive_raiseload', False, True, True),
+ id_='iaaa',
+ argnames='lazy,passive,raiseload'
+ )
+
+ """
+ from sqlalchemy.testing import config
+
+ keys = set()
+
+ for d in combinations:
+ keys.update(d)
+
+ keys = sorted(keys)
+
+ return config.combinations(
+ *[
+ ("_".join(k for k in keys if d.get(k, False)),)
+ + tuple(d.get(k, False) for k in keys)
+ for d in combinations
+ ],
+ id_="i" + ("a" * len(keys)),
+ argnames=",".join(keys),
+ )
+
+
+def resolve_lambda(__fn, **kw):
+    """Given a lambda and a namespace, call the lambda with its declared
+    arguments taken from the namespace, patch any remaining values into
+    its globals, and return the result.
+
+ This is used so that we can have module-level fixtures that
+ refer to instance-level variables using lambdas.
+
+ """
+
+ pos_args = inspect_getfullargspec(__fn)[0]
+ pass_pos_args = {arg: kw.pop(arg) for arg in pos_args}
+ glb = dict(__fn.__globals__)
+ glb.update(kw)
+ new_fn = types.FunctionType(__fn.__code__, glb)
+ return new_fn(**pass_pos_args)
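+
+# A minimal usage sketch (names are illustrative): "connection" is declared
+# as an argument of the lambda, so it is passed in directly, while "stmt"
+# is not, so it is patched into the lambda's globals:
+#
+#   result = resolve_lambda(
+#       lambda connection: connection.execute(stmt),
+#       connection=some_connection,
+#       stmt=some_statement,
+#   )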
+
+
+def metadata_fixture(ddl="function"):
+ """Provide MetaData for a pytest fixture."""
+
+ from sqlalchemy.testing import config
+ from . import fixture_functions
+
+ def decorate(fn):
+ def run_ddl(self):
+ from sqlalchemy import schema
+
+ metadata = self.metadata = schema.MetaData()
+ try:
+ result = fn(self, metadata)
+ metadata.create_all(config.db)
+ # TODO:
+ # somehow get a per-function dml erase fixture here
+ yield result
+ finally:
+ metadata.drop_all(config.db)
+
+ return fixture_functions.fixture(scope=ddl)(run_ddl)
+
+ return decorate
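+
+# Usage sketch (assuming a fixtures-style test class): the returned Table is
+# created against config.db before the fixture result is yielded and dropped
+# again when the fixture goes out of scope:
+#
+#   @metadata_fixture()
+#   def tables(self, metadata):
+#       return Table("t", metadata, Column("id", Integer, primary_key=True))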
+
+
+def _safe_int(value: str) -> Union[int, str]:
+ try:
+ return int(value)
+    except ValueError:
+ return value
+
+
+def testing_engine(url=None, options=None, future=False):
+ from sqlalchemy.testing import config
+ from sqlalchemy.testing.engines import testing_engine
+
+ if not future:
+ future = getattr(config._current.options, "future_engine", False)
+
+ if not sqla_2:
+ kw = {"future": future} if future else {}
+ else:
+ kw = {}
+ return testing_engine(url, options, **kw)
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/testing/warnings.py b/Backend/venv/lib/python3.12/site-packages/alembic/testing/warnings.py
new file mode 100644
index 00000000..e87136b8
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/alembic/testing/warnings.py
@@ -0,0 +1,40 @@
+# testing/warnings.py
+# Copyright (C) 2005-2021 the SQLAlchemy authors and contributors
+#
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+
+import warnings
+
+from sqlalchemy import exc as sa_exc
+
+from ..util import sqla_14
+
+
+def setup_filters():
+ """Set global warning behavior for the test suite."""
+
+ warnings.resetwarnings()
+
+ warnings.filterwarnings("error", category=sa_exc.SADeprecationWarning)
+ warnings.filterwarnings("error", category=sa_exc.SAWarning)
+
+ # some selected deprecations...
+ warnings.filterwarnings("error", category=DeprecationWarning)
+ if not sqla_14:
+ # 1.3 uses pkg_resources in PluginLoader
+ warnings.filterwarnings(
+ "ignore",
+ "pkg_resources is deprecated as an API",
+ DeprecationWarning,
+ )
+ try:
+ import pytest
+ except ImportError:
+ pass
+ else:
+ warnings.filterwarnings(
+ "once", category=pytest.PytestDeprecationWarning
+ )
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/util/__init__.py b/Backend/venv/lib/python3.12/site-packages/alembic/util/__init__.py
new file mode 100644
index 00000000..3c1e27ca
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/alembic/util/__init__.py
@@ -0,0 +1,35 @@
+from .editor import open_in_editor
+from .exc import AutogenerateDiffsDetected
+from .exc import CommandError
+from .langhelpers import _with_legacy_names
+from .langhelpers import asbool
+from .langhelpers import dedupe_tuple
+from .langhelpers import Dispatcher
+from .langhelpers import EMPTY_DICT
+from .langhelpers import immutabledict
+from .langhelpers import memoized_property
+from .langhelpers import ModuleClsProxy
+from .langhelpers import not_none
+from .langhelpers import rev_id
+from .langhelpers import to_list
+from .langhelpers import to_tuple
+from .langhelpers import unique_list
+from .messaging import err
+from .messaging import format_as_comma
+from .messaging import msg
+from .messaging import obfuscate_url_pw
+from .messaging import status
+from .messaging import warn
+from .messaging import write_outstream
+from .pyfiles import coerce_resource_to_filename
+from .pyfiles import load_python_file
+from .pyfiles import pyc_file_from_path
+from .pyfiles import template_to_file
+from .sqla_compat import has_computed
+from .sqla_compat import sqla_13
+from .sqla_compat import sqla_14
+from .sqla_compat import sqla_2
+
+
+if not sqla_13:
+ raise CommandError("SQLAlchemy 1.3.0 or greater is required.")
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/util/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/alembic/util/__pycache__/__init__.cpython-312.pyc
new file mode 100644
index 00000000..b1e045a1
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/alembic/util/__pycache__/__init__.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/util/__pycache__/compat.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/alembic/util/__pycache__/compat.cpython-312.pyc
new file mode 100644
index 00000000..6c212ec0
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/alembic/util/__pycache__/compat.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/util/__pycache__/editor.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/alembic/util/__pycache__/editor.cpython-312.pyc
new file mode 100644
index 00000000..9ae4cc75
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/alembic/util/__pycache__/editor.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/util/__pycache__/exc.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/alembic/util/__pycache__/exc.cpython-312.pyc
new file mode 100644
index 00000000..522f89fd
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/alembic/util/__pycache__/exc.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/util/__pycache__/langhelpers.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/alembic/util/__pycache__/langhelpers.cpython-312.pyc
new file mode 100644
index 00000000..1d6c8494
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/alembic/util/__pycache__/langhelpers.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/util/__pycache__/messaging.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/alembic/util/__pycache__/messaging.cpython-312.pyc
new file mode 100644
index 00000000..b586f8fc
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/alembic/util/__pycache__/messaging.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/util/__pycache__/pyfiles.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/alembic/util/__pycache__/pyfiles.cpython-312.pyc
new file mode 100644
index 00000000..50a2993a
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/alembic/util/__pycache__/pyfiles.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/util/__pycache__/sqla_compat.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/alembic/util/__pycache__/sqla_compat.cpython-312.pyc
new file mode 100644
index 00000000..667dac4c
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/alembic/util/__pycache__/sqla_compat.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/util/compat.py b/Backend/venv/lib/python3.12/site-packages/alembic/util/compat.py
new file mode 100644
index 00000000..31e0208d
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/alembic/util/compat.py
@@ -0,0 +1,73 @@
+from __future__ import annotations
+
+from configparser import ConfigParser
+import io
+import os
+import sys
+import typing
+from typing import Sequence
+from typing import Union
+
+from sqlalchemy.util import inspect_getfullargspec # noqa
+from sqlalchemy.util.compat import inspect_formatargspec # noqa
+
+is_posix = os.name == "posix"
+
+py311 = sys.version_info >= (3, 11)
+py310 = sys.version_info >= (3, 10)
+py39 = sys.version_info >= (3, 9)
+py38 = sys.version_info >= (3, 8)
+
+
+# produce a wrapper that allows encoded text to stream
+# into a given buffer, but doesn't close it.
+# not sure of a more idiomatic approach to this.
+class EncodedIO(io.TextIOWrapper):
+ def close(self) -> None:
+ pass
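+
+# e.g. (a sketch): unlike a plain TextIOWrapper, letting an EncodedIO go out
+# of scope will not close the underlying binary buffer:
+#
+#   buf = io.BytesIO()
+#   wrapper = EncodedIO(buf, encoding="utf-8")
+#   wrapper.write("some text")
+#   wrapper.flush()
+#   buf.getvalue()  # buf is still open and usable here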
+
+
+if py39:
+ from importlib import resources as importlib_resources
+ from importlib import metadata as importlib_metadata
+ from importlib.metadata import EntryPoint
+else:
+ import importlib_resources # type:ignore # noqa
+ import importlib_metadata # type:ignore # noqa
+ from importlib_metadata import EntryPoint # type:ignore # noqa
+
+
+def importlib_metadata_get(group: str) -> Sequence[EntryPoint]:
+ ep = importlib_metadata.entry_points()
+ if hasattr(ep, "select"):
+ return ep.select(group=group) # type: ignore
+ else:
+ return ep.get(group, ()) # type: ignore
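+
+# e.g. importlib_metadata_get("console_scripts") returns every registered
+# console-script entry point, using .select() on Python 3.10+ style
+# entry_points() objects and the legacy .get() mapping otherwise.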
+
+
+def formatannotation_fwdref(annotation, base_module=None):
+ """vendored from python 3.7"""
+ # copied over _formatannotation from sqlalchemy 2.0
+
+ if isinstance(annotation, str):
+ return annotation
+
+ if getattr(annotation, "__module__", None) == "typing":
+ return repr(annotation).replace("typing.", "").replace("~", "")
+ if isinstance(annotation, type):
+ if annotation.__module__ in ("builtins", base_module):
+ return repr(annotation.__qualname__)
+ return annotation.__module__ + "." + annotation.__qualname__
+ elif isinstance(annotation, typing.TypeVar):
+ return repr(annotation).replace("~", "")
+ return repr(annotation).replace("~", "")
+
+
+def read_config_parser(
+ file_config: ConfigParser,
+ file_argument: Sequence[Union[str, os.PathLike[str]]],
+) -> list[str]:
+ if py310:
+ return file_config.read(file_argument, encoding="locale")
+ else:
+ return file_config.read(file_argument)
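+
+# Note: the special "locale" encoding name is only accepted by open()/read()
+# from Python 3.10 onward (PEP 597), hence the version gate above.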
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/util/editor.py b/Backend/venv/lib/python3.12/site-packages/alembic/util/editor.py
new file mode 100644
index 00000000..f1d1557f
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/alembic/util/editor.py
@@ -0,0 +1,81 @@
+from __future__ import annotations
+
+import os
+from os.path import exists
+from os.path import join
+from os.path import splitext
+from subprocess import check_call
+from typing import Dict
+from typing import List
+from typing import Mapping
+from typing import Optional
+
+from .compat import is_posix
+from .exc import CommandError
+
+
+def open_in_editor(
+ filename: str, environ: Optional[Dict[str, str]] = None
+) -> None:
+ """
+    Opens the given file in a text editor. If the ``EDITOR`` or ``VISUAL``
+    environment variable is set, it is taken as the preferred editor.
+
+ Otherwise, a list of commonly installed editors is tried.
+
+ If no editor matches, an :py:exc:`OSError` is raised.
+
+ :param filename: The filename to open. Will be passed verbatim to the
+ editor command.
+ :param environ: An optional drop-in replacement for ``os.environ``. Used
+ mainly for testing.
+ """
+ env = os.environ if environ is None else environ
+ try:
+ editor = _find_editor(env)
+ check_call([editor, filename])
+ except Exception as exc:
+ raise CommandError("Error executing editor (%s)" % (exc,)) from exc
+
+
+def _find_editor(environ: Mapping[str, str]) -> str:
+ candidates = _default_editors()
+ for i, var in enumerate(("EDITOR", "VISUAL")):
+ if var in environ:
+ user_choice = environ[var]
+ if exists(user_choice):
+ return user_choice
+ if os.sep not in user_choice:
+ candidates.insert(i, user_choice)
+
+ for candidate in candidates:
+ path = _find_executable(candidate, environ)
+ if path is not None:
+ return path
+ raise OSError(
+ "No suitable editor found. Please set the "
+ '"EDITOR" or "VISUAL" environment variables'
+ )
+
+
+def _find_executable(
+ candidate: str, environ: Mapping[str, str]
+) -> Optional[str]:
+    # Assuming this is on the PATH, we need to determine its absolute
+    # location. Otherwise, ``check_call`` will fail.
+ if not is_posix and splitext(candidate)[1] != ".exe":
+ candidate += ".exe"
+ for path in environ.get("PATH", "").split(os.pathsep):
+ value = join(path, candidate)
+ if exists(value):
+ return value
+ return None
+
+
+def _default_editors() -> List[str]:
+ # Look for an editor. Prefer the user's choice by env-var, fall back to
+ # most commonly installed editor (nano/vim)
+ if is_posix:
+ return ["sensible-editor", "editor", "nano", "vim", "code"]
+ else:
+ return ["code.exe", "notepad++.exe", "notepad.exe"]
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/util/exc.py b/Backend/venv/lib/python3.12/site-packages/alembic/util/exc.py
new file mode 100644
index 00000000..0d0496b1
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/alembic/util/exc.py
@@ -0,0 +1,6 @@
+class CommandError(Exception):
+ pass
+
+
+class AutogenerateDiffsDetected(CommandError):
+ pass
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/util/langhelpers.py b/Backend/venv/lib/python3.12/site-packages/alembic/util/langhelpers.py
new file mode 100644
index 00000000..34d48bc6
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/alembic/util/langhelpers.py
@@ -0,0 +1,290 @@
+from __future__ import annotations
+
+import collections
+from collections.abc import Iterable
+import textwrap
+from typing import Any
+from typing import Callable
+from typing import Dict
+from typing import List
+from typing import Mapping
+from typing import Optional
+from typing import overload
+from typing import Sequence
+from typing import Tuple
+from typing import TypeVar
+from typing import Union
+import uuid
+import warnings
+
+from sqlalchemy.util import asbool # noqa
+from sqlalchemy.util import immutabledict # noqa
+from sqlalchemy.util import memoized_property # noqa
+from sqlalchemy.util import to_list # noqa
+from sqlalchemy.util import unique_list # noqa
+
+from .compat import inspect_getfullargspec
+
+
+EMPTY_DICT: Mapping[Any, Any] = immutabledict()
+_T = TypeVar("_T")
+
+
+class _ModuleClsMeta(type):
+ def __setattr__(cls, key: str, value: Callable) -> None:
+ super().__setattr__(key, value)
+ cls._update_module_proxies(key) # type: ignore
+
+
+class ModuleClsProxy(metaclass=_ModuleClsMeta):
+ """Create module level proxy functions for the
+ methods on a given class.
+
+ The functions will have a compatible signature
+ as the methods.
+
+ """
+
+ _setups: Dict[type, Tuple[set, list]] = collections.defaultdict(
+ lambda: (set(), [])
+ )
+
+ @classmethod
+ def _update_module_proxies(cls, name: str) -> None:
+ attr_names, modules = cls._setups[cls]
+ for globals_, locals_ in modules:
+ cls._add_proxied_attribute(name, globals_, locals_, attr_names)
+
+ def _install_proxy(self) -> None:
+ attr_names, modules = self._setups[self.__class__]
+ for globals_, locals_ in modules:
+ globals_["_proxy"] = self
+ for attr_name in attr_names:
+ globals_[attr_name] = getattr(self, attr_name)
+
+ def _remove_proxy(self) -> None:
+ attr_names, modules = self._setups[self.__class__]
+ for globals_, locals_ in modules:
+ globals_["_proxy"] = None
+ for attr_name in attr_names:
+ del globals_[attr_name]
+
+ @classmethod
+ def create_module_class_proxy(cls, globals_, locals_):
+ attr_names, modules = cls._setups[cls]
+ modules.append((globals_, locals_))
+ cls._setup_proxy(globals_, locals_, attr_names)
+
+ @classmethod
+ def _setup_proxy(cls, globals_, locals_, attr_names):
+ for methname in dir(cls):
+ cls._add_proxied_attribute(methname, globals_, locals_, attr_names)
+
+ @classmethod
+ def _add_proxied_attribute(cls, methname, globals_, locals_, attr_names):
+ if not methname.startswith("_"):
+ meth = getattr(cls, methname)
+ if callable(meth):
+ locals_[methname] = cls._create_method_proxy(
+ methname, globals_, locals_
+ )
+ else:
+ attr_names.add(methname)
+
+ @classmethod
+ def _create_method_proxy(cls, name, globals_, locals_):
+ fn = getattr(cls, name)
+
+ def _name_error(name, from_):
+ raise NameError(
+ "Can't invoke function '%s', as the proxy object has "
+ "not yet been "
+ "established for the Alembic '%s' class. "
+ "Try placing this code inside a callable."
+ % (name, cls.__name__)
+ ) from from_
+
+ globals_["_name_error"] = _name_error
+
+ translations = getattr(fn, "_legacy_translations", [])
+ if translations:
+ spec = inspect_getfullargspec(fn)
+ if spec[0] and spec[0][0] == "self":
+ spec[0].pop(0)
+
+ outer_args = inner_args = "*args, **kw"
+ translate_str = "args, kw = _translate(%r, %r, %r, args, kw)" % (
+ fn.__name__,
+ tuple(spec),
+ translations,
+ )
+
+ def translate(fn_name, spec, translations, args, kw):
+ return_kw = {}
+ return_args = []
+
+ for oldname, newname in translations:
+ if oldname in kw:
+ warnings.warn(
+ "Argument %r is now named %r "
+ "for method %s()." % (oldname, newname, fn_name)
+ )
+ return_kw[newname] = kw.pop(oldname)
+ return_kw.update(kw)
+
+ args = list(args)
+ if spec[3]:
+ pos_only = spec[0][: -len(spec[3])]
+ else:
+ pos_only = spec[0]
+ for arg in pos_only:
+ if arg not in return_kw:
+ try:
+ return_args.append(args.pop(0))
+ except IndexError:
+ raise TypeError(
+ "missing required positional argument: %s"
+ % arg
+ )
+ return_args.extend(args)
+
+ return return_args, return_kw
+
+ globals_["_translate"] = translate
+ else:
+ outer_args = "*args, **kw"
+ inner_args = "*args, **kw"
+ translate_str = ""
+
+ func_text = textwrap.dedent(
+ """\
+ def %(name)s(%(args)s):
+ %(doc)r
+ %(translate)s
+ try:
+ p = _proxy
+ except NameError as ne:
+ _name_error('%(name)s', ne)
+ return _proxy.%(name)s(%(apply_kw)s)
+ """
+ % {
+ "name": name,
+ "translate": translate_str,
+ "args": outer_args,
+ "apply_kw": inner_args,
+ "doc": fn.__doc__,
+ }
+ )
+ lcl = {}
+
+ exec(func_text, globals_, lcl)
+ return lcl[name]
+
+
+def _with_legacy_names(translations):
+ def decorate(fn):
+ fn._legacy_translations = translations
+ return fn
+
+ return decorate
+
+
+def rev_id() -> str:
+ return uuid.uuid4().hex[-12:]
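+
+# e.g. rev_id() might return "ae1027a6acf4": the last twelve hex digits of a
+# random UUID, matching the format of Alembic revision identifiers.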
+
+
+@overload
+def to_tuple(x: Any, default: tuple) -> tuple:
+ ...
+
+
+@overload
+def to_tuple(x: None, default: Optional[_T] = None) -> _T:
+ ...
+
+
+@overload
+def to_tuple(x: Any, default: Optional[tuple] = None) -> tuple:
+ ...
+
+
+def to_tuple(x, default=None):
+ if x is None:
+ return default
+ elif isinstance(x, str):
+ return (x,)
+ elif isinstance(x, Iterable):
+ return tuple(x)
+ else:
+ return (x,)
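+
+# e.g.: to_tuple("x") -> ("x",); to_tuple(["a", "b"]) -> ("a", "b");
+# to_tuple(None, default=()) -> ()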
+
+
+def dedupe_tuple(tup: Tuple[str, ...]) -> Tuple[str, ...]:
+ return tuple(unique_list(tup))
+
+
+class Dispatcher:
+ def __init__(self, uselist: bool = False) -> None:
+ self._registry: Dict[tuple, Any] = {}
+ self.uselist = uselist
+
+ def dispatch_for(
+ self, target: Any, qualifier: str = "default"
+ ) -> Callable:
+ def decorate(fn):
+ if self.uselist:
+ self._registry.setdefault((target, qualifier), []).append(fn)
+ else:
+ assert (target, qualifier) not in self._registry
+ self._registry[(target, qualifier)] = fn
+ return fn
+
+ return decorate
+
+ def dispatch(self, obj: Any, qualifier: str = "default") -> Any:
+ if isinstance(obj, str):
+ targets: Sequence = [obj]
+ elif isinstance(obj, type):
+ targets = obj.__mro__
+ else:
+ targets = type(obj).__mro__
+
+ for spcls in targets:
+ if qualifier != "default" and (spcls, qualifier) in self._registry:
+ return self._fn_or_list(self._registry[(spcls, qualifier)])
+ elif (spcls, "default") in self._registry:
+ return self._fn_or_list(self._registry[(spcls, "default")])
+ else:
+ raise ValueError("no dispatch function for object: %s" % obj)
+
+ def _fn_or_list(
+ self, fn_or_list: Union[List[Callable], Callable]
+ ) -> Callable:
+ if self.uselist:
+
+ def go(*arg, **kw):
+ for fn in fn_or_list:
+ fn(*arg, **kw)
+
+ return go
+ else:
+ return fn_or_list # type: ignore
+
+ def branch(self) -> Dispatcher:
+ """Return a copy of this dispatcher that is independently
+ writable."""
+
+ d = Dispatcher()
+ if self.uselist:
+ d._registry.update(
+ (k, [fn for fn in self._registry[k]]) for k in self._registry
+ )
+ else:
+ d._registry.update(self._registry)
+ return d
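+
+# A minimal usage sketch (hypothetical names):
+#
+#   dispatcher = Dispatcher()
+#
+#   @dispatcher.dispatch_for(schema.Table)
+#   def _render_table(table, context):
+#       ...
+#
+#   # lookup walks type(obj).__mro__; raises ValueError if nothing matches
+#   dispatcher.dispatch(some_table)(some_table, some_context)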
+
+
+def not_none(value: Optional[_T]) -> _T:
+ assert value is not None
+ return value
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/util/messaging.py b/Backend/venv/lib/python3.12/site-packages/alembic/util/messaging.py
new file mode 100644
index 00000000..35592c0e
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/alembic/util/messaging.py
@@ -0,0 +1,112 @@
+from __future__ import annotations
+
+from collections.abc import Iterable
+from contextlib import contextmanager
+import logging
+import sys
+import textwrap
+from typing import Optional
+from typing import TextIO
+from typing import Union
+import warnings
+
+from sqlalchemy.engine import url
+
+from . import sqla_compat
+
+log = logging.getLogger(__name__)
+
+# disable "no handler found" errors
+logging.getLogger("alembic").addHandler(logging.NullHandler())
+
+
+try:
+ import fcntl
+ import termios
+ import struct
+
+ ioctl = fcntl.ioctl(0, termios.TIOCGWINSZ, struct.pack("HHHH", 0, 0, 0, 0))
+ _h, TERMWIDTH, _hp, _wp = struct.unpack("HHHH", ioctl)
+ if TERMWIDTH <= 0: # can occur if running in emacs pseudo-tty
+ TERMWIDTH = None
+except (ImportError, OSError):
+ TERMWIDTH = None
+
+
+def write_outstream(
+ stream: TextIO, *text: Union[str, bytes], quiet: bool = False
+) -> None:
+ if quiet:
+ return
+ encoding = getattr(stream, "encoding", "ascii") or "ascii"
+ for t in text:
+ if not isinstance(t, bytes):
+ t = t.encode(encoding, "replace")
+ t = t.decode(encoding)
+ try:
+ stream.write(t)
+ except OSError:
+ # suppress "broken pipe" errors.
+ # no known way to handle this on Python 3 however
+ # as the exception is "ignored" (noisily) in TextIOWrapper.
+ break
+
+
+@contextmanager
+def status(status_msg: str, newline: bool = False, quiet: bool = False):
+ msg(status_msg + " ...", newline, flush=True, quiet=quiet)
+ try:
+ yield
+ except:
+ if not quiet:
+ write_outstream(sys.stdout, " FAILED\n")
+ raise
+ else:
+ if not quiet:
+ write_outstream(sys.stdout, " done\n")
+
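+
+# e.g. (sketch):
+#
+#   with status("Generating revision script"):
+#       write_the_file()
+#
+# emits "Generating revision script ..." up front, then " done" on success
+# or " FAILED" if the block raises.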
+
+def err(message: str, quiet: bool = False):
+ log.error(message)
+ msg(f"FAILED: {message}", quiet=quiet)
+ sys.exit(-1)
+
+
+def obfuscate_url_pw(input_url: str) -> str:
+ u = url.make_url(input_url)
+ return sqla_compat.url_render_as_string(u, hide_password=True)
+
+
+def warn(msg: str, stacklevel: int = 2) -> None:
+ warnings.warn(msg, UserWarning, stacklevel=stacklevel)
+
+
+def msg(
+ msg: str, newline: bool = True, flush: bool = False, quiet: bool = False
+) -> None:
+ if quiet:
+ return
+ if TERMWIDTH is None:
+ write_outstream(sys.stdout, msg)
+ if newline:
+ write_outstream(sys.stdout, "\n")
+ else:
+ # left indent output lines
+ lines = textwrap.wrap(msg, TERMWIDTH)
+ if len(lines) > 1:
+ for line in lines[0:-1]:
+ write_outstream(sys.stdout, " ", line, "\n")
+ write_outstream(sys.stdout, " ", lines[-1], ("\n" if newline else ""))
+ if flush:
+ sys.stdout.flush()
+
+
+def format_as_comma(value: Optional[Union[str, Iterable[str]]]) -> str:
+ if value is None:
+ return ""
+ elif isinstance(value, str):
+ return value
+ elif isinstance(value, Iterable):
+ return ", ".join(value)
+ else:
+ raise ValueError("Don't know how to comma-format %r" % value)
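+
+# e.g. format_as_comma(["x", "y"]) -> "x, y"; format_as_comma(None) -> ""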
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/util/pyfiles.py b/Backend/venv/lib/python3.12/site-packages/alembic/util/pyfiles.py
new file mode 100644
index 00000000..e7576731
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/alembic/util/pyfiles.py
@@ -0,0 +1,110 @@
+from __future__ import annotations
+
+import atexit
+from contextlib import ExitStack
+import importlib
+import importlib.machinery
+import importlib.util
+import os
+import re
+import tempfile
+from typing import Optional
+
+from mako import exceptions
+from mako.template import Template
+
+from . import compat
+from .exc import CommandError
+
+
+def template_to_file(
+ template_file: str, dest: str, output_encoding: str, **kw
+) -> None:
+ template = Template(filename=template_file)
+ try:
+ output = template.render_unicode(**kw).encode(output_encoding)
+ except:
+ with tempfile.NamedTemporaryFile(suffix=".txt", delete=False) as ntf:
+ ntf.write(
+ exceptions.text_error_template()
+ .render_unicode()
+ .encode(output_encoding)
+ )
+ fname = ntf.name
+ raise CommandError(
+ "Template rendering failed; see %s for a "
+ "template-oriented traceback." % fname
+ )
+ else:
+ with open(dest, "wb") as f:
+ f.write(output)
+
+
+def coerce_resource_to_filename(fname: str) -> str:
+ """Interpret a filename as either a filesystem location or as a package
+ resource.
+
+    Names that are not absolute paths and contain a colon
+    are interpreted as resources and coerced to a file location.
+
+ """
+ if not os.path.isabs(fname) and ":" in fname:
+ tokens = fname.split(":")
+
+ # from https://importlib-resources.readthedocs.io/en/latest/migration.html#pkg-resources-resource-filename # noqa E501
+
+ file_manager = ExitStack()
+ atexit.register(file_manager.close)
+
+ ref = compat.importlib_resources.files(tokens[0])
+ for tok in tokens[1:]:
+ ref = ref / tok
+ fname = file_manager.enter_context( # type: ignore[assignment]
+ compat.importlib_resources.as_file(ref)
+ )
+ return fname
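+
+# e.g. (illustrative): coerce_resource_to_filename("alembic:templates")
+# resolves the "templates" directory of the installed alembic package to an
+# on-disk path, while a plain filesystem path is returned unchanged.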
+
+
+def pyc_file_from_path(path: str) -> Optional[str]:
+ """Given a python source path, locate the .pyc."""
+
+ candidate = importlib.util.cache_from_source(path)
+ if os.path.exists(candidate):
+ return candidate
+
+ # even for pep3147, fall back to the old way of finding .pyc files,
+ # to support sourceless operation
+ filepath, ext = os.path.splitext(path)
+ for ext in importlib.machinery.BYTECODE_SUFFIXES:
+ if os.path.exists(filepath + ext):
+ return filepath + ext
+ else:
+ return None
+
+
+def load_python_file(dir_: str, filename: str):
+ """Load a file from the given path as a Python module."""
+
+ module_id = re.sub(r"\W", "_", filename)
+ path = os.path.join(dir_, filename)
+ _, ext = os.path.splitext(filename)
+ if ext == ".py":
+ if os.path.exists(path):
+ module = load_module_py(module_id, path)
+ else:
+ pyc_path = pyc_file_from_path(path)
+ if pyc_path is None:
+ raise ImportError("Can't find Python file %s" % path)
+ else:
+ module = load_module_py(module_id, pyc_path)
+    elif ext in (".pyc", ".pyo"):
+        module = load_module_py(module_id, path)
+    else:
+        raise ValueError("Can't load a file with extension %r" % ext)
+    return module
+
+
+def load_module_py(module_id: str, path: str):
+ spec = importlib.util.spec_from_file_location(module_id, path)
+ assert spec
+ module = importlib.util.module_from_spec(spec)
+ spec.loader.exec_module(module) # type: ignore
+ return module
diff --git a/Backend/venv/lib/python3.12/site-packages/alembic/util/sqla_compat.py b/Backend/venv/lib/python3.12/site-packages/alembic/util/sqla_compat.py
new file mode 100644
index 00000000..3f175cf5
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/alembic/util/sqla_compat.py
@@ -0,0 +1,639 @@
+from __future__ import annotations
+
+import contextlib
+import re
+from typing import Any
+from typing import Dict
+from typing import Iterable
+from typing import Iterator
+from typing import Mapping
+from typing import Optional
+from typing import TYPE_CHECKING
+from typing import TypeVar
+from typing import Union
+
+from sqlalchemy import __version__
+from sqlalchemy import inspect
+from sqlalchemy import schema
+from sqlalchemy import sql
+from sqlalchemy import types as sqltypes
+from sqlalchemy.engine import url
+from sqlalchemy.ext.compiler import compiles
+from sqlalchemy.schema import CheckConstraint
+from sqlalchemy.schema import Column
+from sqlalchemy.schema import ForeignKeyConstraint
+from sqlalchemy.sql import visitors
+from sqlalchemy.sql.base import DialectKWArgs
+from sqlalchemy.sql.elements import BindParameter
+from sqlalchemy.sql.elements import ColumnClause
+from sqlalchemy.sql.elements import quoted_name
+from sqlalchemy.sql.elements import TextClause
+from sqlalchemy.sql.elements import UnaryExpression
+from sqlalchemy.sql.visitors import traverse
+from typing_extensions import TypeGuard
+
+if TYPE_CHECKING:
+ from sqlalchemy import Index
+ from sqlalchemy import Table
+ from sqlalchemy.engine import Connection
+ from sqlalchemy.engine import Dialect
+ from sqlalchemy.engine import Transaction
+ from sqlalchemy.engine.reflection import Inspector
+ from sqlalchemy.sql.base import ColumnCollection
+ from sqlalchemy.sql.compiler import SQLCompiler
+ from sqlalchemy.sql.dml import Insert
+ from sqlalchemy.sql.elements import ColumnElement
+ from sqlalchemy.sql.schema import Constraint
+ from sqlalchemy.sql.schema import SchemaItem
+ from sqlalchemy.sql.selectable import Select
+ from sqlalchemy.sql.selectable import TableClause
+
+_CE = TypeVar("_CE", bound=Union["ColumnElement[Any]", "SchemaItem"])
+
+
+def _safe_int(value: str) -> Union[int, str]:
+ try:
+ return int(value)
+    except ValueError:
+ return value
+
+
+_vers = tuple(
+ [_safe_int(x) for x in re.findall(r"(\d+|[abc]\d)", __version__)]
+)
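+# e.g. a __version__ of "1.4.41" parses to (1, 4, 41), and "2.0.0b1" to
+# (2, 0, 0, "b1")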
+sqla_13 = _vers >= (1, 3)
+sqla_14 = _vers >= (1, 4)
+# https://docs.sqlalchemy.org/en/latest/changelog/changelog_14.html#change-0c6e0cc67dfe6fac5164720e57ef307d
+sqla_14_18 = _vers >= (1, 4, 18)
+sqla_14_26 = _vers >= (1, 4, 26)
+sqla_2 = _vers >= (2,)
+sqlalchemy_version = __version__
+
+try:
+ from sqlalchemy.sql.naming import _NONE_NAME as _NONE_NAME
+except ImportError:
+ from sqlalchemy.sql.elements import _NONE_NAME as _NONE_NAME # type: ignore # noqa: E501
+
+
+class _Unsupported:
+ "Placeholder for unsupported SQLAlchemy classes"
+
+
+try:
+ from sqlalchemy import Computed
+except ImportError:
+ if not TYPE_CHECKING:
+
+ class Computed(_Unsupported):
+ pass
+
+ has_computed = False
+ has_computed_reflection = False
+else:
+ has_computed = True
+ has_computed_reflection = _vers >= (1, 3, 16)
+
+try:
+ from sqlalchemy import Identity
+except ImportError:
+ if not TYPE_CHECKING:
+
+ class Identity(_Unsupported):
+ pass
+
+ has_identity = False
+else:
+ identity_has_dialect_kwargs = issubclass(Identity, DialectKWArgs)
+
+ def _get_identity_options_dict(
+ identity: Union[Identity, schema.Sequence, None],
+ dialect_kwargs: bool = False,
+ ) -> Dict[str, Any]:
+ if identity is None:
+ return {}
+ elif identity_has_dialect_kwargs:
+ as_dict = identity._as_dict() # type: ignore
+ if dialect_kwargs:
+ assert isinstance(identity, DialectKWArgs)
+ as_dict.update(identity.dialect_kwargs)
+ else:
+ as_dict = {}
+ if isinstance(identity, Identity):
+ # always=None means something different than always=False
+ as_dict["always"] = identity.always
+ if identity.on_null is not None:
+ as_dict["on_null"] = identity.on_null
+ # attributes common to Identity and Sequence
+ attrs = (
+ "start",
+ "increment",
+ "minvalue",
+ "maxvalue",
+ "nominvalue",
+ "nomaxvalue",
+ "cycle",
+ "cache",
+ "order",
+ )
+ as_dict.update(
+ {
+ key: getattr(identity, key, None)
+ for key in attrs
+ if getattr(identity, key, None) is not None
+ }
+ )
+ return as_dict
+
+ has_identity = True
+
+if sqla_2:
+ from sqlalchemy.sql.base import _NoneName
+else:
+ from sqlalchemy.util import symbol as _NoneName # type: ignore[assignment]
+
+
+_ConstraintName = Union[None, str, _NoneName]
+
+_ConstraintNameDefined = Union[str, _NoneName]
+
+
+def constraint_name_defined(
+ name: _ConstraintName,
+) -> TypeGuard[_ConstraintNameDefined]:
+ return name is _NONE_NAME or isinstance(name, (str, _NoneName))
+
+
+def constraint_name_string(
+ name: _ConstraintName,
+) -> TypeGuard[str]:
+ return isinstance(name, str)
+
+
+def constraint_name_or_none(
+ name: _ConstraintName,
+) -> Optional[str]:
+ return name if constraint_name_string(name) else None
+
+
+AUTOINCREMENT_DEFAULT = "auto"
+
+
+@contextlib.contextmanager
+def _ensure_scope_for_ddl(
+ connection: Optional[Connection],
+) -> Iterator[None]:
+ try:
+ in_transaction = connection.in_transaction # type: ignore[union-attr]
+ except AttributeError:
+ # catch for MockConnection, None
+ in_transaction = None
+
+ # yield outside the catch
+ if in_transaction is None:
+ yield
+ else:
+ if not in_transaction():
+ assert connection is not None
+ with connection.begin():
+ yield
+ else:
+ yield
+
+
+def url_render_as_string(url, hide_password=True):
+ if sqla_14:
+ return url.render_as_string(hide_password=hide_password)
+ else:
+ return url.__to_string__(hide_password=hide_password)
+
+
+def _safe_begin_connection_transaction(
+ connection: Connection,
+) -> Transaction:
+ transaction = _get_connection_transaction(connection)
+ if transaction:
+ return transaction
+ else:
+ return connection.begin()
+
+
+def _safe_commit_connection_transaction(
+ connection: Connection,
+) -> None:
+ transaction = _get_connection_transaction(connection)
+ if transaction:
+ transaction.commit()
+
+
+def _safe_rollback_connection_transaction(
+ connection: Connection,
+) -> None:
+ transaction = _get_connection_transaction(connection)
+ if transaction:
+ transaction.rollback()
+
+
+def _get_connection_in_transaction(connection: Optional[Connection]) -> bool:
+ try:
+ in_transaction = connection.in_transaction # type: ignore
+ except AttributeError:
+ # catch for MockConnection
+ return False
+ else:
+ return in_transaction()
+
+
+def _idx_table_bound_expressions(idx: Index) -> Iterable[ColumnElement[Any]]:
+ return idx.expressions # type: ignore
+
+
+def _copy(schema_item: _CE, **kw) -> _CE:
+ if hasattr(schema_item, "_copy"):
+ return schema_item._copy(**kw) # type: ignore[union-attr]
+ else:
+ return schema_item.copy(**kw) # type: ignore[union-attr]
+
+
+def _get_connection_transaction(
+ connection: Connection,
+) -> Optional[Transaction]:
+ if sqla_14:
+ return connection.get_transaction()
+ else:
+ r = connection._root # type: ignore[attr-defined]
+ return r._Connection__transaction
+
+
+def _create_url(*arg, **kw) -> url.URL:
+ if hasattr(url.URL, "create"):
+ return url.URL.create(*arg, **kw)
+ else:
+ return url.URL(*arg, **kw)
+
+
+def _connectable_has_table(
+ connectable: Connection, tablename: str, schemaname: Union[str, None]
+) -> bool:
+ if sqla_14:
+ return inspect(connectable).has_table(tablename, schemaname)
+ else:
+ return connectable.dialect.has_table(
+ connectable, tablename, schemaname
+ )
+
+
+def _exec_on_inspector(inspector, statement, **params):
+ if sqla_14:
+ with inspector._operation_context() as conn:
+ return conn.execute(statement, params)
+ else:
+ return inspector.bind.execute(statement, params)
+
+
+def _nullability_might_be_unset(metadata_column):
+ if not sqla_14:
+ return metadata_column.nullable
+ else:
+ from sqlalchemy.sql import schema
+
+ return (
+ metadata_column._user_defined_nullable is schema.NULL_UNSPECIFIED
+ )
+
+
+def _server_default_is_computed(*server_default) -> bool:
+ if not has_computed:
+ return False
+ else:
+ return any(isinstance(sd, Computed) for sd in server_default)
+
+
+def _server_default_is_identity(*server_default) -> bool:
+ if not sqla_14:
+ return False
+ else:
+ return any(isinstance(sd, Identity) for sd in server_default)
+
+
+def _table_for_constraint(constraint: Constraint) -> Table:
+ if isinstance(constraint, ForeignKeyConstraint):
+ table = constraint.parent
+ assert table is not None
+ return table # type: ignore[return-value]
+ else:
+ return constraint.table
+
+
+def _columns_for_constraint(constraint):
+ if isinstance(constraint, ForeignKeyConstraint):
+ return [fk.parent for fk in constraint.elements]
+ elif isinstance(constraint, CheckConstraint):
+ return _find_columns(constraint.sqltext)
+ else:
+ return list(constraint.columns)
+
+
+def _reflect_table(inspector: Inspector, table: Table) -> None:
+ if sqla_14:
+ return inspector.reflect_table(table, None)
+ else:
+ return inspector.reflecttable( # type: ignore[attr-defined]
+ table, None
+ )
+
+
+def _resolve_for_variant(type_, dialect):
+ if _type_has_variants(type_):
+ base_type, mapping = _get_variant_mapping(type_)
+ return mapping.get(dialect.name, base_type)
+ else:
+ return type_
+
+
+if hasattr(sqltypes.TypeEngine, "_variant_mapping"):
+
+ def _type_has_variants(type_):
+ return bool(type_._variant_mapping)
+
+ def _get_variant_mapping(type_):
+ return type_, type_._variant_mapping
+
+else:
+
+ def _type_has_variants(type_):
+ return type(type_) is sqltypes.Variant
+
+ def _get_variant_mapping(type_):
+ return type_.impl, type_.mapping
+
+
+def _fk_spec(constraint):
+ source_columns = [
+ constraint.columns[key].name for key in constraint.column_keys
+ ]
+
+ source_table = constraint.parent.name
+ source_schema = constraint.parent.schema
+ target_schema = constraint.elements[0].column.table.schema
+ target_table = constraint.elements[0].column.table.name
+ target_columns = [element.column.name for element in constraint.elements]
+ ondelete = constraint.ondelete
+ onupdate = constraint.onupdate
+ deferrable = constraint.deferrable
+ initially = constraint.initially
+ return (
+ source_schema,
+ source_table,
+ source_columns,
+ target_schema,
+ target_table,
+ target_columns,
+ onupdate,
+ ondelete,
+ deferrable,
+ initially,
+ )
+
+
+def _fk_is_self_referential(constraint: ForeignKeyConstraint) -> bool:
+ spec = constraint.elements[0]._get_colspec() # type: ignore[attr-defined]
+ tokens = spec.split(".")
+ tokens.pop(-1) # colname
+ tablekey = ".".join(tokens)
+ assert constraint.parent is not None
+ return tablekey == constraint.parent.key
+
+
+def _is_type_bound(constraint: Constraint) -> bool:
+ # this deals with SQLAlchemy #3260, don't copy CHECK constraints
+ # that will be generated by the type.
+ # new feature added for #3260
+ return constraint._type_bound # type: ignore[attr-defined]
+
+
+def _find_columns(clause):
+ """locate Column objects within the given expression."""
+
+ cols = set()
+ traverse(clause, {}, {"column": cols.add})
+ return cols
+
+
+def _remove_column_from_collection(
+ collection: ColumnCollection, column: Union[Column[Any], ColumnClause[Any]]
+) -> None:
+ """remove a column from a ColumnCollection."""
+
+ # workaround for older SQLAlchemy, remove the
+ # same object that's present
+ assert column.key is not None
+ to_remove = collection[column.key]
+
+    # SQLAlchemy 2.0 makes wider use of ReadOnlyColumnCollection
+    # (renamed from ImmutableColumnCollection)
+ if hasattr(collection, "_immutable") or hasattr(collection, "_readonly"):
+ collection._parent.remove(to_remove)
+ else:
+ collection.remove(to_remove)
+
+
+def _textual_index_column(
+ table: Table, text_: Union[str, TextClause, ColumnElement[Any]]
+) -> Union[ColumnElement[Any], Column[Any]]:
+ """a workaround for the Index construct's severe lack of flexibility"""
+ if isinstance(text_, str):
+ c = Column(text_, sqltypes.NULLTYPE)
+ table.append_column(c)
+ return c
+ elif isinstance(text_, TextClause):
+ return _textual_index_element(table, text_)
+ elif isinstance(text_, _textual_index_element):
+ return _textual_index_column(table, text_.text)
+ elif isinstance(text_, sql.ColumnElement):
+ return _copy_expression(text_, table)
+ else:
+ raise ValueError("String or text() construct expected")
+
+
+def _copy_expression(expression: _CE, target_table: Table) -> _CE:
+ def replace(col):
+ if (
+ isinstance(col, Column)
+ and col.table is not None
+ and col.table is not target_table
+ ):
+ if col.name in target_table.c:
+ return target_table.c[col.name]
+ else:
+ c = _copy(col)
+ target_table.append_column(c)
+ return c
+ else:
+ return None
+
+ return visitors.replacement_traverse( # type: ignore[call-overload]
+ expression, {}, replace
+ )
+
+
+class _textual_index_element(sql.ColumnElement):
+ """Wrap around a sqlalchemy text() construct in such a way that
+ we appear like a column-oriented SQL expression to an Index
+ construct.
+
+ The issue here is that currently the Postgresql dialect, the biggest
+ recipient of functional indexes, keys all the index expressions to
+ the corresponding column expressions when rendering CREATE INDEX,
+ so the Index we create here needs to have a .columns collection that
+ is the same length as the .expressions collection. Ultimately
+ SQLAlchemy should support text() expressions in indexes.
+
+ See SQLAlchemy issue 3174.
+
+ """
+
+ __visit_name__ = "_textual_idx_element"
+
+ def __init__(self, table: Table, text: TextClause) -> None:
+ self.table = table
+ self.text = text
+ self.key = text.text
+ self.fake_column = schema.Column(self.text.text, sqltypes.NULLTYPE)
+ table.append_column(self.fake_column)
+
+ def get_children(self):
+ return [self.fake_column]
+
+
+@compiles(_textual_index_element)
+def _render_textual_index_column(
+ element: _textual_index_element, compiler: SQLCompiler, **kw
+) -> str:
+ return compiler.process(element.text, **kw)
+
+
+class _literal_bindparam(BindParameter):
+ pass
+
+
+@compiles(_literal_bindparam)
+def _render_literal_bindparam(
+ element: _literal_bindparam, compiler: SQLCompiler, **kw
+) -> str:
+ return compiler.render_literal_bindparam(element, **kw)
+
+
+def _get_index_expressions(idx):
+ return list(idx.expressions)
+
+
+def _get_index_column_names(idx):
+ return [getattr(exp, "name", None) for exp in _get_index_expressions(idx)]
+
+
+def _column_kwargs(col: Column) -> Mapping:
+ if sqla_13:
+ return col.kwargs
+ else:
+ return {}
+
+
+def _get_constraint_final_name(
+ constraint: Union[Index, Constraint], dialect: Optional[Dialect]
+) -> Optional[str]:
+ if constraint.name is None:
+ return None
+ assert dialect is not None
+ if sqla_14:
+ # for SQLAlchemy 1.4 we would like to have the option to expand
+ # the use of "deferred" names for constraints as well as to have
+ # some flexibility with "None" name and similar; make use of new
+ # SQLAlchemy API to return what would be the final compiled form of
+ # the name for this dialect.
+ return dialect.identifier_preparer.format_constraint(
+ constraint, _alembic_quote=False
+ )
+ else:
+ # prior to SQLAlchemy 1.4, work around quoting logic to get at the
+ # final compiled name without quotes.
+ if hasattr(constraint.name, "quote"):
+ # might be quoted_name, might be truncated_name, keep it the
+ # same
+ quoted_name_cls: type = type(constraint.name)
+ else:
+ quoted_name_cls = quoted_name
+
+ new_name = quoted_name_cls(str(constraint.name), quote=False)
+ constraint = constraint.__class__(name=new_name)
+
+ if isinstance(constraint, schema.Index):
+ # name should not be quoted.
+ d = dialect.ddl_compiler(dialect, None) # type: ignore[arg-type]
+ return d._prepared_index_name( # type: ignore[attr-defined]
+ constraint
+ )
+ else:
+ # name should not be quoted.
+ return dialect.identifier_preparer.format_constraint(constraint)
+
+
+def _constraint_is_named(
+ constraint: Union[Constraint, Index], dialect: Optional[Dialect]
+) -> bool:
+ if sqla_14:
+ if constraint.name is None:
+ return False
+ assert dialect is not None
+ name = dialect.identifier_preparer.format_constraint(
+ constraint, _alembic_quote=False
+ )
+ return name is not None
+ else:
+ return constraint.name is not None
+
+
+def _is_mariadb(mysql_dialect: Dialect) -> bool:
+ if sqla_14:
+ return mysql_dialect.is_mariadb # type: ignore[attr-defined]
+ else:
+ return bool(
+ mysql_dialect.server_version_info
+ and mysql_dialect._is_mariadb # type: ignore[attr-defined]
+ )
+
+
+def _mariadb_normalized_version_info(mysql_dialect):
+ return mysql_dialect._mariadb_normalized_version_info
+
+
+def _insert_inline(table: Union[TableClause, Table]) -> Insert:
+ if sqla_14:
+ return table.insert().inline()
+ else:
+ return table.insert(inline=True) # type: ignore[call-arg]
+
+
+if sqla_14:
+ from sqlalchemy import create_mock_engine
+ from sqlalchemy import select as _select
+else:
+ from sqlalchemy import create_engine
+
+ def create_mock_engine(url, executor, **kw): # type: ignore[misc]
+ return create_engine(
+ "postgresql://", strategy="mock", executor=executor
+ )
+
+ def _select(*columns, **kw) -> Select: # type: ignore[no-redef]
+ return sql.select(list(columns), **kw) # type: ignore[call-overload]
+
+
+def is_expression_index(index: Index) -> bool:
+ expr: Any
+ for expr in index.expressions:
+ while isinstance(expr, UnaryExpression):
+ expr = expr.element
+ if not isinstance(expr, ColumnClause) or expr.is_literal:
+ return True
+ return False
diff --git a/Backend/venv/lib/python3.12/site-packages/annotated_types-0.7.0.dist-info/INSTALLER b/Backend/venv/lib/python3.12/site-packages/annotated_types-0.7.0.dist-info/INSTALLER
new file mode 100644
index 00000000..a1b589e3
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/annotated_types-0.7.0.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/Backend/venv/lib/python3.12/site-packages/annotated_types-0.7.0.dist-info/METADATA b/Backend/venv/lib/python3.12/site-packages/annotated_types-0.7.0.dist-info/METADATA
new file mode 100644
index 00000000..3ac05cfd
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/annotated_types-0.7.0.dist-info/METADATA
@@ -0,0 +1,295 @@
+Metadata-Version: 2.3
+Name: annotated-types
+Version: 0.7.0
+Summary: Reusable constraint types to use with typing.Annotated
+Project-URL: Homepage, https://github.com/annotated-types/annotated-types
+Project-URL: Source, https://github.com/annotated-types/annotated-types
+Project-URL: Changelog, https://github.com/annotated-types/annotated-types/releases
+Author-email: Adrian Garcia Badaracco <1755071+adriangb@users.noreply.github.com>, Samuel Colvin , Zac Hatfield-Dodds
+License-File: LICENSE
+Classifier: Development Status :: 4 - Beta
+Classifier: Environment :: Console
+Classifier: Environment :: MacOS X
+Classifier: Intended Audience :: Developers
+Classifier: Intended Audience :: Information Technology
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Operating System :: POSIX :: Linux
+Classifier: Operating System :: Unix
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Classifier: Typing :: Typed
+Requires-Python: >=3.8
+Requires-Dist: typing-extensions>=4.0.0; python_version < '3.9'
+Description-Content-Type: text/markdown
+
+# annotated-types
+
+[PEP-593](https://peps.python.org/pep-0593/) added `typing.Annotated` as a way of
+adding context-specific metadata to existing types, and specifies that
+`Annotated[T, x]` _should_ be treated as `T` by any tool or library without special
+logic for `x`.
+
+This package provides metadata objects which can be used to represent common
+constraints such as upper and lower bounds on scalar values and collection sizes,
+a `Predicate` marker for runtime checks, and
+descriptions of how we intend these metadata to be interpreted. In some cases,
+we also note alternative representations which do not require this package.
+
+## Install
+
+```bash
+pip install annotated-types
+```
+
+## Examples
+
+```python
+from typing import Annotated
+from annotated_types import Gt, Len, Predicate
+
+class MyClass:
+    age: Annotated[int, Gt(18)]                          # Valid: 19, 20, ...
+                                                         # Invalid: 17, 18, "19", 19.0, ...
+    factors: list[Annotated[int, Predicate(is_prime)]]   # Valid: 2, 3, 5, 7, 11, ...
+                                                         # Invalid: 4, 8, -2, 5.0, "prime", ...
+
+    my_list: Annotated[list[int], Len(0, 10)]            # Valid: [], [10, 20, 30, 40, 50]
+                                                         # Invalid: (1, 2), ["abc"], [0] * 20
+```
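+
+The `is_prime` predicate in this example is assumed to be defined by the user;
+a minimal sketch (not part of `annotated-types`):
+
+```python
+def is_prime(n: int) -> bool:
+    """Trial-division primality check; adequate for small integers."""
+    if n < 2:
+        return False
+    return all(n % d for d in range(2, int(n**0.5) + 1))
+```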
+
+## Documentation
+
+_While `annotated-types` avoids runtime checks for performance, users should not
+construct invalid combinations such as `MultipleOf("non-numeric")` or `Annotated[int, Len(3)]`.
+Downstream implementors may choose to raise an error, emit a warning, silently ignore
+a metadata item, etc., if the metadata objects described below are used with an
+incompatible type - or for any other reason!_
+
+### Gt, Ge, Lt, Le
+
+Express inclusive and/or exclusive bounds on orderable values - which may be numbers,
+dates, times, strings, sets, etc. Note that the boundary value need not be of the
+same type that was annotated, so long as they can be compared: `Annotated[int, Gt(1.5)]`
+is fine, for example, and implies that the value is an integer x such that `x > 1.5`.
+
+We suggest that implementors may also interpret `functools.partial(operator.lt, 1.5)`
+as being equivalent to `Gt(1.5)` (since `partial(operator.lt, 1.5)(v)` checks `1.5 < v`),
+for users who wish to avoid a runtime dependency on the `annotated-types` package.
+
+To be explicit, these types have the following meanings:
+
+* `Gt(x)` - value must be "Greater Than" `x` - equivalent to exclusive minimum
+* `Ge(x)` - value must be "Greater than or Equal" to `x` - equivalent to inclusive minimum
+* `Lt(x)` - value must be "Less Than" `x` - equivalent to exclusive maximum
+* `Le(x)` - value must be "Less than or Equal" to `x` - equivalent to inclusive maximum
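+
+As a sketch of how a consumer library might enforce these bounds at runtime
+(illustrative only; the `check_bounds` helper is not part of `annotated-types`):
+
+```python
+from typing import Annotated, get_args, get_origin
+from annotated_types import Ge, Gt, Le, Lt
+
+def check_bounds(tp, value) -> bool:
+    """Return True if `value` satisfies every bound annotated on `tp`."""
+    if get_origin(tp) is not Annotated:
+        return True
+    for meta in get_args(tp)[1:]:
+        if isinstance(meta, Gt) and not value > meta.gt:
+            return False
+        if isinstance(meta, Ge) and not value >= meta.ge:
+            return False
+        if isinstance(meta, Lt) and not value < meta.lt:
+            return False
+        if isinstance(meta, Le) and not value <= meta.le:
+            return False
+    return True
+
+assert check_bounds(Annotated[int, Gt(1.5)], 2)
+assert not check_bounds(Annotated[int, Gt(1.5)], 1)
+```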
+
+### Interval
+
+`Interval(gt, ge, lt, le)` allows you to specify an upper and lower bound with a single
+metadata object. `None` attributes should be ignored, and non-`None` attributes
+treated as per the single bounds above.
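+
+Because `Interval` is a `GroupedMetadata` (see below), iterating over it yields
+the equivalent single-bound objects:
+
+```python
+from annotated_types import Ge, Interval, Lt
+
+assert list(Interval(ge=0, lt=10)) == [Ge(0), Lt(10)]
+```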
+
+### MultipleOf
+
+`MultipleOf(multiple_of=x)` might be interpreted in two ways:
+
+1. Python semantics, implying `value % multiple_of == 0`, or
+2. [JSONschema semantics](https://json-schema.org/draft/2020-12/json-schema-validation.html#rfc.section.6.2.1),
+ where `int(value / multiple_of) == value / multiple_of`.
+
+We encourage users to be aware of these two common interpretations and their
+distinct behaviours, especially since very large or non-integer numbers make
+it easy to cause silent data corruption due to floating-point imprecision.
+
+We encourage libraries to carefully document which interpretation they implement.
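+
+A quick demonstration of the floating-point pitfall, under either interpretation:
+
+```python
+# Python semantics: 0.3 is *not* a multiple of 0.1, because of float rounding.
+print(0.3 % 0.1)                    # 0.09999999999999998, not 0.0
+# JSON Schema semantics fail for the same reason:
+print(int(0.3 / 0.1) == 0.3 / 0.1)  # False: 0.3 / 0.1 == 2.9999999999999996
+```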
+
+### MinLen, MaxLen, Len
+
+`Len()` implies that `min_length <= len(value) <= max_length` - lower and upper bounds are inclusive.
+
+As well as `Len()` which can optionally include upper and lower bounds, we also
+provide `MinLen(x)` and `MaxLen(y)` which are equivalent to `Len(min_length=x)`
+and `Len(max_length=y)` respectively.
+
+`Len`, `MinLen`, and `MaxLen` may be used with any type which supports `len(value)`.
+
+Examples of usage:
+
+* `Annotated[list, MaxLen(10)]` (or `Annotated[list, Len(max_length=10)]`) - list must have a length of 10 or less
+* `Annotated[str, MaxLen(10)]` - string must have a length of 10 or less
+* `Annotated[list, MinLen(3)]` (or `Annotated[list, Len(min_length=3)]`) - list must have a length of 3 or more
+* `Annotated[list, Len(4, 6)]` - list must have a length of 4, 5, or 6
+* `Annotated[list, Len(8, 8)]` - list must have a length of exactly 8
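+
+Like `Interval`, `Len` is a `GroupedMetadata` that unpacks into `MinLen` and/or
+`MaxLen` when iterated:
+
+```python
+from annotated_types import Len, MaxLen, MinLen
+
+assert list(Len(4, 6)) == [MinLen(4), MaxLen(6)]
+assert list(Len(0, 10)) == [MaxLen(10)]  # the default min_length of 0 is omitted
+```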
+
+#### Changed in v0.4.0
+
+* `min_inclusive` has been renamed to `min_length`, no change in meaning
+* `max_exclusive` has been renamed to `max_length`, upper bound is now **inclusive** instead of **exclusive**
+* The recommendation that slices are interpreted as `Len` has been removed due to ambiguity and different semantic
+ meaning of the upper bound in slices vs. `Len`
+
+See [issue #23](https://github.com/annotated-types/annotated-types/issues/23) for discussion.
+
+### Timezone
+
+`Timezone` can be used with a `datetime` or a `time` to express which timezones
+are allowed. `Annotated[datetime, Timezone(None)]` must be a naive datetime.
+`Timezone(...)` ([literal ellipsis](https://docs.python.org/3/library/constants.html#Ellipsis))
+expresses that any timezone-aware datetime is allowed. You may also pass a specific
+timezone string or [`tzinfo`](https://docs.python.org/3/library/datetime.html#tzinfo-objects)
+object such as `Timezone(timezone.utc)` or `Timezone("Africa/Abidjan")` to express that you only
+allow a specific timezone, though we note that this is often a symptom of fragile design.
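+
+A sketch of how a consumer might enforce the naive/aware cases (illustrative
+only; the `satisfies` helper is hypothetical and exact-timezone matching is
+simplified):
+
+```python
+from datetime import datetime, timezone
+from annotated_types import Timezone
+
+def satisfies(dt: datetime, constraint: Timezone) -> bool:
+    if constraint.tz is None:
+        return dt.tzinfo is None      # must be naive
+    if constraint.tz is Ellipsis:
+        return dt.tzinfo is not None  # any aware datetime
+    return dt.tzinfo == constraint.tz  # simplified exact match
+
+assert satisfies(datetime(2000, 1, 1), Timezone(None))
+assert satisfies(datetime(2000, 1, 1, tzinfo=timezone.utc), Timezone(...))
+```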
+
+#### Changed in v0.x.x
+
+* `Timezone` accepts [`tzinfo`](https://docs.python.org/3/library/datetime.html#tzinfo-objects) objects instead of
+ `timezone`, extending compatibility to [`zoneinfo`](https://docs.python.org/3/library/zoneinfo.html) and third party libraries.
+
+### Unit
+
+`Unit(unit: str)` expresses that the annotated numeric value is the magnitude of
+a quantity with the specified unit. For example, `Annotated[float, Unit("m/s")]`
+would be a float representing a velocity in meters per second.
+
+Please note that `annotated_types` itself makes no attempt to parse or validate
+the unit string in any way. That is left entirely to downstream libraries,
+such as [`pint`](https://pint.readthedocs.io) or
+[`astropy.units`](https://docs.astropy.org/en/stable/units/).
+
+An example of how a library might use this metadata:
+
+```python
+from annotated_types import Unit
+from typing import Annotated, TypeVar, Callable, Any, get_origin, get_args
+
+# given a type annotated with a unit:
+Meters = Annotated[float, Unit("m")]
+
+
+# you can cast the annotation to a specific unit type with any
+# callable that accepts a string and returns the desired type
+T = TypeVar("T")
+def cast_unit(tp: Any, unit_cls: Callable[[str], T]) -> T | None:
+    if get_origin(tp) is Annotated:
+        for arg in get_args(tp):
+            if isinstance(arg, Unit):
+                return unit_cls(arg.unit)
+    return None
+
+
+# using `pint`
+import pint
+pint_unit = cast_unit(Meters, pint.Unit)
+
+
+# using `astropy.units`
+import astropy.units as u
+astropy_unit = cast_unit(Meters, u.Unit)
+```
+
+### Predicate
+
+`Predicate(func: Callable)` expresses that `func(value)` is truthy for valid values.
+Users should prefer the statically inspectable metadata above, but if you need
+the full power and flexibility of arbitrary runtime predicates... here it is.
+
+For some common constraints, we provide generic types:
+
+* `IsLower = Annotated[T, Predicate(str.islower)]`
+* `IsUpper = Annotated[T, Predicate(str.isupper)]`
+* `IsDigit = Annotated[T, Predicate(str.isdigit)]`
+* `IsFinite = Annotated[T, Predicate(math.isfinite)]`
+* `IsNotFinite = Annotated[T, Predicate(Not(math.isfinite))]`
+* `IsNan = Annotated[T, Predicate(math.isnan)]`
+* `IsNotNan = Annotated[T, Predicate(Not(math.isnan))]`
+* `IsInfinite = Annotated[T, Predicate(math.isinf)]`
+* `IsNotInfinite = Annotated[T, Predicate(Not(math.isinf))]`
+
+so that you can write e.g. `x: IsFinite[float] = 2.0` instead of the longer
+(but exactly equivalent) `x: Annotated[float, Predicate(math.isfinite)] = 2.0`.
+
+Some libraries might have special logic to handle known or understandable predicates,
+for example by checking for `str.isdigit` and using its presence to both call custom
+logic to enforce digit-only strings, and customise some generated external schema.
+Users are therefore encouraged to avoid indirection like `lambda s: s.lower()`, in
+favor of introspectable methods such as `str.lower` or `re.compile("pattern").search`.
+
+To enable basic negation of commonly used predicates like `math.isnan` without introducing indirection that would prevent implementers from introspecting the predicate, we provide a `Not` wrapper that simply negates the predicate in an introspectable manner. Several of the predicates listed above are created in this way.
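+
+For example, `Not` wraps a callable and negates its result while keeping the
+original predicate reachable via its `func` attribute:
+
+```python
+import math
+from annotated_types import Not
+
+not_nan = Not(math.isnan)
+assert not_nan(1.23) is True        # 1.23 is not NaN
+assert not_nan.func is math.isnan   # the wrapped predicate stays introspectable
+```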
+
+We do not specify what behaviour should be expected for predicates that raise
+an exception. For example `Annotated[int, Predicate(str.isdigit)]` might silently
+skip invalid constraints, or statically raise an error; or it might try calling it
+and then propagate or discard the resulting
+`TypeError: descriptor 'isdigit' for 'str' objects doesn't apply to a 'int' object`
+exception. We encourage libraries to document the behaviour they choose.
+
+### Doc
+
+`doc()` can be used to add documentation information in `Annotated`, for function and method parameters, variables, class attributes, return types, and any place where `Annotated` can be used.
+
+It expects a value that can be statically analyzed, as the main use case is for static analysis, editors, documentation generators, and similar tools.
+
+It returns a `DocInfo` class with a single attribute `documentation` containing the value passed to `doc()`.
+
+This is the early adopter's alternative form of the [`typing-doc` proposal](https://github.com/tiangolo/fastapi/blob/typing-doc/typing_doc.md).
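+
+A minimal usage sketch:
+
+```python
+from typing import Annotated
+from annotated_types import doc
+
+UserName = Annotated[str, doc("The user's login name")]
+assert doc("The user's login name").documentation == "The user's login name"
+```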
+
+### Integrating downstream types with `GroupedMetadata`
+
+Implementers may choose to provide a convenience wrapper that groups multiple pieces of metadata.
+This can help reduce verbosity and cognitive overhead for users.
+For example, an implementer like Pydantic might provide a `Field` or `Meta` type that accepts keyword arguments and transforms these into low-level metadata:
+
+```python
+from dataclasses import dataclass
+from typing import Iterator
+from annotated_types import GroupedMetadata, Ge
+
+@dataclass
+class Field(GroupedMetadata):
+    ge: int | None = None
+    description: str | None = None
+
+    def __iter__(self) -> Iterator[object]:
+        # Iterating over a GroupedMetadata object should yield annotated-types
+        # constraint metadata objects which describe it as fully as possible,
+        # and may include other unknown objects too.
+        if self.ge is not None:
+            yield Ge(self.ge)
+        if self.description is not None:
+            yield Description(self.description)
+```
+
+Libraries consuming annotated-types constraints should check for `GroupedMetadata` and unpack it by iterating over the object and treating the results as if they had been "unpacked" in the `Annotated` type. The same logic should be applied to the [PEP 646 `Unpack` type](https://peps.python.org/pep-0646/), so that `Annotated[T, Field(...)]`, `Annotated[T, Unpack[Field(...)]]` and `Annotated[T, *Field(...)]` are all treated consistently.
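+
+A sketch of that unpacking step (illustrative, not a reference implementation):
+
+```python
+from annotated_types import Ge, Gt, GroupedMetadata, Interval, Le
+
+def flatten_metadata(metadata) -> list:
+    """Expand GroupedMetadata items as if they had been written inline."""
+    flattened = []
+    for item in metadata:
+        if isinstance(item, GroupedMetadata):
+            flattened.extend(item)  # groups yield leaf metadata objects
+        else:
+            flattened.append(item)
+    return flattened
+
+assert flatten_metadata([Gt(0), Interval(ge=0, le=10)]) == [Gt(0), Ge(0), Le(10)]
+```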
+
+Libraries consuming annotated-types should also ignore any metadata they do not recognize that came from unpacking a `GroupedMetadata`, just like they ignore unrecognized metadata in `Annotated` itself.
+
+Our own `annotated_types.Interval` class is a `GroupedMetadata` which unpacks itself into `Gt`, `Lt`, etc., so this is not an abstract concern. Similarly, `annotated_types.Len` is a `GroupedMetadata` which unpacks itself into `MinLen` (optionally) and `MaxLen`.
+
+### Consuming metadata
+
+We intend not to be prescriptive as to _how_ the metadata and constraints are used, but as an example of how one might parse constraints from type annotations, see our [implementation in `test_main.py`](https://github.com/annotated-types/annotated-types/blob/f59cf6d1b5255a0fe359b93896759a180bec30ae/tests/test_main.py#L94-L103).
+
+It is up to the implementer to determine how this metadata is used.
+You could use the metadata for runtime type checking, for generating schemas or to generate example data, amongst other use cases.
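+
+For instance, a toy schema generator might translate a few constraints into
+JSON Schema keywords (a sketch; `to_json_schema` is hypothetical):
+
+```python
+from typing import Annotated, get_args, get_origin
+from annotated_types import Ge, Le, MaxLen, MinLen
+
+def to_json_schema(tp) -> dict:
+    schema: dict = {}
+    if get_origin(tp) is Annotated:
+        for meta in get_args(tp)[1:]:
+            if isinstance(meta, Ge):
+                schema["minimum"] = meta.ge
+            elif isinstance(meta, Le):
+                schema["maximum"] = meta.le
+            elif isinstance(meta, MinLen):
+                schema["minLength"] = meta.min_length
+            elif isinstance(meta, MaxLen):
+                schema["maxLength"] = meta.max_length
+    return schema
+
+assert to_json_schema(Annotated[int, Ge(0), Le(10)]) == {"minimum": 0, "maximum": 10}
+```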
+
+## Design & History
+
+This package was designed at the PyCon 2022 sprints by the maintainers of Pydantic
+and Hypothesis, with the goal of making it as easy as possible for end-users to
+provide more informative annotations for use by runtime libraries.
+
+It is deliberately minimal, and following PEP-593 gives downstream tools
+considerable discretion in what (if anything!) they choose to support. Nonetheless, we expect
+that staying simple and covering _only_ the most common use-cases will give users
+and maintainers the best experience we can. If you'd like more constraints for your
+types - follow our lead, by defining them and documenting them downstream!
diff --git a/Backend/venv/lib/python3.12/site-packages/annotated_types-0.7.0.dist-info/RECORD b/Backend/venv/lib/python3.12/site-packages/annotated_types-0.7.0.dist-info/RECORD
new file mode 100644
index 00000000..a66e2783
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/annotated_types-0.7.0.dist-info/RECORD
@@ -0,0 +1,10 @@
+annotated_types-0.7.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+annotated_types-0.7.0.dist-info/METADATA,sha256=7ltqxksJJ0wCYFGBNIQCWTlWQGeAH0hRFdnK3CB895E,15046
+annotated_types-0.7.0.dist-info/RECORD,,
+annotated_types-0.7.0.dist-info/WHEEL,sha256=zEMcRr9Kr03x1ozGwg5v9NQBKn3kndp6LSoSlVg-jhU,87
+annotated_types-0.7.0.dist-info/licenses/LICENSE,sha256=_hBJiEsaDZNCkB6I4H8ykl0ksxIdmXK2poBfuYJLCV0,1083
+annotated_types/__init__.py,sha256=RynLsRKUEGI0KimXydlD1fZEfEzWwDo0Uon3zOKhG1Q,13819
+annotated_types/__pycache__/__init__.cpython-312.pyc,,
+annotated_types/__pycache__/test_cases.cpython-312.pyc,,
+annotated_types/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+annotated_types/test_cases.py,sha256=zHFX6EpcMbGJ8FzBYDbO56bPwx_DYIVSKbZM-4B3_lg,6421
diff --git a/Backend/venv/lib/python3.12/site-packages/annotated_types-0.7.0.dist-info/WHEEL b/Backend/venv/lib/python3.12/site-packages/annotated_types-0.7.0.dist-info/WHEEL
new file mode 100644
index 00000000..516596c7
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/annotated_types-0.7.0.dist-info/WHEEL
@@ -0,0 +1,4 @@
+Wheel-Version: 1.0
+Generator: hatchling 1.24.2
+Root-Is-Purelib: true
+Tag: py3-none-any
diff --git a/Backend/venv/lib/python3.12/site-packages/annotated_types-0.7.0.dist-info/licenses/LICENSE b/Backend/venv/lib/python3.12/site-packages/annotated_types-0.7.0.dist-info/licenses/LICENSE
new file mode 100644
index 00000000..d99323a9
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/annotated_types-0.7.0.dist-info/licenses/LICENSE
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2022 the contributors
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/Backend/venv/lib/python3.12/site-packages/annotated_types/__init__.py b/Backend/venv/lib/python3.12/site-packages/annotated_types/__init__.py
new file mode 100644
index 00000000..74e0deea
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/annotated_types/__init__.py
@@ -0,0 +1,432 @@
+import math
+import sys
+import types
+from dataclasses import dataclass
+from datetime import tzinfo
+from typing import TYPE_CHECKING, Any, Callable, Iterator, Optional, SupportsFloat, SupportsIndex, TypeVar, Union
+
+if sys.version_info < (3, 8):
+ from typing_extensions import Protocol, runtime_checkable
+else:
+ from typing import Protocol, runtime_checkable
+
+if sys.version_info < (3, 9):
+ from typing_extensions import Annotated, Literal
+else:
+ from typing import Annotated, Literal
+
+if sys.version_info < (3, 10):
+ EllipsisType = type(Ellipsis)
+ KW_ONLY = {}
+ SLOTS = {}
+else:
+ from types import EllipsisType
+
+ KW_ONLY = {"kw_only": True}
+ SLOTS = {"slots": True}
+
+
+__all__ = (
+ 'BaseMetadata',
+ 'GroupedMetadata',
+ 'Gt',
+ 'Ge',
+ 'Lt',
+ 'Le',
+ 'Interval',
+ 'MultipleOf',
+ 'MinLen',
+ 'MaxLen',
+ 'Len',
+ 'Timezone',
+ 'Predicate',
+ 'LowerCase',
+ 'UpperCase',
+ 'IsDigits',
+ 'IsFinite',
+ 'IsNotFinite',
+ 'IsNan',
+ 'IsNotNan',
+ 'IsInfinite',
+ 'IsNotInfinite',
+ 'doc',
+ 'DocInfo',
+ '__version__',
+)
+
+__version__ = '0.7.0'
+
+
+T = TypeVar('T')
+
+
+# arguments that start with __ are considered
+# positional only
+# see https://peps.python.org/pep-0484/#positional-only-arguments
+
+
+class SupportsGt(Protocol):
+ def __gt__(self: T, __other: T) -> bool:
+ ...
+
+
+class SupportsGe(Protocol):
+ def __ge__(self: T, __other: T) -> bool:
+ ...
+
+
+class SupportsLt(Protocol):
+ def __lt__(self: T, __other: T) -> bool:
+ ...
+
+
+class SupportsLe(Protocol):
+ def __le__(self: T, __other: T) -> bool:
+ ...
+
+
+class SupportsMod(Protocol):
+ def __mod__(self: T, __other: T) -> T:
+ ...
+
+
+class SupportsDiv(Protocol):
+ def __div__(self: T, __other: T) -> T:
+ ...
+
+
+class BaseMetadata:
+ """Base class for all metadata.
+
+ This exists mainly so that implementers
+ can do `isinstance(..., BaseMetadata)` while traversing field annotations.
+ """
+
+ __slots__ = ()
+
+
+@dataclass(frozen=True, **SLOTS)
+class Gt(BaseMetadata):
+ """Gt(gt=x) implies that the value must be greater than x.
+
+ It can be used with any type that supports the ``>`` operator,
+ including numbers, dates and times, strings, sets, and so on.
+ """
+
+ gt: SupportsGt
+
+
+@dataclass(frozen=True, **SLOTS)
+class Ge(BaseMetadata):
+ """Ge(ge=x) implies that the value must be greater than or equal to x.
+
+ It can be used with any type that supports the ``>=`` operator,
+ including numbers, dates and times, strings, sets, and so on.
+ """
+
+ ge: SupportsGe
+
+
+@dataclass(frozen=True, **SLOTS)
+class Lt(BaseMetadata):
+ """Lt(lt=x) implies that the value must be less than x.
+
+ It can be used with any type that supports the ``<`` operator,
+ including numbers, dates and times, strings, sets, and so on.
+ """
+
+ lt: SupportsLt
+
+
+@dataclass(frozen=True, **SLOTS)
+class Le(BaseMetadata):
+ """Le(le=x) implies that the value must be less than or equal to x.
+
+ It can be used with any type that supports the ``<=`` operator,
+ including numbers, dates and times, strings, sets, and so on.
+ """
+
+ le: SupportsLe
+
+
+@runtime_checkable
+class GroupedMetadata(Protocol):
+ """A grouping of multiple objects, like typing.Unpack.
+
+ `GroupedMetadata` on its own is not metadata and has no meaning.
+ All of the constraints and metadata should be fully expressible
+ in terms of the `BaseMetadata`'s returned by `GroupedMetadata.__iter__()`.
+
+ Concrete implementations should override `GroupedMetadata.__iter__()`
+ to add their own metadata.
+ For example:
+
+ >>> @dataclass
+ >>> class Field(GroupedMetadata):
+ >>> gt: float | None = None
+ >>> description: str | None = None
+ ...
+ >>> def __iter__(self) -> Iterable[object]:
+ >>> if self.gt is not None:
+ >>> yield Gt(self.gt)
+ >>> if self.description is not None:
+ >>> yield Description(self.description)
+
+ Also see the implementation of `Interval` below for an example.
+
+ Parsers should recognize this and unpack it so that it can be used
+ both with and without unpacking:
+
+ - `Annotated[int, Field(...)]` (parser must unpack Field)
+ - `Annotated[int, *Field(...)]` (PEP-646)
+ """ # noqa: trailing-whitespace
+
+ @property
+ def __is_annotated_types_grouped_metadata__(self) -> Literal[True]:
+ return True
+
+ def __iter__(self) -> Iterator[object]:
+ ...
+
+ if not TYPE_CHECKING:
+ __slots__ = () # allow subclasses to use slots
+
+ def __init_subclass__(cls, *args: Any, **kwargs: Any) -> None:
+ # Basic ABC like functionality without the complexity of an ABC
+ super().__init_subclass__(*args, **kwargs)
+ if cls.__iter__ is GroupedMetadata.__iter__:
+ raise TypeError("Can't subclass GroupedMetadata without implementing __iter__")
+
+ def __iter__(self) -> Iterator[object]: # noqa: F811
+ raise NotImplementedError # more helpful than "None has no attribute..." type errors
+
+
+@dataclass(frozen=True, **KW_ONLY, **SLOTS)
+class Interval(GroupedMetadata):
+ """Interval can express inclusive or exclusive bounds with a single object.
+
+ It accepts keyword arguments ``gt``, ``ge``, ``lt``, and/or ``le``, which
+ are interpreted the same way as the single-bound constraints.
+ """
+
+ gt: Union[SupportsGt, None] = None
+ ge: Union[SupportsGe, None] = None
+ lt: Union[SupportsLt, None] = None
+ le: Union[SupportsLe, None] = None
+
+ def __iter__(self) -> Iterator[BaseMetadata]:
+ """Unpack an Interval into zero or more single-bounds."""
+ if self.gt is not None:
+ yield Gt(self.gt)
+ if self.ge is not None:
+ yield Ge(self.ge)
+ if self.lt is not None:
+ yield Lt(self.lt)
+ if self.le is not None:
+ yield Le(self.le)
+
+
+@dataclass(frozen=True, **SLOTS)
+class MultipleOf(BaseMetadata):
+ """MultipleOf(multiple_of=x) might be interpreted in two ways:
+
+ 1. Python semantics, implying ``value % multiple_of == 0``, or
+ 2. JSONschema semantics, where ``int(value / multiple_of) == value / multiple_of``
+
+ We encourage users to be aware of these two common interpretations,
+ and libraries to carefully document which they implement.
+ """
+
+ multiple_of: Union[SupportsDiv, SupportsMod]
+
+
+@dataclass(frozen=True, **SLOTS)
+class MinLen(BaseMetadata):
+ """
+ MinLen() implies minimum inclusive length,
+ e.g. ``len(value) >= min_length``.
+ """
+
+ min_length: Annotated[int, Ge(0)]
+
+
+@dataclass(frozen=True, **SLOTS)
+class MaxLen(BaseMetadata):
+ """
+ MaxLen() implies maximum inclusive length,
+ e.g. ``len(value) <= max_length``.
+ """
+
+ max_length: Annotated[int, Ge(0)]
+
+
+@dataclass(frozen=True, **SLOTS)
+class Len(GroupedMetadata):
+ """
+ Len() implies that ``min_length <= len(value) <= max_length``.
+
+ Upper bound may be omitted or ``None`` to indicate no upper length bound.
+ """
+
+ min_length: Annotated[int, Ge(0)] = 0
+ max_length: Optional[Annotated[int, Ge(0)]] = None
+
+ def __iter__(self) -> Iterator[BaseMetadata]:
+ """Unpack a Len into zone or more single-bounds."""
+ if self.min_length > 0:
+ yield MinLen(self.min_length)
+ if self.max_length is not None:
+ yield MaxLen(self.max_length)
+
+
+@dataclass(frozen=True, **SLOTS)
+class Timezone(BaseMetadata):
+ """Timezone(tz=...) requires a datetime to be aware (or ``tz=None``, naive).
+
+ ``Annotated[datetime, Timezone(None)]`` must be a naive datetime.
+ ``Timezone(...)`` (the ellipsis literal) expresses that the datetime must be
+ tz-aware but any timezone is allowed.
+
+ You may also pass a specific timezone string or tzinfo object such as
+ ``Timezone(timezone.utc)`` or ``Timezone("Africa/Abidjan")`` to express that
+ you only allow a specific timezone, though we note that this is often
+ a symptom of poor design.
+ """
+
+ tz: Union[str, tzinfo, EllipsisType, None]
+
+
+@dataclass(frozen=True, **SLOTS)
+class Unit(BaseMetadata):
+ """Indicates that the value is a physical quantity with the specified unit.
+
+ It is intended for usage with numeric types, where the value represents the
+ magnitude of the quantity. For example, ``distance: Annotated[float, Unit('m')]``
+ or ``speed: Annotated[float, Unit('m/s')]``.
+
+ Interpretation of the unit string is left to the discretion of the consumer.
+ It is suggested to follow conventions established by python libraries that work
+ with physical quantities, such as
+
+ - ``pint`` : <https://pint.readthedocs.io>
+ - ``astropy.units`` : <https://docs.astropy.org/en/stable/units/>
+
+ For indicating a quantity with a certain dimensionality but without a specific unit
+ it is recommended to use square brackets, e.g. ``Annotated[float, Unit('[time]')]``.
+ Note, however, ``annotated_types`` itself makes no use of the unit string.
+ """
+
+ unit: str
+
+
+@dataclass(frozen=True, **SLOTS)
+class Predicate(BaseMetadata):
+ """``Predicate(func: Callable)`` implies `func(value)` is truthy for valid values.
+
+ Users should prefer statically inspectable metadata, but if you need the full
+ power and flexibility of arbitrary runtime predicates... here it is.
+
+ We provide a few predefined predicates for common string constraints:
+ ``IsLower = Predicate(str.islower)``, ``IsUpper = Predicate(str.isupper)``, and
+ ``IsDigits = Predicate(str.isdigit)``. Users are encouraged to use methods which
+ can be given special handling, and avoid indirection like ``lambda s: s.lower()``.
+
+ Some libraries might have special logic to handle certain predicates, e.g. by
+ checking for `str.isdigit` and using its presence to both call custom logic to
+ enforce digit-only strings, and customise some generated external schema.
+
+ We do not specify what behaviour should be expected for predicates that raise
+ an exception. For example `Annotated[int, Predicate(str.isdigit)]` might silently
+ skip invalid constraints, or statically raise an error; or it might try calling it
+ and then propagate or discard the resulting exception.
+ """
+
+ func: Callable[[Any], bool]
+
+ def __repr__(self) -> str:
+ if getattr(self.func, "__name__", "") == "":
+ return f"{self.__class__.__name__}({self.func!r})"
+ if isinstance(self.func, (types.MethodType, types.BuiltinMethodType)) and (
+ namespace := getattr(self.func.__self__, "__name__", None)
+ ):
+ return f"{self.__class__.__name__}({namespace}.{self.func.__name__})"
+ if isinstance(self.func, type(str.isascii)): # method descriptor
+ return f"{self.__class__.__name__}({self.func.__qualname__})"
+ return f"{self.__class__.__name__}({self.func.__name__})"
+
+
+@dataclass
+class Not:
+ func: Callable[[Any], bool]
+
+ def __call__(self, __v: Any) -> bool:
+ return not self.func(__v)
+
+
+_StrType = TypeVar("_StrType", bound=str)
+
+LowerCase = Annotated[_StrType, Predicate(str.islower)]
+"""
+Return True if the string is a lowercase string, False otherwise.
+
+A string is lowercase if all cased characters in the string are lowercase and there is at least one cased character in the string.
+""" # noqa: E501
+UpperCase = Annotated[_StrType, Predicate(str.isupper)]
+"""
+Return True if the string is an uppercase string, False otherwise.
+
+A string is uppercase if all cased characters in the string are uppercase and there is at least one cased character in the string.
+""" # noqa: E501
+IsDigit = Annotated[_StrType, Predicate(str.isdigit)]
+IsDigits = IsDigit # type: ignore # plural for backwards compatibility, see #63
+"""
+Return True if the string is a digit string, False otherwise.
+
+A string is a digit string if all characters in the string are digits and there is at least one character in the string.
+""" # noqa: E501
+IsAscii = Annotated[_StrType, Predicate(str.isascii)]
+"""
+Return True if all characters in the string are ASCII, False otherwise.
+
+ASCII characters have code points in the range U+0000-U+007F. Empty string is ASCII too.
+"""
+
+_NumericType = TypeVar('_NumericType', bound=Union[SupportsFloat, SupportsIndex])
+IsFinite = Annotated[_NumericType, Predicate(math.isfinite)]
+"""Return True if x is neither an infinity nor a NaN, and False otherwise."""
+IsNotFinite = Annotated[_NumericType, Predicate(Not(math.isfinite))]
+"""Return True if x is one of infinity or NaN, and False otherwise"""
+IsNan = Annotated[_NumericType, Predicate(math.isnan)]
+"""Return True if x is a NaN (not a number), and False otherwise."""
+IsNotNan = Annotated[_NumericType, Predicate(Not(math.isnan))]
+"""Return True if x is anything but NaN (not a number), and False otherwise."""
+IsInfinite = Annotated[_NumericType, Predicate(math.isinf)]
+"""Return True if x is a positive or negative infinity, and False otherwise."""
+IsNotInfinite = Annotated[_NumericType, Predicate(Not(math.isinf))]
+"""Return True if x is neither a positive or negative infinity, and False otherwise."""
+
+try:
+ from typing_extensions import DocInfo, doc # type: ignore [attr-defined]
+except ImportError:
+
+ @dataclass(frozen=True, **SLOTS)
+ class DocInfo: # type: ignore [no-redef]
+ """ "
+ The return value of doc(), mainly to be used by tools that want to extract the
+ Annotated documentation at runtime.
+ """
+
+ documentation: str
+ """The documentation string passed to doc()."""
+
+ def doc(
+ documentation: str,
+ ) -> DocInfo:
+ """
+ Add documentation to a type annotation inside of Annotated.
+
+ For example:
+
+ >>> def hi(name: Annotated[int, doc("The name of the user")]) -> None: ...
+ """
+ return DocInfo(documentation)
diff --git a/Backend/venv/lib/python3.12/site-packages/annotated_types/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/annotated_types/__pycache__/__init__.cpython-312.pyc
new file mode 100644
index 00000000..e2383ef0
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/annotated_types/__pycache__/__init__.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/annotated_types/__pycache__/test_cases.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/annotated_types/__pycache__/test_cases.cpython-312.pyc
new file mode 100644
index 00000000..f03f257b
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/annotated_types/__pycache__/test_cases.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/annotated_types/py.typed b/Backend/venv/lib/python3.12/site-packages/annotated_types/py.typed
new file mode 100644
index 00000000..e69de29b
diff --git a/Backend/venv/lib/python3.12/site-packages/annotated_types/test_cases.py b/Backend/venv/lib/python3.12/site-packages/annotated_types/test_cases.py
new file mode 100644
index 00000000..d9164d68
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/annotated_types/test_cases.py
@@ -0,0 +1,151 @@
+import math
+import sys
+from datetime import date, datetime, timedelta, timezone
+from decimal import Decimal
+from typing import Any, Dict, Iterable, Iterator, List, NamedTuple, Set, Tuple
+
+if sys.version_info < (3, 9):
+ from typing_extensions import Annotated
+else:
+ from typing import Annotated
+
+import annotated_types as at
+
+
+class Case(NamedTuple):
+ """
+ A test case for `annotated_types`.
+ """
+
+ annotation: Any
+ valid_cases: Iterable[Any]
+ invalid_cases: Iterable[Any]
+
+
+def cases() -> Iterable[Case]:
+ # Gt, Ge, Lt, Le
+ yield Case(Annotated[int, at.Gt(4)], (5, 6, 1000), (4, 0, -1))
+ yield Case(Annotated[float, at.Gt(0.5)], (0.6, 0.7, 0.8, 0.9), (0.5, 0.0, -0.1))
+ yield Case(
+ Annotated[datetime, at.Gt(datetime(2000, 1, 1))],
+ [datetime(2000, 1, 2), datetime(2000, 1, 3)],
+ [datetime(2000, 1, 1), datetime(1999, 12, 31)],
+ )
+ yield Case(
+ Annotated[datetime, at.Gt(date(2000, 1, 1))],
+ [date(2000, 1, 2), date(2000, 1, 3)],
+ [date(2000, 1, 1), date(1999, 12, 31)],
+ )
+ yield Case(
+ Annotated[datetime, at.Gt(Decimal('1.123'))],
+ [Decimal('1.1231'), Decimal('123')],
+ [Decimal('1.123'), Decimal('0')],
+ )
+
+ yield Case(Annotated[int, at.Ge(4)], (4, 5, 6, 1000, 4), (0, -1))
+ yield Case(Annotated[float, at.Ge(0.5)], (0.5, 0.6, 0.7, 0.8, 0.9), (0.4, 0.0, -0.1))
+ yield Case(
+ Annotated[datetime, at.Ge(datetime(2000, 1, 1))],
+ [datetime(2000, 1, 2), datetime(2000, 1, 3)],
+ [datetime(1998, 1, 1), datetime(1999, 12, 31)],
+ )
+
+ yield Case(Annotated[int, at.Lt(4)], (0, -1), (4, 5, 6, 1000, 4))
+ yield Case(Annotated[float, at.Lt(0.5)], (0.4, 0.0, -0.1), (0.5, 0.6, 0.7, 0.8, 0.9))
+ yield Case(
+ Annotated[datetime, at.Lt(datetime(2000, 1, 1))],
+ [datetime(1999, 12, 31), datetime(1999, 12, 31)],
+ [datetime(2000, 1, 2), datetime(2000, 1, 3)],
+ )
+
+ yield Case(Annotated[int, at.Le(4)], (4, 0, -1), (5, 6, 1000))
+ yield Case(Annotated[float, at.Le(0.5)], (0.5, 0.0, -0.1), (0.6, 0.7, 0.8, 0.9))
+ yield Case(
+ Annotated[datetime, at.Le(datetime(2000, 1, 1))],
+ [datetime(2000, 1, 1), datetime(1999, 12, 31)],
+ [datetime(2000, 1, 2), datetime(2000, 1, 3)],
+ )
+
+ # Interval
+ yield Case(Annotated[int, at.Interval(gt=4)], (5, 6, 1000), (4, 0, -1))
+ yield Case(Annotated[int, at.Interval(gt=4, lt=10)], (5, 6), (4, 10, 1000, 0, -1))
+ yield Case(Annotated[float, at.Interval(ge=0.5, le=1)], (0.5, 0.9, 1), (0.49, 1.1))
+ yield Case(
+ Annotated[datetime, at.Interval(gt=datetime(2000, 1, 1), le=datetime(2000, 1, 3))],
+ [datetime(2000, 1, 2), datetime(2000, 1, 3)],
+ [datetime(2000, 1, 1), datetime(2000, 1, 4)],
+ )
+
+ yield Case(Annotated[int, at.MultipleOf(multiple_of=3)], (0, 3, 9), (1, 2, 4))
+ yield Case(Annotated[float, at.MultipleOf(multiple_of=0.5)], (0, 0.5, 1, 1.5), (0.4, 1.1))
+
+ # lengths
+
+ yield Case(Annotated[str, at.MinLen(3)], ('123', '1234', 'x' * 10), ('', '1', '12'))
+ yield Case(Annotated[str, at.Len(3)], ('123', '1234', 'x' * 10), ('', '1', '12'))
+ yield Case(Annotated[List[int], at.MinLen(3)], ([1, 2, 3], [1, 2, 3, 4], [1] * 10), ([], [1], [1, 2]))
+ yield Case(Annotated[List[int], at.Len(3)], ([1, 2, 3], [1, 2, 3, 4], [1] * 10), ([], [1], [1, 2]))
+
+ yield Case(Annotated[str, at.MaxLen(4)], ('', '1234'), ('12345', 'x' * 10))
+ yield Case(Annotated[str, at.Len(0, 4)], ('', '1234'), ('12345', 'x' * 10))
+ yield Case(Annotated[List[str], at.MaxLen(4)], ([], ['a', 'bcdef'], ['a', 'b', 'c']), (['a'] * 5, ['b'] * 10))
+ yield Case(Annotated[List[str], at.Len(0, 4)], ([], ['a', 'bcdef'], ['a', 'b', 'c']), (['a'] * 5, ['b'] * 10))
+
+ yield Case(Annotated[str, at.Len(3, 5)], ('123', '12345'), ('', '1', '12', '123456', 'x' * 10))
+ yield Case(Annotated[str, at.Len(3, 3)], ('123',), ('12', '1234'))
+
+ yield Case(Annotated[Dict[int, int], at.Len(2, 3)], [{1: 1, 2: 2}], [{}, {1: 1}, {1: 1, 2: 2, 3: 3, 4: 4}])
+ yield Case(Annotated[Set[int], at.Len(2, 3)], ({1, 2}, {1, 2, 3}), (set(), {1}, {1, 2, 3, 4}))
+ yield Case(Annotated[Tuple[int, ...], at.Len(2, 3)], ((1, 2), (1, 2, 3)), ((), (1,), (1, 2, 3, 4)))
+
+ # Timezone
+
+ yield Case(
+ Annotated[datetime, at.Timezone(None)], [datetime(2000, 1, 1)], [datetime(2000, 1, 1, tzinfo=timezone.utc)]
+ )
+ yield Case(
+ Annotated[datetime, at.Timezone(...)], [datetime(2000, 1, 1, tzinfo=timezone.utc)], [datetime(2000, 1, 1)]
+ )
+ yield Case(
+ Annotated[datetime, at.Timezone(timezone.utc)],
+ [datetime(2000, 1, 1, tzinfo=timezone.utc)],
+ [datetime(2000, 1, 1), datetime(2000, 1, 1, tzinfo=timezone(timedelta(hours=6)))],
+ )
+ yield Case(
+ Annotated[datetime, at.Timezone('Europe/London')],
+ [datetime(2000, 1, 1, tzinfo=timezone(timedelta(0), name='Europe/London'))],
+ [datetime(2000, 1, 1), datetime(2000, 1, 1, tzinfo=timezone(timedelta(hours=6)))],
+ )
+
+ # Quantity
+
+ yield Case(Annotated[float, at.Unit(unit='m')], (5, 4.2), ('5m', '4.2m'))
+
+ # predicate types
+
+ yield Case(at.LowerCase[str], ['abc', 'foobar'], ['', 'A', 'Boom'])
+ yield Case(at.UpperCase[str], ['ABC', 'DEFO'], ['', 'a', 'abc', 'AbC'])
+ yield Case(at.IsDigit[str], ['123'], ['', 'ab', 'a1b2'])
+ yield Case(at.IsAscii[str], ['123', 'foo bar'], ['£100', '😊', 'whatever 👀'])
+
+ yield Case(Annotated[int, at.Predicate(lambda x: x % 2 == 0)], [0, 2, 4], [1, 3, 5])
+
+ yield Case(at.IsFinite[float], [1.23], [math.nan, math.inf, -math.inf])
+ yield Case(at.IsNotFinite[float], [math.nan, math.inf], [1.23])
+ yield Case(at.IsNan[float], [math.nan], [1.23, math.inf])
+ yield Case(at.IsNotNan[float], [1.23, math.inf], [math.nan])
+ yield Case(at.IsInfinite[float], [math.inf], [math.nan, 1.23])
+ yield Case(at.IsNotInfinite[float], [math.nan, 1.23], [math.inf])
+
+ # check stacked predicates
+ yield Case(at.IsInfinite[Annotated[float, at.Predicate(lambda x: x > 0)]], [math.inf], [-math.inf, 1.23, math.nan])
+
+ # doc
+ yield Case(Annotated[int, at.doc("A number")], [1, 2], [])
+
+ # custom GroupedMetadata
+ class MyCustomGroupedMetadata(at.GroupedMetadata):
+ def __iter__(self) -> Iterator[at.Predicate]:
+ yield at.Predicate(lambda x: float(x).is_integer())
+
+ yield Case(Annotated[float, MyCustomGroupedMetadata()], [0, 2.0], [0.01, 1.5])
diff --git a/Backend/venv/lib/python3.12/site-packages/anyio-3.7.1.dist-info/INSTALLER b/Backend/venv/lib/python3.12/site-packages/anyio-3.7.1.dist-info/INSTALLER
new file mode 100644
index 00000000..a1b589e3
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/anyio-3.7.1.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/Backend/venv/lib/python3.12/site-packages/anyio-3.7.1.dist-info/LICENSE b/Backend/venv/lib/python3.12/site-packages/anyio-3.7.1.dist-info/LICENSE
new file mode 100644
index 00000000..104eebf5
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/anyio-3.7.1.dist-info/LICENSE
@@ -0,0 +1,20 @@
+The MIT License (MIT)
+
+Copyright (c) 2018 Alex Grönholm
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software is furnished to do so,
+subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
+FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
+COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
+IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/Backend/venv/lib/python3.12/site-packages/anyio-3.7.1.dist-info/METADATA b/Backend/venv/lib/python3.12/site-packages/anyio-3.7.1.dist-info/METADATA
new file mode 100644
index 00000000..5e46476e
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/anyio-3.7.1.dist-info/METADATA
@@ -0,0 +1,105 @@
+Metadata-Version: 2.1
+Name: anyio
+Version: 3.7.1
+Summary: High level compatibility layer for multiple asynchronous event loop implementations
+Author-email: Alex Grönholm
+License: MIT
+Project-URL: Documentation, https://anyio.readthedocs.io/en/latest/
+Project-URL: Changelog, https://anyio.readthedocs.io/en/stable/versionhistory.html
+Project-URL: Source code, https://github.com/agronholm/anyio
+Project-URL: Issue tracker, https://github.com/agronholm/anyio/issues
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Framework :: AnyIO
+Classifier: Typing :: Typed
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Requires-Python: >=3.7
+Description-Content-Type: text/x-rst
+License-File: LICENSE
+Requires-Dist: idna (>=2.8)
+Requires-Dist: sniffio (>=1.1)
+Requires-Dist: exceptiongroup ; python_version < "3.11"
+Requires-Dist: typing-extensions ; python_version < "3.8"
+Provides-Extra: doc
+Requires-Dist: packaging ; extra == 'doc'
+Requires-Dist: Sphinx ; extra == 'doc'
+Requires-Dist: sphinx-rtd-theme (>=1.2.2) ; extra == 'doc'
+Requires-Dist: sphinxcontrib-jquery ; extra == 'doc'
+Requires-Dist: sphinx-autodoc-typehints (>=1.2.0) ; extra == 'doc'
+Provides-Extra: test
+Requires-Dist: anyio[trio] ; extra == 'test'
+Requires-Dist: coverage[toml] (>=4.5) ; extra == 'test'
+Requires-Dist: hypothesis (>=4.0) ; extra == 'test'
+Requires-Dist: psutil (>=5.9) ; extra == 'test'
+Requires-Dist: pytest (>=7.0) ; extra == 'test'
+Requires-Dist: pytest-mock (>=3.6.1) ; extra == 'test'
+Requires-Dist: trustme ; extra == 'test'
+Requires-Dist: uvloop (>=0.17) ; (python_version < "3.12" and platform_python_implementation == "CPython" and platform_system != "Windows") and extra == 'test'
+Requires-Dist: mock (>=4) ; (python_version < "3.8") and extra == 'test'
+Provides-Extra: trio
+Requires-Dist: trio (<0.22) ; extra == 'trio'
+
+.. image:: https://github.com/agronholm/anyio/actions/workflows/test.yml/badge.svg
+ :target: https://github.com/agronholm/anyio/actions/workflows/test.yml
+ :alt: Build Status
+.. image:: https://coveralls.io/repos/github/agronholm/anyio/badge.svg?branch=master
+ :target: https://coveralls.io/github/agronholm/anyio?branch=master
+ :alt: Code Coverage
+.. image:: https://readthedocs.org/projects/anyio/badge/?version=latest
+ :target: https://anyio.readthedocs.io/en/latest/?badge=latest
+ :alt: Documentation
+.. image:: https://badges.gitter.im/gitterHQ/gitter.svg
+ :target: https://gitter.im/python-trio/AnyIO
+ :alt: Gitter chat
+
+AnyIO is an asynchronous networking and concurrency library that works on top of either asyncio_ or
+trio_. It implements trio-like `structured concurrency`_ (SC) on top of asyncio and works in harmony
+with the native SC of trio itself.
+
+Applications and libraries written against AnyIO's API will run unmodified on either asyncio_ or
+trio_. AnyIO can also be adopted into a library or application incrementally – bit by bit, no full
+refactoring necessary. It will blend in with the native libraries of your chosen backend.
+
+Documentation
+-------------
+
+View full documentation at: https://anyio.readthedocs.io/
+
+Features
+--------
+
+AnyIO offers the following functionality:
+
+* Task groups (nurseries_ in trio terminology)
+* High-level networking (TCP, UDP and UNIX sockets)
+
+ * `Happy eyeballs`_ algorithm for TCP connections (more robust than that of asyncio on Python
+ 3.8)
+ * async/await style UDP sockets (unlike asyncio where you still have to use Transports and
+ Protocols)
+
+* A versatile API for byte streams and object streams
+* Inter-task synchronization and communication (locks, conditions, events, semaphores, object
+ streams)
+* Worker threads
+* Subprocesses
+* Asynchronous file I/O (using worker threads)
+* Signal handling
+
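+For example, a task group runs child tasks concurrently and waits for all of
+them to finish (a minimal sketch):
+
+.. code-block:: python
+
+    import anyio
+
+    async def greet(name: str) -> None:
+        await anyio.sleep(0.1)
+        print("hello,", name)
+
+    async def main() -> None:
+        async with anyio.create_task_group() as tg:
+            tg.start_soon(greet, "world")
+            tg.start_soon(greet, "anyio")
+
+    anyio.run(main)
+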
+AnyIO also comes with its own pytest_ plugin, which additionally supports asynchronous fixtures.
+It even works with the popular Hypothesis_ library.
+
+.. _asyncio: https://docs.python.org/3/library/asyncio.html
+.. _trio: https://github.com/python-trio/trio
+.. _structured concurrency: https://en.wikipedia.org/wiki/Structured_concurrency
+.. _nurseries: https://trio.readthedocs.io/en/stable/reference-core.html#nurseries-and-spawning
+.. _Happy eyeballs: https://en.wikipedia.org/wiki/Happy_Eyeballs
+.. _pytest: https://docs.pytest.org/en/latest/
+.. _Hypothesis: https://hypothesis.works/
diff --git a/Backend/venv/lib/python3.12/site-packages/anyio-3.7.1.dist-info/RECORD b/Backend/venv/lib/python3.12/site-packages/anyio-3.7.1.dist-info/RECORD
new file mode 100644
index 00000000..fd8454f0
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/anyio-3.7.1.dist-info/RECORD
@@ -0,0 +1,82 @@
+anyio-3.7.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+anyio-3.7.1.dist-info/LICENSE,sha256=U2GsncWPLvX9LpsJxoKXwX8ElQkJu8gCO9uC6s8iwrA,1081
+anyio-3.7.1.dist-info/METADATA,sha256=mOhfXPB7qKVQh3dUtp2NgLysa10jHWeDBNnRg-93A_c,4708
+anyio-3.7.1.dist-info/RECORD,,
+anyio-3.7.1.dist-info/WHEEL,sha256=pkctZYzUS4AYVn6dJ-7367OJZivF2e8RA9b_ZBjif18,92
+anyio-3.7.1.dist-info/entry_points.txt,sha256=_d6Yu6uiaZmNe0CydowirE9Cmg7zUL2g08tQpoS3Qvc,39
+anyio-3.7.1.dist-info/top_level.txt,sha256=QglSMiWX8_5dpoVAEIHdEYzvqFMdSYWmCj6tYw2ITkQ,6
+anyio/__init__.py,sha256=Pq9lO03Zm5ynIPlhkquaOuIc1dTTeLGNUQ5HT5qwYMI,4073
+anyio/__pycache__/__init__.cpython-312.pyc,,
+anyio/__pycache__/from_thread.cpython-312.pyc,,
+anyio/__pycache__/lowlevel.cpython-312.pyc,,
+anyio/__pycache__/pytest_plugin.cpython-312.pyc,,
+anyio/__pycache__/to_process.cpython-312.pyc,,
+anyio/__pycache__/to_thread.cpython-312.pyc,,
+anyio/_backends/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+anyio/_backends/__pycache__/__init__.cpython-312.pyc,,
+anyio/_backends/__pycache__/_asyncio.cpython-312.pyc,,
+anyio/_backends/__pycache__/_trio.cpython-312.pyc,,
+anyio/_backends/_asyncio.py,sha256=fgwZmYnGOxT_pX0OZTPPgRdFqKLjnKvQUk7tsfuNmfM,67056
+anyio/_backends/_trio.py,sha256=EJAj0tNi0JRM2y3QWP7oS4ct7wnjMSYDG8IZUWMta-E,30035
+anyio/_core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+anyio/_core/__pycache__/__init__.cpython-312.pyc,,
+anyio/_core/__pycache__/_compat.cpython-312.pyc,,
+anyio/_core/__pycache__/_eventloop.cpython-312.pyc,,
+anyio/_core/__pycache__/_exceptions.cpython-312.pyc,,
+anyio/_core/__pycache__/_fileio.cpython-312.pyc,,
+anyio/_core/__pycache__/_resources.cpython-312.pyc,,
+anyio/_core/__pycache__/_signals.cpython-312.pyc,,
+anyio/_core/__pycache__/_sockets.cpython-312.pyc,,
+anyio/_core/__pycache__/_streams.cpython-312.pyc,,
+anyio/_core/__pycache__/_subprocesses.cpython-312.pyc,,
+anyio/_core/__pycache__/_synchronization.cpython-312.pyc,,
+anyio/_core/__pycache__/_tasks.cpython-312.pyc,,
+anyio/_core/__pycache__/_testing.cpython-312.pyc,,
+anyio/_core/__pycache__/_typedattr.cpython-312.pyc,,
+anyio/_core/_compat.py,sha256=XZfBUInEt7jaiTBI2Qbul7EpJdngbwTtG4Qj26un1YE,5726
+anyio/_core/_eventloop.py,sha256=xJ8KflV1bJ9GAuQRr4o1ojv8wWya4nt_XARta8uLPwc,4083
+anyio/_core/_exceptions.py,sha256=uOrN5l98o6UrOU6O3kPf0VCDl_zPP-kgZs4IyaLVgwU,2916
+anyio/_core/_fileio.py,sha256=DWuIul5izCocmJpgqDDNKc_GhMUwayHKdM5R-sbT_A8,18026
+anyio/_core/_resources.py,sha256=NbmU5O5UX3xEyACnkmYX28Fmwdl-f-ny0tHym26e0w0,435
+anyio/_core/_signals.py,sha256=KKkZAYL08auydjZnK9S4FQsxx555jT4gXAMcTXdNaok,863
+anyio/_core/_sockets.py,sha256=szcPd7kKBmlHnx8g_KJWZo2k6syouRNF2614ZrtqiV0,20667
+anyio/_core/_streams.py,sha256=5gryxQiUisED8uFUAHje5O44RL9wyndNMANzzQWUn1U,1518
+anyio/_core/_subprocesses.py,sha256=OSAcLAsjfCplXlRyTjWonfS1xU8d5MaZblXYqqY-BM4,4977
+anyio/_core/_synchronization.py,sha256=Uquo_52vZ7iZzDDoaN_j-N7jeyAlefzOZ8Pxt9mU6gY,16747
+anyio/_core/_tasks.py,sha256=1wZZWlpDkr6w3kMD629vzJDkPselDvx4XVElgTCVwyM,5316
+anyio/_core/_testing.py,sha256=7Yll-DOI0uIlIF5VHLUpGGyDPWtDEjFZ85-6ZniwIJU,2217
+anyio/_core/_typedattr.py,sha256=8o0gwQYSl04zlO9uHqcHu1T6hOw7peY9NW1mOX5DKnY,2551
+anyio/abc/__init__.py,sha256=UkC-KDbyIoKeDUDhJciwANSoyzz_qaFh4Fb7_AvwjZc,2159
+anyio/abc/__pycache__/__init__.cpython-312.pyc,,
+anyio/abc/__pycache__/_resources.cpython-312.pyc,,
+anyio/abc/__pycache__/_sockets.cpython-312.pyc,,
+anyio/abc/__pycache__/_streams.cpython-312.pyc,,
+anyio/abc/__pycache__/_subprocesses.cpython-312.pyc,,
+anyio/abc/__pycache__/_tasks.cpython-312.pyc,,
+anyio/abc/__pycache__/_testing.cpython-312.pyc,,
+anyio/abc/_resources.py,sha256=h1rkzr3E0MFqdXLh9aLLXe-A5W7k_Jc-5XzNr6SJ4w4,763
+anyio/abc/_sockets.py,sha256=WWYJ6HndKCEuvobAPDkmX0tjwN2FOxf3eTGb1DB7wHE,5243
+anyio/abc/_streams.py,sha256=yGhOmlVI3W9whmzPuewwYQ2BrKhrUFuWZ4zpVLWOK84,6584
+anyio/abc/_subprocesses.py,sha256=r-totaRbFX6kKV-4WTeuswz8n01aap8cvkYVQCRKN0M,2067
+anyio/abc/_tasks.py,sha256=a_5DLyiCbp0K57LJPOyF-PZyXmUcv_p9VRXPFj_K03M,3413
+anyio/abc/_testing.py,sha256=Eub7gXJ0tVPo_WN5iJAw10FrvC7C1uaL3b2neGr_pfs,1924
+anyio/from_thread.py,sha256=aUVKXctPgZ5wK3p5VTyrtjDj9tSQSrH6xCjBuo-hv3A,16563
+anyio/lowlevel.py,sha256=cOTncxRW5KeswqYQQdp0pfAw6OFWXius1SPhCYwHZL4,4647
+anyio/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+anyio/pytest_plugin.py,sha256=_Txgl0-I3kO1rk_KATXmIUV57C34hajcJCGcgV26CU0,5022
+anyio/streams/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+anyio/streams/__pycache__/__init__.cpython-312.pyc,,
+anyio/streams/__pycache__/buffered.cpython-312.pyc,,
+anyio/streams/__pycache__/file.cpython-312.pyc,,
+anyio/streams/__pycache__/memory.cpython-312.pyc,,
+anyio/streams/__pycache__/stapled.cpython-312.pyc,,
+anyio/streams/__pycache__/text.cpython-312.pyc,,
+anyio/streams/__pycache__/tls.cpython-312.pyc,,
+anyio/streams/buffered.py,sha256=2ifplNLwT73d1UKBxrkFdlC9wTAze9LhPL7pt_7cYgY,4473
+anyio/streams/file.py,sha256=-NP6jMcUd2f1VJwgcxgiRHdEsNnhE0lANl0ov_i7FrE,4356
+anyio/streams/memory.py,sha256=QZhc5qdomBpGCgrUVWAaqEBxI0oklVxK_62atW6tnNk,9274
+anyio/streams/stapled.py,sha256=9u2GxpiOPsGtgO1qsj2tVoW4b8bgiwp5rSDs1BFKkLM,4275
+anyio/streams/text.py,sha256=1K4ZCLKl2b7yywrW6wKEeMu3xyQHE_T0aU5_oC9GPTE,5043
+anyio/streams/tls.py,sha256=TbdCz1KtfEnp3mxHvkROXRefhE6S1LHiwgWiJX8zYaU,12099
+anyio/to_process.py,sha256=_RSsG8UME2nGxeFEdg3OEfv9XshSQwrMU7DAbwWGx9U,9242
+anyio/to_thread.py,sha256=HVpTvBei2sSXgJJeNKdwhJwQaW76LDbb1htQ-Mc6zDs,2146
diff --git a/Backend/venv/lib/python3.12/site-packages/anyio-3.7.1.dist-info/WHEEL b/Backend/venv/lib/python3.12/site-packages/anyio-3.7.1.dist-info/WHEEL
new file mode 100644
index 00000000..1f37c02f
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/anyio-3.7.1.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.40.0)
+Root-Is-Purelib: true
+Tag: py3-none-any
+
diff --git a/Backend/venv/lib/python3.12/site-packages/anyio-3.7.1.dist-info/entry_points.txt b/Backend/venv/lib/python3.12/site-packages/anyio-3.7.1.dist-info/entry_points.txt
new file mode 100644
index 00000000..44dd9bdc
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/anyio-3.7.1.dist-info/entry_points.txt
@@ -0,0 +1,2 @@
+[pytest11]
+anyio = anyio.pytest_plugin
diff --git a/Backend/venv/lib/python3.12/site-packages/anyio-3.7.1.dist-info/top_level.txt b/Backend/venv/lib/python3.12/site-packages/anyio-3.7.1.dist-info/top_level.txt
new file mode 100644
index 00000000..c77c069e
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/anyio-3.7.1.dist-info/top_level.txt
@@ -0,0 +1 @@
+anyio
diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/__init__.py b/Backend/venv/lib/python3.12/site-packages/anyio/__init__.py
new file mode 100644
index 00000000..29fb3561
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/anyio/__init__.py
@@ -0,0 +1,169 @@
+from __future__ import annotations
+
+__all__ = (
+ "maybe_async",
+ "maybe_async_cm",
+ "run",
+ "sleep",
+ "sleep_forever",
+ "sleep_until",
+ "current_time",
+ "get_all_backends",
+ "get_cancelled_exc_class",
+ "BrokenResourceError",
+ "BrokenWorkerProcess",
+ "BusyResourceError",
+ "ClosedResourceError",
+ "DelimiterNotFound",
+ "EndOfStream",
+ "ExceptionGroup",
+ "IncompleteRead",
+ "TypedAttributeLookupError",
+ "WouldBlock",
+ "AsyncFile",
+ "Path",
+ "open_file",
+ "wrap_file",
+ "aclose_forcefully",
+ "open_signal_receiver",
+ "connect_tcp",
+ "connect_unix",
+ "create_tcp_listener",
+ "create_unix_listener",
+ "create_udp_socket",
+ "create_connected_udp_socket",
+ "getaddrinfo",
+ "getnameinfo",
+ "wait_socket_readable",
+ "wait_socket_writable",
+ "create_memory_object_stream",
+ "run_process",
+ "open_process",
+ "create_lock",
+ "CapacityLimiter",
+ "CapacityLimiterStatistics",
+ "Condition",
+ "ConditionStatistics",
+ "Event",
+ "EventStatistics",
+ "Lock",
+ "LockStatistics",
+ "Semaphore",
+ "SemaphoreStatistics",
+ "create_condition",
+ "create_event",
+ "create_semaphore",
+ "create_capacity_limiter",
+ "open_cancel_scope",
+ "fail_after",
+ "move_on_after",
+ "current_effective_deadline",
+ "TASK_STATUS_IGNORED",
+ "CancelScope",
+ "create_task_group",
+ "TaskInfo",
+ "get_current_task",
+ "get_running_tasks",
+ "wait_all_tasks_blocked",
+ "run_sync_in_worker_thread",
+ "run_async_from_thread",
+ "run_sync_from_thread",
+ "current_default_worker_thread_limiter",
+ "create_blocking_portal",
+ "start_blocking_portal",
+ "typed_attribute",
+ "TypedAttributeSet",
+ "TypedAttributeProvider",
+)
+
+from typing import Any
+
+from ._core._compat import maybe_async, maybe_async_cm
+from ._core._eventloop import (
+ current_time,
+ get_all_backends,
+ get_cancelled_exc_class,
+ run,
+ sleep,
+ sleep_forever,
+ sleep_until,
+)
+from ._core._exceptions import (
+ BrokenResourceError,
+ BrokenWorkerProcess,
+ BusyResourceError,
+ ClosedResourceError,
+ DelimiterNotFound,
+ EndOfStream,
+ ExceptionGroup,
+ IncompleteRead,
+ TypedAttributeLookupError,
+ WouldBlock,
+)
+from ._core._fileio import AsyncFile, Path, open_file, wrap_file
+from ._core._resources import aclose_forcefully
+from ._core._signals import open_signal_receiver
+from ._core._sockets import (
+ connect_tcp,
+ connect_unix,
+ create_connected_udp_socket,
+ create_tcp_listener,
+ create_udp_socket,
+ create_unix_listener,
+ getaddrinfo,
+ getnameinfo,
+ wait_socket_readable,
+ wait_socket_writable,
+)
+from ._core._streams import create_memory_object_stream
+from ._core._subprocesses import open_process, run_process
+from ._core._synchronization import (
+ CapacityLimiter,
+ CapacityLimiterStatistics,
+ Condition,
+ ConditionStatistics,
+ Event,
+ EventStatistics,
+ Lock,
+ LockStatistics,
+ Semaphore,
+ SemaphoreStatistics,
+ create_capacity_limiter,
+ create_condition,
+ create_event,
+ create_lock,
+ create_semaphore,
+)
+from ._core._tasks import (
+ TASK_STATUS_IGNORED,
+ CancelScope,
+ create_task_group,
+ current_effective_deadline,
+ fail_after,
+ move_on_after,
+ open_cancel_scope,
+)
+from ._core._testing import (
+ TaskInfo,
+ get_current_task,
+ get_running_tasks,
+ wait_all_tasks_blocked,
+)
+from ._core._typedattr import TypedAttributeProvider, TypedAttributeSet, typed_attribute
+
+# Re-exported here, for backwards compatibility
+# isort: off
+from .to_thread import current_default_worker_thread_limiter, run_sync_in_worker_thread
+from .from_thread import (
+ create_blocking_portal,
+ run_async_from_thread,
+ run_sync_from_thread,
+ start_blocking_portal,
+)
+
+# Re-export imports so they look like they live directly in this package
+key: str
+value: Any
+for key, value in list(locals().items()):
+ if getattr(value, "__module__", "").startswith("anyio."):
+ value.__module__ = __name__
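+
+# A minimal sketch of what the loop above achieves (illustrative, assuming
+# only the re-exports in this file): objects report this package, rather than
+# a private submodule, as their home.
+#
+#     import anyio
+#
+#     # Defined in anyio._core._tasks, but reported as "anyio" because its
+#     # __module__ was rewritten by the loop above:
+#     assert anyio.CancelScope.__module__ == "anyio"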
diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/__pycache__/__init__.cpython-312.pyc
new file mode 100644
index 00000000..eaa3cc51
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/anyio/__pycache__/__init__.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/__pycache__/from_thread.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/__pycache__/from_thread.cpython-312.pyc
new file mode 100644
index 00000000..e12918a5
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/anyio/__pycache__/from_thread.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/__pycache__/lowlevel.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/__pycache__/lowlevel.cpython-312.pyc
new file mode 100644
index 00000000..9418957a
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/anyio/__pycache__/lowlevel.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/__pycache__/pytest_plugin.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/__pycache__/pytest_plugin.cpython-312.pyc
new file mode 100644
index 00000000..f2d8887b
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/anyio/__pycache__/pytest_plugin.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/__pycache__/to_process.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/__pycache__/to_process.cpython-312.pyc
new file mode 100644
index 00000000..833f2637
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/anyio/__pycache__/to_process.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/__pycache__/to_thread.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/__pycache__/to_thread.cpython-312.pyc
new file mode 100644
index 00000000..76bae395
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/anyio/__pycache__/to_thread.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/_backends/__init__.py b/Backend/venv/lib/python3.12/site-packages/anyio/_backends/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/_backends/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/_backends/__pycache__/__init__.cpython-312.pyc
new file mode 100644
index 00000000..e01157fa
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/anyio/_backends/__pycache__/__init__.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/_backends/__pycache__/_asyncio.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/_backends/__pycache__/_asyncio.cpython-312.pyc
new file mode 100644
index 00000000..8ca20e27
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/anyio/_backends/__pycache__/_asyncio.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/_backends/__pycache__/_trio.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/_backends/__pycache__/_trio.cpython-312.pyc
new file mode 100644
index 00000000..19edf9ad
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/anyio/_backends/__pycache__/_trio.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/_backends/_asyncio.py b/Backend/venv/lib/python3.12/site-packages/anyio/_backends/_asyncio.py
new file mode 100644
index 00000000..bfdb4ea7
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/anyio/_backends/_asyncio.py
@@ -0,0 +1,2117 @@
+from __future__ import annotations
+
+import array
+import asyncio
+import concurrent.futures
+import math
+import socket
+import sys
+from asyncio.base_events import _run_until_complete_cb # type: ignore[attr-defined]
+from collections import OrderedDict, deque
+from concurrent.futures import Future
+from contextvars import Context, copy_context
+from dataclasses import dataclass
+from functools import partial, wraps
+from inspect import (
+ CORO_RUNNING,
+ CORO_SUSPENDED,
+ GEN_RUNNING,
+ GEN_SUSPENDED,
+ getcoroutinestate,
+ getgeneratorstate,
+)
+from io import IOBase
+from os import PathLike
+from queue import Queue
+from socket import AddressFamily, SocketKind
+from threading import Thread
+from types import TracebackType
+from typing import (
+ IO,
+ Any,
+ AsyncGenerator,
+ Awaitable,
+ Callable,
+ Collection,
+ Coroutine,
+ Generator,
+ Iterable,
+ Mapping,
+ Optional,
+ Sequence,
+ Tuple,
+ TypeVar,
+ Union,
+ cast,
+)
+from weakref import WeakKeyDictionary
+
+import sniffio
+
+from .. import CapacityLimiterStatistics, EventStatistics, TaskInfo, abc
+from .._core._compat import DeprecatedAsyncContextManager, DeprecatedAwaitable
+from .._core._eventloop import claim_worker_thread, threadlocals
+from .._core._exceptions import (
+ BrokenResourceError,
+ BusyResourceError,
+ ClosedResourceError,
+ EndOfStream,
+ WouldBlock,
+)
+from .._core._exceptions import ExceptionGroup as BaseExceptionGroup
+from .._core._sockets import GetAddrInfoReturnType, convert_ipv6_sockaddr
+from .._core._synchronization import CapacityLimiter as BaseCapacityLimiter
+from .._core._synchronization import Event as BaseEvent
+from .._core._synchronization import ResourceGuard
+from .._core._tasks import CancelScope as BaseCancelScope
+from ..abc import IPSockAddrType, UDPPacketType
+from ..lowlevel import RunVar
+
+if sys.version_info >= (3, 8):
+
+ def get_coro(task: asyncio.Task) -> Generator | Awaitable[Any]:
+ return task.get_coro()
+
+else:
+
+ def get_coro(task: asyncio.Task) -> Generator | Awaitable[Any]:
+ return task._coro
+
+
+from asyncio import all_tasks, create_task, current_task, get_running_loop
+from asyncio import run as native_run
+
+
+def _get_task_callbacks(task: asyncio.Task) -> Iterable[Callable]:
+ return [cb for cb, context in task._callbacks]
+
+
+T_Retval = TypeVar("T_Retval")
+T_contra = TypeVar("T_contra", contravariant=True)
+
+# Check whether there is native support for task names in asyncio (3.8+)
+_native_task_names = hasattr(asyncio.Task, "get_name")
+
+
+_root_task: RunVar[asyncio.Task | None] = RunVar("_root_task")
+
+
+def find_root_task() -> asyncio.Task:
+ root_task = _root_task.get(None)
+ if root_task is not None and not root_task.done():
+ return root_task
+
+ # Look for a task that has been started via run_until_complete()
+ for task in all_tasks():
+ if task._callbacks and not task.done():
+ for cb in _get_task_callbacks(task):
+ if (
+ cb is _run_until_complete_cb
+ or getattr(cb, "__module__", None) == "uvloop.loop"
+ ):
+ _root_task.set(task)
+ return task
+
+ # Look up the topmost task in the AnyIO task tree, if possible
+ task = cast(asyncio.Task, current_task())
+ state = _task_states.get(task)
+ if state:
+ cancel_scope = state.cancel_scope
+ while cancel_scope and cancel_scope._parent_scope is not None:
+ cancel_scope = cancel_scope._parent_scope
+
+ if cancel_scope is not None:
+ return cast(asyncio.Task, cancel_scope._host_task)
+
+ return task
+
+
+def get_callable_name(func: Callable) -> str:
+ module = getattr(func, "__module__", None)
+ qualname = getattr(func, "__qualname__", None)
+ return ".".join([x for x in (module, qualname) if x])
+
+
+#
+# Event loop
+#
+
+_run_vars = (
+ WeakKeyDictionary()
+) # type: WeakKeyDictionary[asyncio.AbstractEventLoop, Any]
+
+current_token = get_running_loop
+
+
+def _task_started(task: asyncio.Task) -> bool:
+ """Return ``True`` if the task has been started and has not finished."""
+ coro = cast(Coroutine[Any, Any, Any], get_coro(task))
+ try:
+ return getcoroutinestate(coro) in (CORO_RUNNING, CORO_SUSPENDED)
+ except AttributeError:
+ try:
+ return getgeneratorstate(cast(Generator, coro)) in (
+ GEN_RUNNING,
+ GEN_SUSPENDED,
+ )
+ except AttributeError:
+ # task coro is async_generator_asend https://bugs.python.org/issue37771
+ raise Exception(f"Cannot determine if task {task} has started or not")
+
+
+def _maybe_set_event_loop_policy(
+ policy: asyncio.AbstractEventLoopPolicy | None, use_uvloop: bool
+) -> None:
+ # On CPython, use uvloop when possible if no other policy has been given and if not
+ # explicitly disabled
+ if policy is None and use_uvloop and sys.implementation.name == "cpython":
+ try:
+ import uvloop
+ except ImportError:
+ pass
+ else:
+ # Test for missing shutdown_default_executor() (uvloop 0.14.0 and earlier)
+ if not hasattr(
+ asyncio.AbstractEventLoop, "shutdown_default_executor"
+ ) or hasattr(uvloop.loop.Loop, "shutdown_default_executor"):
+ policy = uvloop.EventLoopPolicy()
+
+ if policy is not None:
+ asyncio.set_event_loop_policy(policy)
+
+
+def run(
+ func: Callable[..., Awaitable[T_Retval]],
+ *args: object,
+ debug: bool = False,
+ use_uvloop: bool = False,
+ policy: asyncio.AbstractEventLoopPolicy | None = None,
+) -> T_Retval:
+ @wraps(func)
+ async def wrapper() -> T_Retval:
+ task = cast(asyncio.Task, current_task())
+ task_state = TaskState(None, get_callable_name(func), None)
+ _task_states[task] = task_state
+ if _native_task_names:
+ task.set_name(task_state.name)
+
+ try:
+ return await func(*args)
+ finally:
+ del _task_states[task]
+
+ _maybe_set_event_loop_policy(policy, use_uvloop)
+ return native_run(wrapper(), debug=debug)
+
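+# Usage sketch (illustrative): applications reach this backend through the
+# portable anyio.run() wrapper rather than importing this module directly.
+#
+#     import anyio
+#
+#     async def main() -> str:
+#         await anyio.sleep(0.1)
+#         return "done"
+#
+#     # backend_options are forwarded to run() above (debug/use_uvloop/policy)
+#     result = anyio.run(main, backend="asyncio", backend_options={"debug": True})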
+
+#
+# Miscellaneous
+#
+
+sleep = asyncio.sleep
+
+
+#
+# Timeouts and cancellation
+#
+
+CancelledError = asyncio.CancelledError
+
+
+class CancelScope(BaseCancelScope):
+ def __new__(
+ cls, *, deadline: float = math.inf, shield: bool = False
+ ) -> CancelScope:
+ return object.__new__(cls)
+
+ def __init__(self, deadline: float = math.inf, shield: bool = False):
+ self._deadline = deadline
+ self._shield = shield
+ self._parent_scope: CancelScope | None = None
+ self._cancel_called = False
+ self._active = False
+ self._timeout_handle: asyncio.TimerHandle | None = None
+ self._cancel_handle: asyncio.Handle | None = None
+ self._tasks: set[asyncio.Task] = set()
+ self._host_task: asyncio.Task | None = None
+ self._timeout_expired = False
+ self._cancel_calls: int = 0
+
+ def __enter__(self) -> CancelScope:
+ if self._active:
+ raise RuntimeError(
+ "Each CancelScope may only be used for a single 'with' block"
+ )
+
+ self._host_task = host_task = cast(asyncio.Task, current_task())
+ self._tasks.add(host_task)
+ try:
+ task_state = _task_states[host_task]
+ except KeyError:
+ task_name = host_task.get_name() if _native_task_names else None
+ task_state = TaskState(None, task_name, self)
+ _task_states[host_task] = task_state
+ else:
+ self._parent_scope = task_state.cancel_scope
+ task_state.cancel_scope = self
+
+ self._timeout()
+ self._active = True
+
+ # Start cancelling the host task if the scope was cancelled before entering
+ if self._cancel_called:
+ self._deliver_cancellation()
+
+ return self
+
+ def __exit__(
+ self,
+ exc_type: type[BaseException] | None,
+ exc_val: BaseException | None,
+ exc_tb: TracebackType | None,
+ ) -> bool | None:
+ if not self._active:
+ raise RuntimeError("This cancel scope is not active")
+ if current_task() is not self._host_task:
+ raise RuntimeError(
+ "Attempted to exit cancel scope in a different task than it was "
+ "entered in"
+ )
+
+ assert self._host_task is not None
+ host_task_state = _task_states.get(self._host_task)
+ if host_task_state is None or host_task_state.cancel_scope is not self:
+ raise RuntimeError(
+ "Attempted to exit a cancel scope that isn't the current tasks's "
+ "current cancel scope"
+ )
+
+ self._active = False
+ if self._timeout_handle:
+ self._timeout_handle.cancel()
+ self._timeout_handle = None
+
+ self._tasks.remove(self._host_task)
+
+ host_task_state.cancel_scope = self._parent_scope
+
+ # Restart the cancellation effort in the farthest directly cancelled parent scope if this
+ # one was shielded
+ if self._shield:
+ self._deliver_cancellation_to_parent()
+
+ if exc_val is not None:
+ exceptions = (
+ exc_val.exceptions if isinstance(exc_val, ExceptionGroup) else [exc_val]
+ )
+ if all(isinstance(exc, CancelledError) for exc in exceptions):
+ if self._timeout_expired:
+ return self._uncancel()
+ elif not self._cancel_called:
+ # Task was cancelled natively
+ return None
+ elif not self._parent_cancelled():
+ # This scope was directly cancelled
+ return self._uncancel()
+
+ return None
+
+ def _uncancel(self) -> bool:
+ if sys.version_info < (3, 11) or self._host_task is None:
+ self._cancel_calls = 0
+ return True
+
+ # Uncancel all AnyIO cancellations
+ for i in range(self._cancel_calls):
+ self._host_task.uncancel()
+
+ self._cancel_calls = 0
+ return not self._host_task.cancelling()
+
+ def _timeout(self) -> None:
+ if self._deadline != math.inf:
+ loop = get_running_loop()
+ if loop.time() >= self._deadline:
+ self._timeout_expired = True
+ self.cancel()
+ else:
+ self._timeout_handle = loop.call_at(self._deadline, self._timeout)
+
+ def _deliver_cancellation(self) -> None:
+ """
+ Deliver cancellation to directly contained tasks and nested cancel scopes.
+
+ Schedule another run at the end if we still have tasks eligible for cancellation.
+ """
+ should_retry = False
+ current = current_task()
+ for task in self._tasks:
+ if task._must_cancel: # type: ignore[attr-defined]
+ continue
+
+ # The task is eligible for cancellation if it has started and is not in a cancel
+ # scope shielded from this one
+ cancel_scope = _task_states[task].cancel_scope
+ while cancel_scope is not self:
+ if cancel_scope is None or cancel_scope._shield:
+ break
+ else:
+ cancel_scope = cancel_scope._parent_scope
+ else:
+ should_retry = True
+ if task is not current and (
+ task is self._host_task or _task_started(task)
+ ):
+ self._cancel_calls += 1
+ task.cancel()
+
+ # Schedule another callback if there are still tasks left
+ if should_retry:
+ self._cancel_handle = get_running_loop().call_soon(
+ self._deliver_cancellation
+ )
+ else:
+ self._cancel_handle = None
+
+ def _deliver_cancellation_to_parent(self) -> None:
+ """Start cancellation effort in the farthest directly cancelled parent scope"""
+ scope = self._parent_scope
+ scope_to_cancel: CancelScope | None = None
+ while scope is not None:
+ if scope._cancel_called and scope._cancel_handle is None:
+ scope_to_cancel = scope
+
+ # No point in looking beyond any shielded scope
+ if scope._shield:
+ break
+
+ scope = scope._parent_scope
+
+ if scope_to_cancel is not None:
+ scope_to_cancel._deliver_cancellation()
+
+ def _parent_cancelled(self) -> bool:
+ # Check whether any parent has been cancelled
+ cancel_scope = self._parent_scope
+ while cancel_scope is not None and not cancel_scope._shield:
+ if cancel_scope._cancel_called:
+ return True
+ else:
+ cancel_scope = cancel_scope._parent_scope
+
+ return False
+
+ def cancel(self) -> DeprecatedAwaitable:
+ if not self._cancel_called:
+ if self._timeout_handle:
+ self._timeout_handle.cancel()
+ self._timeout_handle = None
+
+ self._cancel_called = True
+ if self._host_task is not None:
+ self._deliver_cancellation()
+
+ return DeprecatedAwaitable(self.cancel)
+
+ @property
+ def deadline(self) -> float:
+ return self._deadline
+
+ @deadline.setter
+ def deadline(self, value: float) -> None:
+ self._deadline = float(value)
+ if self._timeout_handle is not None:
+ self._timeout_handle.cancel()
+ self._timeout_handle = None
+
+ if self._active and not self._cancel_called:
+ self._timeout()
+
+ @property
+ def cancel_called(self) -> bool:
+ return self._cancel_called
+
+ @property
+ def shield(self) -> bool:
+ return self._shield
+
+ @shield.setter
+ def shield(self, value: bool) -> None:
+ if self._shield != value:
+ self._shield = value
+ if not value:
+ self._deliver_cancellation_to_parent()
+
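+# Usage sketch (illustrative): user code normally enters this class through
+# the public helpers fail_after()/move_on_after(), which set the deadline.
+#
+#     import anyio
+#
+#     async def fetch_with_timeout() -> None:
+#         with anyio.move_on_after(5) as scope:  # a CancelScope with a deadline
+#             await anyio.sleep_forever()
+#         if scope.cancel_called:
+#             print("timed out after 5 seconds")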
+
+async def checkpoint() -> None:
+ await sleep(0)
+
+
+async def checkpoint_if_cancelled() -> None:
+ task = current_task()
+ if task is None:
+ return
+
+ try:
+ cancel_scope = _task_states[task].cancel_scope
+ except KeyError:
+ return
+
+ while cancel_scope:
+ if cancel_scope.cancel_called:
+ await sleep(0)
+ elif cancel_scope.shield:
+ break
+ else:
+ cancel_scope = cancel_scope._parent_scope
+
+
+async def cancel_shielded_checkpoint() -> None:
+ with CancelScope(shield=True):
+ await sleep(0)
+
+
+def current_effective_deadline() -> float:
+ try:
+ cancel_scope = _task_states[current_task()].cancel_scope # type: ignore[index]
+ except KeyError:
+ return math.inf
+
+ deadline = math.inf
+ while cancel_scope:
+ deadline = min(deadline, cancel_scope.deadline)
+ if cancel_scope._cancel_called:
+ deadline = -math.inf
+ break
+ elif cancel_scope.shield:
+ break
+ else:
+ cancel_scope = cancel_scope._parent_scope
+
+ return deadline
+
+
+def current_time() -> float:
+ return get_running_loop().time()
+
+
+#
+# Task states
+#
+
+
+class TaskState:
+ """
+ Encapsulates auxiliary task information that cannot be added to the Task instance itself
+ because there are no guarantees about its implementation.
+ """
+
+ __slots__ = "parent_id", "name", "cancel_scope"
+
+ def __init__(
+ self,
+ parent_id: int | None,
+ name: str | None,
+ cancel_scope: CancelScope | None,
+ ):
+ self.parent_id = parent_id
+ self.name = name
+ self.cancel_scope = cancel_scope
+
+
+_task_states = WeakKeyDictionary() # type: WeakKeyDictionary[asyncio.Task, TaskState]
+
+
+#
+# Task groups
+#
+
+
+class ExceptionGroup(BaseExceptionGroup):
+ def __init__(self, exceptions: list[BaseException]):
+ super().__init__()
+ self.exceptions = exceptions
+
+
+class _AsyncioTaskStatus(abc.TaskStatus):
+ def __init__(self, future: asyncio.Future, parent_id: int):
+ self._future = future
+ self._parent_id = parent_id
+
+ def started(self, value: T_contra | None = None) -> None:
+ try:
+ self._future.set_result(value)
+ except asyncio.InvalidStateError:
+ raise RuntimeError(
+ "called 'started' twice on the same task status"
+ ) from None
+
+ task = cast(asyncio.Task, current_task())
+ _task_states[task].parent_id = self._parent_id
+
+
+class TaskGroup(abc.TaskGroup):
+ def __init__(self) -> None:
+ self.cancel_scope: CancelScope = CancelScope()
+ self._active = False
+ self._exceptions: list[BaseException] = []
+
+ async def __aenter__(self) -> TaskGroup:
+ self.cancel_scope.__enter__()
+ self._active = True
+ return self
+
+ async def __aexit__(
+ self,
+ exc_type: type[BaseException] | None,
+ exc_val: BaseException | None,
+ exc_tb: TracebackType | None,
+ ) -> bool | None:
+ ignore_exception = self.cancel_scope.__exit__(exc_type, exc_val, exc_tb)
+ if exc_val is not None:
+ self.cancel_scope.cancel()
+ self._exceptions.append(exc_val)
+
+ while self.cancel_scope._tasks:
+ try:
+ await asyncio.wait(self.cancel_scope._tasks)
+ except asyncio.CancelledError:
+ self.cancel_scope.cancel()
+
+ self._active = False
+ if not self.cancel_scope._parent_cancelled():
+ exceptions = self._filter_cancellation_errors(self._exceptions)
+ else:
+ exceptions = self._exceptions
+
+ try:
+ if len(exceptions) > 1:
+ if all(
+ isinstance(e, CancelledError) and not e.args for e in exceptions
+ ):
+ # Tasks were cancelled natively, without a cancellation message
+ raise CancelledError
+ else:
+ raise ExceptionGroup(exceptions)
+ elif exceptions and exceptions[0] is not exc_val:
+ raise exceptions[0]
+ except BaseException as exc:
+ # Clear the context here, as it can only be done in-flight.
+ # If the context is not cleared, it can result in recursive tracebacks (see #145).
+ exc.__context__ = None
+ raise
+
+ return ignore_exception
+
+ @staticmethod
+ def _filter_cancellation_errors(
+ exceptions: Sequence[BaseException],
+ ) -> list[BaseException]:
+ filtered_exceptions: list[BaseException] = []
+ for exc in exceptions:
+ if isinstance(exc, ExceptionGroup):
+ new_exceptions = TaskGroup._filter_cancellation_errors(exc.exceptions)
+ if len(new_exceptions) > 1:
+ filtered_exceptions.append(exc)
+ elif len(new_exceptions) == 1:
+ filtered_exceptions.append(new_exceptions[0])
+ elif new_exceptions:
+ new_exc = ExceptionGroup(new_exceptions)
+ new_exc.__cause__ = exc.__cause__
+ new_exc.__context__ = exc.__context__
+ new_exc.__traceback__ = exc.__traceback__
+ filtered_exceptions.append(new_exc)
+ elif not isinstance(exc, CancelledError) or exc.args:
+ filtered_exceptions.append(exc)
+
+ return filtered_exceptions
+
+ async def _run_wrapped_task(
+ self, coro: Coroutine, task_status_future: asyncio.Future | None
+ ) -> None:
+ # This is the code path for Python 3.7, where asyncio cannot cope with a
+ # task raising a BaseException.
+ __traceback_hide__ = __tracebackhide__ = True # noqa: F841
+ task = cast(asyncio.Task, current_task())
+ try:
+ await coro
+ except BaseException as exc:
+ if task_status_future is None or task_status_future.done():
+ self._exceptions.append(exc)
+ self.cancel_scope.cancel()
+ else:
+ task_status_future.set_exception(exc)
+ else:
+ if task_status_future is not None and not task_status_future.done():
+ task_status_future.set_exception(
+ RuntimeError("Child exited without calling task_status.started()")
+ )
+ finally:
+ if task in self.cancel_scope._tasks:
+ self.cancel_scope._tasks.remove(task)
+ del _task_states[task]
+
+ def _spawn(
+ self,
+ func: Callable[..., Awaitable[Any]],
+ args: tuple,
+ name: object,
+ task_status_future: asyncio.Future | None = None,
+ ) -> asyncio.Task:
+ def task_done(_task: asyncio.Task) -> None:
+ # This is the code path for Python 3.8+
+ assert _task in self.cancel_scope._tasks
+ self.cancel_scope._tasks.remove(_task)
+ del _task_states[_task]
+
+ try:
+ exc = _task.exception()
+ except CancelledError as e:
+ while isinstance(e.__context__, CancelledError):
+ e = e.__context__
+
+ exc = e
+
+ if exc is not None:
+ if task_status_future is None or task_status_future.done():
+ self._exceptions.append(exc)
+ self.cancel_scope.cancel()
+ else:
+ task_status_future.set_exception(exc)
+ elif task_status_future is not None and not task_status_future.done():
+ task_status_future.set_exception(
+ RuntimeError("Child exited without calling task_status.started()")
+ )
+
+ if not self._active:
+ raise RuntimeError(
+ "This task group is not active; no new tasks can be started."
+ )
+
+ options: dict[str, Any] = {}
+ name = get_callable_name(func) if name is None else str(name)
+ if _native_task_names:
+ options["name"] = name
+
+ kwargs = {}
+ if task_status_future:
+ parent_id = id(current_task())
+ kwargs["task_status"] = _AsyncioTaskStatus(
+ task_status_future, id(self.cancel_scope._host_task)
+ )
+ else:
+ parent_id = id(self.cancel_scope._host_task)
+
+ coro = func(*args, **kwargs)
+ if not asyncio.iscoroutine(coro):
+ raise TypeError(
+ f"Expected an async function, but {func} appears to be synchronous"
+ )
+
+ foreign_coro = not hasattr(coro, "cr_frame") and not hasattr(coro, "gi_frame")
+ if foreign_coro or sys.version_info < (3, 8):
+ coro = self._run_wrapped_task(coro, task_status_future)
+
+ task = create_task(coro, **options)
+ if not foreign_coro and sys.version_info >= (3, 8):
+ task.add_done_callback(task_done)
+
+ # Make the spawned task inherit the task group's cancel scope
+ _task_states[task] = TaskState(
+ parent_id=parent_id, name=name, cancel_scope=self.cancel_scope
+ )
+ self.cancel_scope._tasks.add(task)
+ return task
+
+ def start_soon(
+ self, func: Callable[..., Awaitable[Any]], *args: object, name: object = None
+ ) -> None:
+ self._spawn(func, args, name)
+
+ async def start(
+ self, func: Callable[..., Awaitable[Any]], *args: object, name: object = None
+ ) -> None:
+ future: asyncio.Future = asyncio.Future()
+ task = self._spawn(func, args, name, future)
+
+ # If the task raises an exception after sending a start value without a switch point
+ # between, the task group is cancelled and this method never proceeds to process the
+ # completed future. That's why we have to have a shielded cancel scope here.
+ with CancelScope(shield=True):
+ try:
+ return await future
+ except CancelledError:
+ task.cancel()
+ raise
+
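+# Usage sketch (illustrative): this class backs the public
+# anyio.create_task_group() factory on the asyncio backend.
+#
+#     import anyio
+#
+#     async def worker(delay: float) -> None:
+#         await anyio.sleep(delay)
+#
+#     async def main() -> None:
+#         async with anyio.create_task_group() as tg:
+#             tg.start_soon(worker, 0.1)
+#             tg.start_soon(worker, 0.2)
+#         # The block exits only after both workers finish; an exception in
+#         # either one cancels the other through the shared cancel scope.
+#
+#     anyio.run(main)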
+
+#
+# Threads
+#
+
+_Retval_Queue_Type = Tuple[Optional[T_Retval], Optional[BaseException]]
+
+
+class WorkerThread(Thread):
+ MAX_IDLE_TIME = 10 # seconds
+
+ def __init__(
+ self,
+ root_task: asyncio.Task,
+ workers: set[WorkerThread],
+ idle_workers: deque[WorkerThread],
+ ):
+ super().__init__(name="AnyIO worker thread")
+ self.root_task = root_task
+ self.workers = workers
+ self.idle_workers = idle_workers
+ self.loop = root_task._loop
+ self.queue: Queue[
+ tuple[Context, Callable, tuple, asyncio.Future] | None
+ ] = Queue(2)
+ self.idle_since = current_time()
+ self.stopping = False
+
+ def _report_result(
+ self, future: asyncio.Future, result: Any, exc: BaseException | None
+ ) -> None:
+ self.idle_since = current_time()
+ if not self.stopping:
+ self.idle_workers.append(self)
+
+ if not future.cancelled():
+ if exc is not None:
+ if isinstance(exc, StopIteration):
+ new_exc = RuntimeError("coroutine raised StopIteration")
+ new_exc.__cause__ = exc
+ exc = new_exc
+
+ future.set_exception(exc)
+ else:
+ future.set_result(result)
+
+ def run(self) -> None:
+ with claim_worker_thread("asyncio"):
+ threadlocals.loop = self.loop
+ while True:
+ item = self.queue.get()
+ if item is None:
+ # Shutdown command received
+ return
+
+ context, func, args, future = item
+ if not future.cancelled():
+ result = None
+ exception: BaseException | None = None
+ try:
+ result = context.run(func, *args)
+ except BaseException as exc:
+ exception = exc
+
+ if not self.loop.is_closed():
+ self.loop.call_soon_threadsafe(
+ self._report_result, future, result, exception
+ )
+
+ self.queue.task_done()
+
+ def stop(self, f: asyncio.Task | None = None) -> None:
+ self.stopping = True
+ self.queue.put_nowait(None)
+ self.workers.discard(self)
+ try:
+ self.idle_workers.remove(self)
+ except ValueError:
+ pass
+
+
+_threadpool_idle_workers: RunVar[deque[WorkerThread]] = RunVar(
+ "_threadpool_idle_workers"
+)
+_threadpool_workers: RunVar[set[WorkerThread]] = RunVar("_threadpool_workers")
+
+
+async def run_sync_in_worker_thread(
+ func: Callable[..., T_Retval],
+ *args: object,
+ cancellable: bool = False,
+ limiter: CapacityLimiter | None = None,
+) -> T_Retval:
+ await checkpoint()
+
+ # If this is the first run in this event loop thread, set up the necessary variables
+ try:
+ idle_workers = _threadpool_idle_workers.get()
+ workers = _threadpool_workers.get()
+ except LookupError:
+ idle_workers = deque()
+ workers = set()
+ _threadpool_idle_workers.set(idle_workers)
+ _threadpool_workers.set(workers)
+
+ async with (limiter or current_default_thread_limiter()):
+ with CancelScope(shield=not cancellable):
+ future: asyncio.Future = asyncio.Future()
+ root_task = find_root_task()
+ if not idle_workers:
+ worker = WorkerThread(root_task, workers, idle_workers)
+ worker.start()
+ workers.add(worker)
+ root_task.add_done_callback(worker.stop)
+ else:
+ worker = idle_workers.pop()
+
+ # Prune any other workers that have been idle for MAX_IDLE_TIME seconds or longer
+ now = current_time()
+ while idle_workers:
+ if now - idle_workers[0].idle_since < WorkerThread.MAX_IDLE_TIME:
+ break
+
+ expired_worker = idle_workers.popleft()
+ expired_worker.root_task.remove_done_callback(expired_worker.stop)
+ expired_worker.stop()
+
+ context = copy_context()
+ context.run(sniffio.current_async_library_cvar.set, None)
+ worker.queue.put_nowait((context, func, args, future))
+ return await future
+
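+# Usage sketch (illustrative): the public anyio.to_thread.run_sync() delegates
+# here, reusing idle WorkerThread instances and respecting the limiter.
+#
+#     import anyio
+#     import anyio.to_thread
+#
+#     async def main() -> None:
+#         # The blocking call runs in a pooled worker thread; concurrency is
+#         # bounded by the default thread limiter (40 tokens, defined below).
+#         total = await anyio.to_thread.run_sync(sum, range(10))
+#
+#     anyio.run(main)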
+
+def run_sync_from_thread(
+ func: Callable[..., T_Retval],
+ *args: object,
+ loop: asyncio.AbstractEventLoop | None = None,
+) -> T_Retval:
+ @wraps(func)
+ def wrapper() -> None:
+ try:
+ f.set_result(func(*args))
+ except BaseException as exc:
+ f.set_exception(exc)
+ if not isinstance(exc, Exception):
+ raise
+
+ f: concurrent.futures.Future[T_Retval] = Future()
+ loop = loop or threadlocals.loop
+ loop.call_soon_threadsafe(wrapper)
+ return f.result()
+
+
+def run_async_from_thread(
+ func: Callable[..., Awaitable[T_Retval]], *args: object
+) -> T_Retval:
+ f: concurrent.futures.Future[T_Retval] = asyncio.run_coroutine_threadsafe(
+ func(*args), threadlocals.loop
+ )
+ return f.result()
+
+
+class BlockingPortal(abc.BlockingPortal):
+ def __new__(cls) -> BlockingPortal:
+ return object.__new__(cls)
+
+ def __init__(self) -> None:
+ super().__init__()
+ self._loop = get_running_loop()
+
+ def _spawn_task_from_thread(
+ self,
+ func: Callable,
+ args: tuple,
+ kwargs: dict[str, Any],
+ name: object,
+ future: Future,
+ ) -> None:
+ run_sync_from_thread(
+ partial(self._task_group.start_soon, name=name),
+ self._call_func,
+ func,
+ args,
+ kwargs,
+ future,
+ loop=self._loop,
+ )
+
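+# Usage sketch (illustrative): a blocking portal lets plain threads submit
+# work to this event loop through the public from_thread helpers.
+#
+#     from anyio.from_thread import start_blocking_portal
+#
+#     async def add(a: int, b: int) -> int:
+#         return a + b
+#
+#     with start_blocking_portal(backend="asyncio") as portal:
+#         assert portal.call(add, 1, 2) == 3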
+
+#
+# Subprocesses
+#
+
+
+@dataclass(eq=False)
+class StreamReaderWrapper(abc.ByteReceiveStream):
+ _stream: asyncio.StreamReader
+
+ async def receive(self, max_bytes: int = 65536) -> bytes:
+ data = await self._stream.read(max_bytes)
+ if data:
+ return data
+ else:
+ raise EndOfStream
+
+ async def aclose(self) -> None:
+ self._stream.feed_eof()
+
+
+@dataclass(eq=False)
+class StreamWriterWrapper(abc.ByteSendStream):
+ _stream: asyncio.StreamWriter
+
+ async def send(self, item: bytes) -> None:
+ self._stream.write(item)
+ await self._stream.drain()
+
+ async def aclose(self) -> None:
+ self._stream.close()
+
+
+@dataclass(eq=False)
+class Process(abc.Process):
+ _process: asyncio.subprocess.Process
+ _stdin: StreamWriterWrapper | None
+ _stdout: StreamReaderWrapper | None
+ _stderr: StreamReaderWrapper | None
+
+ async def aclose(self) -> None:
+ if self._stdin:
+ await self._stdin.aclose()
+ if self._stdout:
+ await self._stdout.aclose()
+ if self._stderr:
+ await self._stderr.aclose()
+
+ await self.wait()
+
+ async def wait(self) -> int:
+ return await self._process.wait()
+
+ def terminate(self) -> None:
+ self._process.terminate()
+
+ def kill(self) -> None:
+ self._process.kill()
+
+ def send_signal(self, signal: int) -> None:
+ self._process.send_signal(signal)
+
+ @property
+ def pid(self) -> int:
+ return self._process.pid
+
+ @property
+ def returncode(self) -> int | None:
+ return self._process.returncode
+
+ @property
+ def stdin(self) -> abc.ByteSendStream | None:
+ return self._stdin
+
+ @property
+ def stdout(self) -> abc.ByteReceiveStream | None:
+ return self._stdout
+
+ @property
+ def stderr(self) -> abc.ByteReceiveStream | None:
+ return self._stderr
+
+
+async def open_process(
+ command: str | bytes | Sequence[str | bytes],
+ *,
+ shell: bool,
+ stdin: int | IO[Any] | None,
+ stdout: int | IO[Any] | None,
+ stderr: int | IO[Any] | None,
+ cwd: str | bytes | PathLike | None = None,
+ env: Mapping[str, str] | None = None,
+ start_new_session: bool = False,
+) -> Process:
+ await checkpoint()
+ if shell:
+ process = await asyncio.create_subprocess_shell(
+ cast(Union[str, bytes], command),
+ stdin=stdin,
+ stdout=stdout,
+ stderr=stderr,
+ cwd=cwd,
+ env=env,
+ start_new_session=start_new_session,
+ )
+ else:
+ process = await asyncio.create_subprocess_exec(
+ *command,
+ stdin=stdin,
+ stdout=stdout,
+ stderr=stderr,
+ cwd=cwd,
+ env=env,
+ start_new_session=start_new_session,
+ )
+
+ stdin_stream = StreamWriterWrapper(process.stdin) if process.stdin else None
+ stdout_stream = StreamReaderWrapper(process.stdout) if process.stdout else None
+ stderr_stream = StreamReaderWrapper(process.stderr) if process.stderr else None
+ return Process(process, stdin_stream, stdout_stream, stderr_stream)
+
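+# Usage sketch (illustrative): most callers use the portable
+# anyio.run_process()/anyio.open_process() wrappers, which call this function.
+#
+#     import anyio
+#
+#     async def main() -> None:
+#         result = await anyio.run_process(["echo", "hello"])
+#         print(result.stdout)  # b'hello\n'
+#
+#     anyio.run(main)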
+
+def _forcibly_shutdown_process_pool_on_exit(
+ workers: set[Process], _task: object
+) -> None:
+ """
+ Forcibly shuts down worker processes belonging to this event loop."""
+ child_watcher: asyncio.AbstractChildWatcher | None
+ try:
+ child_watcher = asyncio.get_event_loop_policy().get_child_watcher()
+ except NotImplementedError:
+ child_watcher = None
+
+ # Close as much as possible (w/o async/await) to avoid warnings
+ for process in workers:
+ if process.returncode is None:
+ continue
+
+ process._stdin._stream._transport.close() # type: ignore[union-attr]
+ process._stdout._stream._transport.close() # type: ignore[union-attr]
+ process._stderr._stream._transport.close() # type: ignore[union-attr]
+ process.kill()
+ if child_watcher:
+ child_watcher.remove_child_handler(process.pid)
+
+
+async def _shutdown_process_pool_on_exit(workers: set[Process]) -> None:
+ """
+ Shuts down worker processes belonging to this event loop.
+
+ NOTE: this only works when the event loop was started using asyncio.run() or anyio.run().
+
+ """
+ process: Process
+ try:
+ await sleep(math.inf)
+ except asyncio.CancelledError:
+ for process in workers:
+ if process.returncode is None:
+ process.kill()
+
+ for process in workers:
+ await process.aclose()
+
+
+def setup_process_pool_exit_at_shutdown(workers: set[Process]) -> None:
+ kwargs: dict[str, Any] = (
+ {"name": "AnyIO process pool shutdown task"} if _native_task_names else {}
+ )
+ create_task(_shutdown_process_pool_on_exit(workers), **kwargs)
+ find_root_task().add_done_callback(
+ partial(_forcibly_shutdown_process_pool_on_exit, workers)
+ )
+
+
+#
+# Sockets and networking
+#
+
+
+class StreamProtocol(asyncio.Protocol):
+ read_queue: deque[bytes]
+ read_event: asyncio.Event
+ write_event: asyncio.Event
+ exception: Exception | None = None
+
+ def connection_made(self, transport: asyncio.BaseTransport) -> None:
+ self.read_queue = deque()
+ self.read_event = asyncio.Event()
+ self.write_event = asyncio.Event()
+ self.write_event.set()
+ cast(asyncio.Transport, transport).set_write_buffer_limits(0)
+
+ def connection_lost(self, exc: Exception | None) -> None:
+ if exc:
+ self.exception = BrokenResourceError()
+ self.exception.__cause__ = exc
+
+ self.read_event.set()
+ self.write_event.set()
+
+ def data_received(self, data: bytes) -> None:
+ self.read_queue.append(data)
+ self.read_event.set()
+
+ def eof_received(self) -> bool | None:
+ self.read_event.set()
+ return True
+
+ def pause_writing(self) -> None:
+ self.write_event = asyncio.Event()
+
+ def resume_writing(self) -> None:
+ self.write_event.set()
+
+
+class DatagramProtocol(asyncio.DatagramProtocol):
+ read_queue: deque[tuple[bytes, IPSockAddrType]]
+ read_event: asyncio.Event
+ write_event: asyncio.Event
+ exception: Exception | None = None
+
+ def connection_made(self, transport: asyncio.BaseTransport) -> None:
+ self.read_queue = deque(maxlen=100) # arbitrary value
+ self.read_event = asyncio.Event()
+ self.write_event = asyncio.Event()
+ self.write_event.set()
+
+ def connection_lost(self, exc: Exception | None) -> None:
+ self.read_event.set()
+ self.write_event.set()
+
+ def datagram_received(self, data: bytes, addr: IPSockAddrType) -> None:
+ addr = convert_ipv6_sockaddr(addr)
+ self.read_queue.append((data, addr))
+ self.read_event.set()
+
+ def error_received(self, exc: Exception) -> None:
+ self.exception = exc
+
+ def pause_writing(self) -> None:
+ self.write_event.clear()
+
+ def resume_writing(self) -> None:
+ self.write_event.set()
+
+
+class SocketStream(abc.SocketStream):
+ def __init__(self, transport: asyncio.Transport, protocol: StreamProtocol):
+ self._transport = transport
+ self._protocol = protocol
+ self._receive_guard = ResourceGuard("reading from")
+ self._send_guard = ResourceGuard("writing to")
+ self._closed = False
+
+ @property
+ def _raw_socket(self) -> socket.socket:
+ return self._transport.get_extra_info("socket")
+
+ async def receive(self, max_bytes: int = 65536) -> bytes:
+ with self._receive_guard:
+ await checkpoint()
+
+ if (
+ not self._protocol.read_event.is_set()
+ and not self._transport.is_closing()
+ ):
+ self._transport.resume_reading()
+ await self._protocol.read_event.wait()
+ self._transport.pause_reading()
+
+ try:
+ chunk = self._protocol.read_queue.popleft()
+ except IndexError:
+ if self._closed:
+ raise ClosedResourceError from None
+ elif self._protocol.exception:
+ raise self._protocol.exception
+ else:
+ raise EndOfStream from None
+
+ if len(chunk) > max_bytes:
+ # Split the oversized chunk
+ chunk, leftover = chunk[:max_bytes], chunk[max_bytes:]
+ self._protocol.read_queue.appendleft(leftover)
+
+ # If the read queue is empty, clear the flag so that the next call will block until
+ # data is available
+ if not self._protocol.read_queue:
+ self._protocol.read_event.clear()
+
+ return chunk
+
+ async def send(self, item: bytes) -> None:
+ with self._send_guard:
+ await checkpoint()
+
+ if self._closed:
+ raise ClosedResourceError
+ elif self._protocol.exception is not None:
+ raise self._protocol.exception
+
+ try:
+ self._transport.write(item)
+ except RuntimeError as exc:
+ if self._transport.is_closing():
+ raise BrokenResourceError from exc
+ else:
+ raise
+
+ await self._protocol.write_event.wait()
+
+ async def send_eof(self) -> None:
+ try:
+ self._transport.write_eof()
+ except OSError:
+ pass
+
+ async def aclose(self) -> None:
+ if not self._transport.is_closing():
+ self._closed = True
+ try:
+ self._transport.write_eof()
+ except OSError:
+ pass
+
+ self._transport.close()
+ await sleep(0)
+ self._transport.abort()
+
+
+class UNIXSocketStream(abc.SocketStream):
+ _receive_future: asyncio.Future | None = None
+ _send_future: asyncio.Future | None = None
+ _closing = False
+
+ def __init__(self, raw_socket: socket.socket):
+ self.__raw_socket = raw_socket
+ self._loop = get_running_loop()
+ self._receive_guard = ResourceGuard("reading from")
+ self._send_guard = ResourceGuard("writing to")
+
+ @property
+ def _raw_socket(self) -> socket.socket:
+ return self.__raw_socket
+
+ def _wait_until_readable(self, loop: asyncio.AbstractEventLoop) -> asyncio.Future:
+ def callback(f: object) -> None:
+ del self._receive_future
+ loop.remove_reader(self.__raw_socket)
+
+ f = self._receive_future = asyncio.Future()
+ self._loop.add_reader(self.__raw_socket, f.set_result, None)
+ f.add_done_callback(callback)
+ return f
+
+ def _wait_until_writable(self, loop: asyncio.AbstractEventLoop) -> asyncio.Future:
+ def callback(f: object) -> None:
+ del self._send_future
+ loop.remove_writer(self.__raw_socket)
+
+ f = self._send_future = asyncio.Future()
+ self._loop.add_writer(self.__raw_socket, f.set_result, None)
+ f.add_done_callback(callback)
+ return f
+
+ async def send_eof(self) -> None:
+ with self._send_guard:
+ self._raw_socket.shutdown(socket.SHUT_WR)
+
+ async def receive(self, max_bytes: int = 65536) -> bytes:
+ loop = get_running_loop()
+ await checkpoint()
+ with self._receive_guard:
+ while True:
+ try:
+ data = self.__raw_socket.recv(max_bytes)
+ except BlockingIOError:
+ await self._wait_until_readable(loop)
+ except OSError as exc:
+ if self._closing:
+ raise ClosedResourceError from None
+ else:
+ raise BrokenResourceError from exc
+ else:
+ if not data:
+ raise EndOfStream
+
+ return data
+
+ async def send(self, item: bytes) -> None:
+ loop = get_running_loop()
+ await checkpoint()
+ with self._send_guard:
+ view = memoryview(item)
+ while view:
+ try:
+ bytes_sent = self.__raw_socket.send(view)
+ except BlockingIOError:
+ await self._wait_until_writable(loop)
+ except OSError as exc:
+ if self._closing:
+ raise ClosedResourceError from None
+ else:
+ raise BrokenResourceError from exc
+ else:
+ view = view[bytes_sent:]
+
+ async def receive_fds(self, msglen: int, maxfds: int) -> tuple[bytes, list[int]]:
+ if not isinstance(msglen, int) or msglen < 0:
+ raise ValueError("msglen must be a non-negative integer")
+ if not isinstance(maxfds, int) or maxfds < 1:
+ raise ValueError("maxfds must be a positive integer")
+
+ loop = get_running_loop()
+ fds = array.array("i")
+ await checkpoint()
+ with self._receive_guard:
+ while True:
+ try:
+ message, ancdata, flags, addr = self.__raw_socket.recvmsg(
+ msglen, socket.CMSG_LEN(maxfds * fds.itemsize)
+ )
+ except BlockingIOError:
+ await self._wait_until_readable(loop)
+ except OSError as exc:
+ if self._closing:
+ raise ClosedResourceError from None
+ else:
+ raise BrokenResourceError from exc
+ else:
+ if not message and not ancdata:
+ raise EndOfStream
+
+ break
+
+ for cmsg_level, cmsg_type, cmsg_data in ancdata:
+ if cmsg_level != socket.SOL_SOCKET or cmsg_type != socket.SCM_RIGHTS:
+ raise RuntimeError(
+ f"Received unexpected ancillary data; message = {message!r}, "
+ f"cmsg_level = {cmsg_level}, cmsg_type = {cmsg_type}"
+ )
+
+ fds.frombytes(cmsg_data[: len(cmsg_data) - (len(cmsg_data) % fds.itemsize)])
+
+ return message, list(fds)
+
+ async def send_fds(self, message: bytes, fds: Collection[int | IOBase]) -> None:
+ if not message:
+ raise ValueError("message must not be empty")
+ if not fds:
+ raise ValueError("fds must not be empty")
+
+ loop = get_running_loop()
+ filenos: list[int] = []
+ for fd in fds:
+ if isinstance(fd, int):
+ filenos.append(fd)
+ elif isinstance(fd, IOBase):
+ filenos.append(fd.fileno())
+
+ fdarray = array.array("i", filenos)
+ await checkpoint()
+ with self._send_guard:
+ while True:
+ try:
+ # The ignore can be removed after mypy picks up
+ # https://github.com/python/typeshed/pull/5545
+ self.__raw_socket.sendmsg(
+ [message], [(socket.SOL_SOCKET, socket.SCM_RIGHTS, fdarray)]
+ )
+ break
+ except BlockingIOError:
+ await self._wait_until_writable(loop)
+ except OSError as exc:
+ if self._closing:
+ raise ClosedResourceError from None
+ else:
+ raise BrokenResourceError from exc
+
+ async def aclose(self) -> None:
+ if not self._closing:
+ self._closing = True
+ if self.__raw_socket.fileno() != -1:
+ self.__raw_socket.close()
+
+ if self._receive_future:
+ self._receive_future.set_result(None)
+ if self._send_future:
+ self._send_future.set_result(None)
+
+
+class TCPSocketListener(abc.SocketListener):
+ _accept_scope: CancelScope | None = None
+ _closed = False
+
+ def __init__(self, raw_socket: socket.socket):
+ self.__raw_socket = raw_socket
+ self._loop = cast(asyncio.BaseEventLoop, get_running_loop())
+ self._accept_guard = ResourceGuard("accepting connections from")
+
+ @property
+ def _raw_socket(self) -> socket.socket:
+ return self.__raw_socket
+
+ async def accept(self) -> abc.SocketStream:
+ if self._closed:
+ raise ClosedResourceError
+
+ with self._accept_guard:
+ await checkpoint()
+ with CancelScope() as self._accept_scope:
+ try:
+ client_sock, _addr = await self._loop.sock_accept(self._raw_socket)
+ except asyncio.CancelledError:
+ # Workaround for https://bugs.python.org/issue41317
+ try:
+ self._loop.remove_reader(self._raw_socket)
+ except (ValueError, NotImplementedError):
+ pass
+
+ if self._closed:
+ raise ClosedResourceError from None
+
+ raise
+ finally:
+ self._accept_scope = None
+
+ client_sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
+ transport, protocol = await self._loop.connect_accepted_socket(
+ StreamProtocol, client_sock
+ )
+ return SocketStream(transport, protocol)
+
+ async def aclose(self) -> None:
+ if self._closed:
+ return
+
+ self._closed = True
+ if self._accept_scope:
+ # Workaround for https://bugs.python.org/issue41317
+ try:
+ self._loop.remove_reader(self._raw_socket)
+ except (ValueError, NotImplementedError):
+ pass
+
+ self._accept_scope.cancel()
+ await sleep(0)
+
+ self._raw_socket.close()
+
+
+class UNIXSocketListener(abc.SocketListener):
+ def __init__(self, raw_socket: socket.socket):
+ self.__raw_socket = raw_socket
+ self._loop = get_running_loop()
+ self._accept_guard = ResourceGuard("accepting connections from")
+ self._closed = False
+
+ async def accept(self) -> abc.SocketStream:
+ await checkpoint()
+ with self._accept_guard:
+ while True:
+ try:
+ client_sock, _ = self.__raw_socket.accept()
+ client_sock.setblocking(False)
+ return UNIXSocketStream(client_sock)
+ except BlockingIOError:
+ f: asyncio.Future = asyncio.Future()
+ self._loop.add_reader(self.__raw_socket, f.set_result, None)
+ f.add_done_callback(
+ lambda _: self._loop.remove_reader(self.__raw_socket)
+ )
+ await f
+ except OSError as exc:
+ if self._closed:
+ raise ClosedResourceError from None
+ else:
+ raise BrokenResourceError from exc
+
+ async def aclose(self) -> None:
+ self._closed = True
+ self.__raw_socket.close()
+
+ @property
+ def _raw_socket(self) -> socket.socket:
+ return self.__raw_socket
+
+
+class UDPSocket(abc.UDPSocket):
+ def __init__(
+ self, transport: asyncio.DatagramTransport, protocol: DatagramProtocol
+ ):
+ self._transport = transport
+ self._protocol = protocol
+ self._receive_guard = ResourceGuard("reading from")
+ self._send_guard = ResourceGuard("writing to")
+ self._closed = False
+
+ @property
+ def _raw_socket(self) -> socket.socket:
+ return self._transport.get_extra_info("socket")
+
+ async def aclose(self) -> None:
+ if not self._transport.is_closing():
+ self._closed = True
+ self._transport.close()
+
+ async def receive(self) -> tuple[bytes, IPSockAddrType]:
+ with self._receive_guard:
+ await checkpoint()
+
+ # If the buffer is empty, ask for more data
+ if not self._protocol.read_queue and not self._transport.is_closing():
+ self._protocol.read_event.clear()
+ await self._protocol.read_event.wait()
+
+ try:
+ return self._protocol.read_queue.popleft()
+ except IndexError:
+ if self._closed:
+ raise ClosedResourceError from None
+ else:
+ raise BrokenResourceError from None
+
+ async def send(self, item: UDPPacketType) -> None:
+ with self._send_guard:
+ await checkpoint()
+ await self._protocol.write_event.wait()
+ if self._closed:
+ raise ClosedResourceError
+ elif self._transport.is_closing():
+ raise BrokenResourceError
+ else:
+ self._transport.sendto(*item)
+
+
+class ConnectedUDPSocket(abc.ConnectedUDPSocket):
+ def __init__(
+ self, transport: asyncio.DatagramTransport, protocol: DatagramProtocol
+ ):
+ self._transport = transport
+ self._protocol = protocol
+ self._receive_guard = ResourceGuard("reading from")
+ self._send_guard = ResourceGuard("writing to")
+ self._closed = False
+
+ @property
+ def _raw_socket(self) -> socket.socket:
+ return self._transport.get_extra_info("socket")
+
+ async def aclose(self) -> None:
+ if not self._transport.is_closing():
+ self._closed = True
+ self._transport.close()
+
+ async def receive(self) -> bytes:
+ with self._receive_guard:
+ await checkpoint()
+
+ # If the buffer is empty, ask for more data
+ if not self._protocol.read_queue and not self._transport.is_closing():
+ self._protocol.read_event.clear()
+ await self._protocol.read_event.wait()
+
+ try:
+ packet = self._protocol.read_queue.popleft()
+ except IndexError:
+ if self._closed:
+ raise ClosedResourceError from None
+ else:
+ raise BrokenResourceError from None
+
+ return packet[0]
+
+ async def send(self, item: bytes) -> None:
+ with self._send_guard:
+ await checkpoint()
+ await self._protocol.write_event.wait()
+ if self._closed:
+ raise ClosedResourceError
+ elif self._transport.is_closing():
+ raise BrokenResourceError
+ else:
+ self._transport.sendto(item)
+
+
+async def connect_tcp(
+ host: str, port: int, local_addr: tuple[str, int] | None = None
+) -> SocketStream:
+ transport, protocol = cast(
+ Tuple[asyncio.Transport, StreamProtocol],
+ await get_running_loop().create_connection(
+ StreamProtocol, host, port, local_addr=local_addr
+ ),
+ )
+ transport.pause_reading()
+ return SocketStream(transport, protocol)
+
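+# Usage sketch (illustrative): the public anyio.connect_tcp() resolves the
+# remote address and then calls the backend function above.
+#
+#     import anyio
+#
+#     async def main() -> None:
+#         stream = await anyio.connect_tcp("example.com", 80)
+#         async with stream:
+#             await stream.send(b"HEAD / HTTP/1.0\r\nHost: example.com\r\n\r\n")
+#             print(await stream.receive())
+#
+#     anyio.run(main)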
+
+async def connect_unix(path: str) -> UNIXSocketStream:
+ await checkpoint()
+ loop = get_running_loop()
+ raw_socket = socket.socket(socket.AF_UNIX)
+ raw_socket.setblocking(False)
+ while True:
+ try:
+ raw_socket.connect(path)
+ except BlockingIOError:
+ f: asyncio.Future = asyncio.Future()
+ loop.add_writer(raw_socket, f.set_result, None)
+ f.add_done_callback(lambda _: loop.remove_writer(raw_socket))
+ await f
+ except BaseException:
+ raw_socket.close()
+ raise
+ else:
+ return UNIXSocketStream(raw_socket)
+
+
+async def create_udp_socket(
+ family: socket.AddressFamily,
+ local_address: IPSockAddrType | None,
+ remote_address: IPSockAddrType | None,
+ reuse_port: bool,
+) -> UDPSocket | ConnectedUDPSocket:
+ result = await get_running_loop().create_datagram_endpoint(
+ DatagramProtocol,
+ local_addr=local_address,
+ remote_addr=remote_address,
+ family=family,
+ reuse_port=reuse_port,
+ )
+ transport = result[0]
+ protocol = result[1]
+ if protocol.exception:
+ transport.close()
+ raise protocol.exception
+
+ if not remote_address:
+ return UDPSocket(transport, protocol)
+ else:
+ return ConnectedUDPSocket(transport, protocol)
+
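+# Usage sketch (illustrative): via the public anyio.create_udp_socket();
+# passing remote_host/remote_port yields the ConnectedUDPSocket variant.
+#
+#     import anyio
+#
+#     async def main() -> None:
+#         async with await anyio.create_udp_socket(
+#             local_host="127.0.0.1", local_port=0
+#         ) as udp:
+#             await udp.sendto(b"ping", "127.0.0.1", 9999)
+#
+#     anyio.run(main)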
+
+async def getaddrinfo(
+ host: bytes | str,
+ port: str | int | None,
+ *,
+ family: int | AddressFamily = 0,
+ type: int | SocketKind = 0,
+ proto: int = 0,
+ flags: int = 0,
+) -> GetAddrInfoReturnType:
+ # https://github.com/python/typeshed/pull/4304
+ result = await get_running_loop().getaddrinfo(
+ host, port, family=family, type=type, proto=proto, flags=flags
+ )
+ return cast(GetAddrInfoReturnType, result)
+
+
+async def getnameinfo(sockaddr: IPSockAddrType, flags: int = 0) -> tuple[str, str]:
+ return await get_running_loop().getnameinfo(sockaddr, flags)
+
+
+_read_events: RunVar[dict[Any, asyncio.Event]] = RunVar("read_events")
+_write_events: RunVar[dict[Any, asyncio.Event]] = RunVar("write_events")
+
+
+async def wait_socket_readable(sock: socket.socket) -> None:
+ await checkpoint()
+ try:
+ read_events = _read_events.get()
+ except LookupError:
+ read_events = {}
+ _read_events.set(read_events)
+
+ if read_events.get(sock):
+ raise BusyResourceError("reading from") from None
+
+ loop = get_running_loop()
+ event = read_events[sock] = asyncio.Event()
+ loop.add_reader(sock, event.set)
+ try:
+ await event.wait()
+ finally:
+ if read_events.pop(sock, None) is not None:
+ loop.remove_reader(sock)
+ readable = True
+ else:
+ readable = False
+
+ if not readable:
+ raise ClosedResourceError
+
+
+async def wait_socket_writable(sock: socket.socket) -> None:
+ await checkpoint()
+ try:
+ write_events = _write_events.get()
+ except LookupError:
+ write_events = {}
+ _write_events.set(write_events)
+
+ if write_events.get(sock):
+ raise BusyResourceError("writing to") from None
+
+ loop = get_running_loop()
+ event = write_events[sock] = asyncio.Event()
+ loop.add_writer(sock.fileno(), event.set)
+ try:
+ await event.wait()
+ finally:
+ if write_events.pop(sock, None) is not None:
+ loop.remove_writer(sock)
+ writable = True
+ else:
+ writable = False
+
+ if not writable:
+ raise ClosedResourceError
+
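+# Usage sketch (illustrative): these hooks back the public
+# anyio.wait_socket_readable()/wait_socket_writable() helpers, used to drive
+# externally created non-blocking sockets.
+#
+#     import socket
+#
+#     import anyio
+#
+#     async def read_when_ready(sock: socket.socket) -> bytes:
+#         sock.setblocking(False)
+#         await anyio.wait_socket_readable(sock)
+#         return sock.recv(4096)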
+
+#
+# Synchronization
+#
+
+
+class Event(BaseEvent):
+ def __new__(cls) -> Event:
+ return object.__new__(cls)
+
+ def __init__(self) -> None:
+ self._event = asyncio.Event()
+
+ def set(self) -> DeprecatedAwaitable:
+ self._event.set()
+ return DeprecatedAwaitable(self.set)
+
+ def is_set(self) -> bool:
+ return self._event.is_set()
+
+ async def wait(self) -> None:
+ if await self._event.wait():
+ await checkpoint()
+
+ def statistics(self) -> EventStatistics:
+ return EventStatistics(len(self._event._waiters)) # type: ignore[attr-defined]
+
+
+class CapacityLimiter(BaseCapacityLimiter):
+ _total_tokens: float = 0
+
+ def __new__(cls, total_tokens: float) -> CapacityLimiter:
+ return object.__new__(cls)
+
+ def __init__(self, total_tokens: float):
+ self._borrowers: set[Any] = set()
+ self._wait_queue: OrderedDict[Any, asyncio.Event] = OrderedDict()
+ self.total_tokens = total_tokens
+
+ async def __aenter__(self) -> None:
+ await self.acquire()
+
+ async def __aexit__(
+ self,
+ exc_type: type[BaseException] | None,
+ exc_val: BaseException | None,
+ exc_tb: TracebackType | None,
+ ) -> None:
+ self.release()
+
+ @property
+ def total_tokens(self) -> float:
+ return self._total_tokens
+
+ @total_tokens.setter
+ def total_tokens(self, value: float) -> None:
+ if not isinstance(value, int) and not math.isinf(value):
+ raise TypeError("total_tokens must be an int or math.inf")
+ if value < 1:
+ raise ValueError("total_tokens must be >= 1")
+
+ old_value = self._total_tokens
+ self._total_tokens = value
+ events = []
+ for event in self._wait_queue.values():
+ if value <= old_value:
+ break
+
+ if not event.is_set():
+ events.append(event)
+ old_value += 1
+
+ for event in events:
+ event.set()
+
+ @property
+ def borrowed_tokens(self) -> int:
+ return len(self._borrowers)
+
+ @property
+ def available_tokens(self) -> float:
+ return self._total_tokens - len(self._borrowers)
+
+ def acquire_nowait(self) -> DeprecatedAwaitable:
+ self.acquire_on_behalf_of_nowait(current_task())
+ return DeprecatedAwaitable(self.acquire_nowait)
+
+ def acquire_on_behalf_of_nowait(self, borrower: object) -> DeprecatedAwaitable:
+ if borrower in self._borrowers:
+ raise RuntimeError(
+ "this borrower is already holding one of this CapacityLimiter's "
+ "tokens"
+ )
+
+ if self._wait_queue or len(self._borrowers) >= self._total_tokens:
+ raise WouldBlock
+
+ self._borrowers.add(borrower)
+ return DeprecatedAwaitable(self.acquire_on_behalf_of_nowait)
+
+ async def acquire(self) -> None:
+ return await self.acquire_on_behalf_of(current_task())
+
+ async def acquire_on_behalf_of(self, borrower: object) -> None:
+ await checkpoint_if_cancelled()
+ try:
+ self.acquire_on_behalf_of_nowait(borrower)
+ except WouldBlock:
+ event = asyncio.Event()
+ self._wait_queue[borrower] = event
+ try:
+ await event.wait()
+ except BaseException:
+ self._wait_queue.pop(borrower, None)
+ raise
+
+ self._borrowers.add(borrower)
+ else:
+ try:
+ await cancel_shielded_checkpoint()
+ except BaseException:
+ self.release()
+ raise
+
+ def release(self) -> None:
+ self.release_on_behalf_of(current_task())
+
+ def release_on_behalf_of(self, borrower: object) -> None:
+ try:
+ self._borrowers.remove(borrower)
+ except KeyError:
+ raise RuntimeError(
+ "this borrower isn't holding any of this CapacityLimiter's " "tokens"
+ ) from None
+
+ # Notify the next task in line if this limiter has free capacity now
+ if self._wait_queue and len(self._borrowers) < self._total_tokens:
+ event = self._wait_queue.popitem(last=False)[1]
+ event.set()
+
+ def statistics(self) -> CapacityLimiterStatistics:
+ return CapacityLimiterStatistics(
+ self.borrowed_tokens,
+ self.total_tokens,
+ tuple(self._borrowers),
+ len(self._wait_queue),
+ )
+
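+# Usage sketch (illustrative): at most total_tokens holders at a time, with
+# waiters woken in FIFO order from the OrderedDict above.
+#
+#     import anyio
+#
+#     async def worker(limiter: anyio.CapacityLimiter) -> None:
+#         async with limiter:  # blocks while all tokens are borrowed
+#             await anyio.sleep(0.1)
+#
+#     async def main() -> None:
+#         limiter = anyio.CapacityLimiter(2)  # at most 2 concurrent workers
+#         async with anyio.create_task_group() as tg:
+#             for _ in range(5):
+#                 tg.start_soon(worker, limiter)
+#
+#     anyio.run(main)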
+
+_default_thread_limiter: RunVar[CapacityLimiter] = RunVar("_default_thread_limiter")
+
+
+def current_default_thread_limiter() -> CapacityLimiter:
+ try:
+ return _default_thread_limiter.get()
+ except LookupError:
+ limiter = CapacityLimiter(40)
+ _default_thread_limiter.set(limiter)
+ return limiter
+
+
+#
+# Operating system signals
+#
+
+
+class _SignalReceiver(DeprecatedAsyncContextManager["_SignalReceiver"]):
+ def __init__(self, signals: tuple[int, ...]):
+ self._signals = signals
+ self._loop = get_running_loop()
+ self._signal_queue: deque[int] = deque()
+ self._future: asyncio.Future = asyncio.Future()
+ self._handled_signals: set[int] = set()
+
+ def _deliver(self, signum: int) -> None:
+ self._signal_queue.append(signum)
+ if not self._future.done():
+ self._future.set_result(None)
+
+ def __enter__(self) -> _SignalReceiver:
+ for sig in set(self._signals):
+ self._loop.add_signal_handler(sig, self._deliver, sig)
+ self._handled_signals.add(sig)
+
+ return self
+
+ def __exit__(
+ self,
+ exc_type: type[BaseException] | None,
+ exc_val: BaseException | None,
+ exc_tb: TracebackType | None,
+ ) -> bool | None:
+ for sig in self._handled_signals:
+ self._loop.remove_signal_handler(sig)
+ return None
+
+ def __aiter__(self) -> _SignalReceiver:
+ return self
+
+ async def __anext__(self) -> int:
+ await checkpoint()
+ if not self._signal_queue:
+ self._future = asyncio.Future()
+ await self._future
+
+ return self._signal_queue.popleft()
+
+
+def open_signal_receiver(*signals: int) -> _SignalReceiver:
+ return _SignalReceiver(signals)
+
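+# Usage sketch (illustrative): the receiver is entered as a synchronous
+# context manager and iterated asynchronously for delivered signals.
+#
+#     import signal
+#
+#     import anyio
+#
+#     async def main() -> None:
+#         with anyio.open_signal_receiver(signal.SIGINT, signal.SIGTERM) as sigs:
+#             async for signum in sigs:
+#                 print("received signal", signum)
+#                 break
+#
+#     anyio.run(main)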
+
+#
+# Testing and debugging
+#
+
+
+def _create_task_info(task: asyncio.Task) -> TaskInfo:
+ task_state = _task_states.get(task)
+ if task_state is None:
+ name = task.get_name() if _native_task_names else None
+ parent_id = None
+ else:
+ name = task_state.name
+ parent_id = task_state.parent_id
+
+ return TaskInfo(id(task), parent_id, name, get_coro(task))
+
+
+def get_current_task() -> TaskInfo:
+ return _create_task_info(current_task()) # type: ignore[arg-type]
+
+
+def get_running_tasks() -> list[TaskInfo]:
+ return [_create_task_info(task) for task in all_tasks() if not task.done()]
+
+
+async def wait_all_tasks_blocked() -> None:
+ await checkpoint()
+ this_task = current_task()
+ while True:
+ for task in all_tasks():
+ if task is this_task:
+ continue
+
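+ # A task that is not waiting on an unfinished future is considered
+ # runnable, so back off briefly and poll again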
+ if task._fut_waiter is None or task._fut_waiter.done(): # type: ignore[attr-defined]
+ await sleep(0.1)
+ break
+ else:
+ return
+
+
+class TestRunner(abc.TestRunner):
+ def __init__(
+ self,
+ debug: bool = False,
+ use_uvloop: bool = False,
+ policy: asyncio.AbstractEventLoopPolicy | None = None,
+ ):
+ self._exceptions: list[BaseException] = []
+ _maybe_set_event_loop_policy(policy, use_uvloop)
+ self._loop = asyncio.new_event_loop()
+ self._loop.set_debug(debug)
+ self._loop.set_exception_handler(self._exception_handler)
+ asyncio.set_event_loop(self._loop)
+
+ def _cancel_all_tasks(self) -> None:
+ to_cancel = all_tasks(self._loop)
+ if not to_cancel:
+ return
+
+ for task in to_cancel:
+ task.cancel()
+
+ self._loop.run_until_complete(
+ asyncio.gather(*to_cancel, return_exceptions=True)
+ )
+
+ for task in to_cancel:
+ if task.cancelled():
+ continue
+ if task.exception() is not None:
+ raise cast(BaseException, task.exception())
+
+ def _exception_handler(
+ self, loop: asyncio.AbstractEventLoop, context: dict[str, Any]
+ ) -> None:
+ if isinstance(context.get("exception"), Exception):
+ self._exceptions.append(context["exception"])
+ else:
+ loop.default_exception_handler(context)
+
+ def _raise_async_exceptions(self) -> None:
+ # Re-raise any exceptions raised in asynchronous callbacks
+ if self._exceptions:
+ exceptions, self._exceptions = self._exceptions, []
+ if len(exceptions) == 1:
+ raise exceptions[0]
+ elif exceptions:
+ raise ExceptionGroup(exceptions)
+
+ def close(self) -> None:
+ try:
+ self._cancel_all_tasks()
+ self._loop.run_until_complete(self._loop.shutdown_asyncgens())
+ finally:
+ asyncio.set_event_loop(None)
+ self._loop.close()
+
+ def run_asyncgen_fixture(
+ self,
+ fixture_func: Callable[..., AsyncGenerator[T_Retval, Any]],
+ kwargs: dict[str, Any],
+ ) -> Iterable[T_Retval]:
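+ # The fixture generator runs in a background task: its first asend()
+ # delivers the fixture value through the future "f", and setting "event"
+ # later resumes it so that its teardown code can run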
+ async def fixture_runner() -> None:
+ agen = fixture_func(**kwargs)
+ try:
+ retval = await agen.asend(None)
+ self._raise_async_exceptions()
+ except BaseException as exc:
+ f.set_exception(exc)
+ return
+ else:
+ f.set_result(retval)
+
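+ # Hold the generator here until the dependent test has finished,
+ # then resume it to run its teardown part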
+ await event.wait()
+ try:
+ await agen.asend(None)
+ except StopAsyncIteration:
+ pass
+ else:
+ await agen.aclose()
+ raise RuntimeError("Async generator fixture did not stop")
+
+ f = self._loop.create_future()
+ event = asyncio.Event()
+ fixture_task = self._loop.create_task(fixture_runner())
+ self._loop.run_until_complete(f)
+ yield f.result()
+ event.set()
+ self._loop.run_until_complete(fixture_task)
+ self._raise_async_exceptions()
+
+ def run_fixture(
+ self,
+ fixture_func: Callable[..., Coroutine[Any, Any, T_Retval]],
+ kwargs: dict[str, Any],
+ ) -> T_Retval:
+ retval = self._loop.run_until_complete(fixture_func(**kwargs))
+ self._raise_async_exceptions()
+ return retval
+
+ def run_test(
+ self, test_func: Callable[..., Coroutine[Any, Any, Any]], kwargs: dict[str, Any]
+ ) -> None:
+ try:
+ self._loop.run_until_complete(test_func(**kwargs))
+ except Exception as exc:
+ self._exceptions.append(exc)
+
+ self._raise_async_exceptions()
diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/_backends/_trio.py b/Backend/venv/lib/python3.12/site-packages/anyio/_backends/_trio.py
new file mode 100644
index 00000000..cf289435
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/anyio/_backends/_trio.py
@@ -0,0 +1,996 @@
+from __future__ import annotations
+
+import array
+import math
+import socket
+from concurrent.futures import Future
+from contextvars import copy_context
+from dataclasses import dataclass
+from functools import partial
+from io import IOBase
+from os import PathLike
+from signal import Signals
+from types import TracebackType
+from typing import (
+ IO,
+ TYPE_CHECKING,
+ Any,
+ AsyncGenerator,
+ AsyncIterator,
+ Awaitable,
+ Callable,
+ Collection,
+ Coroutine,
+ Generic,
+ Iterable,
+ Mapping,
+ NoReturn,
+ Sequence,
+ TypeVar,
+ cast,
+)
+
+import sniffio
+import trio.from_thread
+from outcome import Error, Outcome, Value
+from trio.socket import SocketType as TrioSocketType
+from trio.to_thread import run_sync
+
+from .. import CapacityLimiterStatistics, EventStatistics, TaskInfo, abc
+from .._core._compat import DeprecatedAsyncContextManager, DeprecatedAwaitable
+from .._core._eventloop import claim_worker_thread
+from .._core._exceptions import (
+ BrokenResourceError,
+ BusyResourceError,
+ ClosedResourceError,
+ EndOfStream,
+)
+from .._core._exceptions import ExceptionGroup as BaseExceptionGroup
+from .._core._sockets import convert_ipv6_sockaddr
+from .._core._synchronization import CapacityLimiter as BaseCapacityLimiter
+from .._core._synchronization import Event as BaseEvent
+from .._core._synchronization import ResourceGuard
+from .._core._tasks import CancelScope as BaseCancelScope
+from ..abc import IPSockAddrType, UDPPacketType
+
+if TYPE_CHECKING:
+ from trio_typing import TaskStatus
+
+try:
+ from trio import lowlevel as trio_lowlevel
+except ImportError:
+ from trio import hazmat as trio_lowlevel # type: ignore[no-redef]
+ from trio.hazmat import wait_readable, wait_writable
+else:
+ from trio.lowlevel import wait_readable, wait_writable
+
+try:
+ trio_open_process = trio_lowlevel.open_process
+except AttributeError:
+ # isort: off
+ from trio import ( # type: ignore[attr-defined, no-redef]
+ open_process as trio_open_process,
+ )
+
+T_Retval = TypeVar("T_Retval")
+T_SockAddr = TypeVar("T_SockAddr", str, IPSockAddrType)
+
+
+#
+# Event loop
+#
+
+run = trio.run
+current_token = trio.lowlevel.current_trio_token
+RunVar = trio.lowlevel.RunVar
+
+
+#
+# Miscellaneous
+#
+
+sleep = trio.sleep
+
+
+#
+# Timeouts and cancellation
+#
+
+
+class CancelScope(BaseCancelScope):
+ def __new__(
+ cls, original: trio.CancelScope | None = None, **kwargs: object
+ ) -> CancelScope:
+ return object.__new__(cls)
+
+ def __init__(self, original: trio.CancelScope | None = None, **kwargs: Any) -> None:
+ self.__original = original or trio.CancelScope(**kwargs)
+
+ def __enter__(self) -> CancelScope:
+ self.__original.__enter__()
+ return self
+
+ def __exit__(
+ self,
+ exc_type: type[BaseException] | None,
+ exc_val: BaseException | None,
+ exc_tb: TracebackType | None,
+ ) -> bool | None:
+ # https://github.com/python-trio/trio-typing/pull/79
+ return self.__original.__exit__( # type: ignore[func-returns-value]
+ exc_type, exc_val, exc_tb
+ )
+
+ def cancel(self) -> DeprecatedAwaitable:
+ self.__original.cancel()
+ return DeprecatedAwaitable(self.cancel)
+
+ @property
+ def deadline(self) -> float:
+ return self.__original.deadline
+
+ @deadline.setter
+ def deadline(self, value: float) -> None:
+ self.__original.deadline = value
+
+ @property
+ def cancel_called(self) -> bool:
+ return self.__original.cancel_called
+
+ @property
+ def shield(self) -> bool:
+ return self.__original.shield
+
+ @shield.setter
+ def shield(self, value: bool) -> None:
+ self.__original.shield = value
+
+
+CancelledError = trio.Cancelled
+checkpoint = trio.lowlevel.checkpoint
+checkpoint_if_cancelled = trio.lowlevel.checkpoint_if_cancelled
+cancel_shielded_checkpoint = trio.lowlevel.cancel_shielded_checkpoint
+current_effective_deadline = trio.current_effective_deadline
+current_time = trio.current_time
+
+
+#
+# Task groups
+#
+
+
+class ExceptionGroup(BaseExceptionGroup, trio.MultiError):
+ pass
+
+
+class TaskGroup(abc.TaskGroup):
+ def __init__(self) -> None:
+ self._active = False
+ self._nursery_manager = trio.open_nursery()
+ self.cancel_scope = None # type: ignore[assignment]
+
+ async def __aenter__(self) -> TaskGroup:
+ self._active = True
+ self._nursery = await self._nursery_manager.__aenter__()
+ self.cancel_scope = CancelScope(self._nursery.cancel_scope)
+ return self
+
+ async def __aexit__(
+ self,
+ exc_type: type[BaseException] | None,
+ exc_val: BaseException | None,
+ exc_tb: TracebackType | None,
+ ) -> bool | None:
+ try:
+ return await self._nursery_manager.__aexit__(exc_type, exc_val, exc_tb)
+ except trio.MultiError as exc:
+ raise ExceptionGroup(exc.exceptions) from None
+ finally:
+ self._active = False
+
+ def start_soon(
+ self, func: Callable[..., Awaitable[Any]], *args: object, name: object = None
+ ) -> None:
+ if not self._active:
+ raise RuntimeError(
+ "This task group is not active; no new tasks can be started."
+ )
+
+ self._nursery.start_soon(func, *args, name=name)
+
+ async def start(
+ self, func: Callable[..., Awaitable[Any]], *args: object, name: object = None
+ ) -> object:
+ if not self._active:
+ raise RuntimeError(
+ "This task group is not active; no new tasks can be started."
+ )
+
+ return await self._nursery.start(func, *args, name=name)
+
+
+#
+# Threads
+#
+
+
+async def run_sync_in_worker_thread(
+ func: Callable[..., T_Retval],
+ *args: object,
+ cancellable: bool = False,
+ limiter: trio.CapacityLimiter | None = None,
+) -> T_Retval:
+ def wrapper() -> T_Retval:
+ with claim_worker_thread("trio"):
+ return func(*args)
+
+ # TODO: remove explicit context copying when trio 0.20 is the minimum requirement
+ context = copy_context()
+ context.run(sniffio.current_async_library_cvar.set, None)
+ return await run_sync(
+ context.run, wrapper, cancellable=cancellable, limiter=limiter
+ )
+
+
+# TODO: remove this workaround when trio 0.20 is the minimum requirement
+def run_async_from_thread(
+ fn: Callable[..., Awaitable[T_Retval]], *args: Any
+) -> T_Retval:
+ async def wrapper() -> T_Retval:
+ retval: T_Retval
+
+ async def inner() -> None:
+ nonlocal retval
+ __tracebackhide__ = True
+ retval = await fn(*args)
+
+ async with trio.open_nursery() as n:
+ context.run(n.start_soon, inner)
+
+ __tracebackhide__ = True
+ return retval # noqa: F821
+
+ context = copy_context()
+ context.run(sniffio.current_async_library_cvar.set, "trio")
+ return trio.from_thread.run(wrapper)
+
+
+def run_sync_from_thread(fn: Callable[..., T_Retval], *args: Any) -> T_Retval:
+ # TODO: remove explicit context copying when trio 0.20 is the minimum requirement
+ retval = trio.from_thread.run_sync(copy_context().run, fn, *args)
+ return cast(T_Retval, retval)
+
+
+class BlockingPortal(abc.BlockingPortal):
+ def __new__(cls) -> BlockingPortal:
+ return object.__new__(cls)
+
+ def __init__(self) -> None:
+ super().__init__()
+ self._token = trio.lowlevel.current_trio_token()
+
+ def _spawn_task_from_thread(
+ self,
+ func: Callable,
+ args: tuple,
+ kwargs: dict[str, Any],
+ name: object,
+ future: Future,
+ ) -> None:
+ context = copy_context()
+ context.run(sniffio.current_async_library_cvar.set, "trio")
+ trio.from_thread.run_sync(
+ context.run,
+ partial(self._task_group.start_soon, name=name),
+ self._call_func,
+ func,
+ args,
+ kwargs,
+ future,
+ trio_token=self._token,
+ )
+
+
+#
+# Subprocesses
+#
+
+
+@dataclass(eq=False)
+class ReceiveStreamWrapper(abc.ByteReceiveStream):
+ _stream: trio.abc.ReceiveStream
+
+ async def receive(self, max_bytes: int | None = None) -> bytes:
+ try:
+ data = await self._stream.receive_some(max_bytes)
+ except trio.ClosedResourceError as exc:
+ raise ClosedResourceError from exc.__cause__
+ except trio.BrokenResourceError as exc:
+ raise BrokenResourceError from exc.__cause__
+
+ if data:
+ return data
+ else:
+ raise EndOfStream
+
+ async def aclose(self) -> None:
+ await self._stream.aclose()
+
+
+@dataclass(eq=False)
+class SendStreamWrapper(abc.ByteSendStream):
+ _stream: trio.abc.SendStream
+
+ async def send(self, item: bytes) -> None:
+ try:
+ await self._stream.send_all(item)
+ except trio.ClosedResourceError as exc:
+ raise ClosedResourceError from exc.__cause__
+ except trio.BrokenResourceError as exc:
+ raise BrokenResourceError from exc.__cause__
+
+ async def aclose(self) -> None:
+ await self._stream.aclose()
+
+
+@dataclass(eq=False)
+class Process(abc.Process):
+ _process: trio.Process
+ _stdin: abc.ByteSendStream | None
+ _stdout: abc.ByteReceiveStream | None
+ _stderr: abc.ByteReceiveStream | None
+
+ async def aclose(self) -> None:
+ if self._stdin:
+ await self._stdin.aclose()
+ if self._stdout:
+ await self._stdout.aclose()
+ if self._stderr:
+ await self._stderr.aclose()
+
+ await self.wait()
+
+ async def wait(self) -> int:
+ return await self._process.wait()
+
+ def terminate(self) -> None:
+ self._process.terminate()
+
+ def kill(self) -> None:
+ self._process.kill()
+
+ def send_signal(self, signal: Signals) -> None:
+ self._process.send_signal(signal)
+
+ @property
+ def pid(self) -> int:
+ return self._process.pid
+
+ @property
+ def returncode(self) -> int | None:
+ return self._process.returncode
+
+ @property
+ def stdin(self) -> abc.ByteSendStream | None:
+ return self._stdin
+
+ @property
+ def stdout(self) -> abc.ByteReceiveStream | None:
+ return self._stdout
+
+ @property
+ def stderr(self) -> abc.ByteReceiveStream | None:
+ return self._stderr
+
+
+async def open_process(
+ command: str | bytes | Sequence[str | bytes],
+ *,
+ shell: bool,
+ stdin: int | IO[Any] | None,
+ stdout: int | IO[Any] | None,
+ stderr: int | IO[Any] | None,
+ cwd: str | bytes | PathLike | None = None,
+ env: Mapping[str, str] | None = None,
+ start_new_session: bool = False,
+) -> Process:
+ process = await trio_open_process( # type: ignore[misc]
+ command, # type: ignore[arg-type]
+ stdin=stdin,
+ stdout=stdout,
+ stderr=stderr,
+ shell=shell,
+ cwd=cwd,
+ env=env,
+ start_new_session=start_new_session,
+ )
+ stdin_stream = SendStreamWrapper(process.stdin) if process.stdin else None
+ stdout_stream = ReceiveStreamWrapper(process.stdout) if process.stdout else None
+ stderr_stream = ReceiveStreamWrapper(process.stderr) if process.stderr else None
+ return Process(process, stdin_stream, stdout_stream, stderr_stream)
+
+
+class _ProcessPoolShutdownInstrument(trio.abc.Instrument):
+ def after_run(self) -> None:
+ super().after_run()
+
+
+current_default_worker_process_limiter: RunVar = RunVar(
+ "current_default_worker_process_limiter"
+)
+
+
+async def _shutdown_process_pool(workers: set[Process]) -> None:
+ process: Process
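+ # Sleep until the event loop shuts down (cancelling this system task),
+ # then reap any worker processes that are still alive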
+ try:
+ await sleep(math.inf)
+ except trio.Cancelled:
+ for process in workers:
+ if process.returncode is None:
+ process.kill()
+
+ with CancelScope(shield=True):
+ for process in workers:
+ await process.aclose()
+
+
+def setup_process_pool_exit_at_shutdown(workers: set[Process]) -> None:
+ trio.lowlevel.spawn_system_task(_shutdown_process_pool, workers)
+
+
+#
+# Sockets and networking
+#
+
+
+class _TrioSocketMixin(Generic[T_SockAddr]):
+ def __init__(self, trio_socket: TrioSocketType) -> None:
+ self._trio_socket = trio_socket
+ self._closed = False
+
+ def _check_closed(self) -> None:
+ if self._closed:
+ raise ClosedResourceError
+ if self._trio_socket.fileno() < 0:
+ raise BrokenResourceError
+
+ @property
+ def _raw_socket(self) -> socket.socket:
+ return self._trio_socket._sock # type: ignore[attr-defined]
+
+ async def aclose(self) -> None:
+ if self._trio_socket.fileno() >= 0:
+ self._closed = True
+ self._trio_socket.close()
+
+ def _convert_socket_error(self, exc: BaseException) -> NoReturn:
+ if isinstance(exc, trio.ClosedResourceError):
+ raise ClosedResourceError from exc
+ elif self._trio_socket.fileno() < 0 and self._closed:
+ raise ClosedResourceError from None
+ elif isinstance(exc, OSError):
+ raise BrokenResourceError from exc
+ else:
+ raise exc
+
+
+class SocketStream(_TrioSocketMixin, abc.SocketStream):
+ def __init__(self, trio_socket: TrioSocketType) -> None:
+ super().__init__(trio_socket)
+ self._receive_guard = ResourceGuard("reading from")
+ self._send_guard = ResourceGuard("writing to")
+
+ async def receive(self, max_bytes: int = 65536) -> bytes:
+ with self._receive_guard:
+ try:
+ data = await self._trio_socket.recv(max_bytes)
+ except BaseException as exc:
+ self._convert_socket_error(exc)
+
+ if data:
+ return data
+ else:
+ raise EndOfStream
+
+ async def send(self, item: bytes) -> None:
+ with self._send_guard:
+ view = memoryview(item)
+ while view:
+ try:
+ bytes_sent = await self._trio_socket.send(view)
+ except BaseException as exc:
+ self._convert_socket_error(exc)
+
+ view = view[bytes_sent:]
+
+ async def send_eof(self) -> None:
+ self._trio_socket.shutdown(socket.SHUT_WR)
+
+
+class UNIXSocketStream(SocketStream, abc.UNIXSocketStream):
+ async def receive_fds(self, msglen: int, maxfds: int) -> tuple[bytes, list[int]]:
+ if not isinstance(msglen, int) or msglen < 0:
+ raise ValueError("msglen must be a non-negative integer")
+ if not isinstance(maxfds, int) or maxfds < 1:
+ raise ValueError("maxfds must be a positive integer")
+
+ fds = array.array("i")
+ await checkpoint()
+ with self._receive_guard:
+ while True:
+ try:
+ message, ancdata, flags, addr = await self._trio_socket.recvmsg(
+ msglen, socket.CMSG_LEN(maxfds * fds.itemsize)
+ )
+ except BaseException as exc:
+ self._convert_socket_error(exc)
+ else:
+ if not message and not ancdata:
+ raise EndOfStream
+
+ break
+
+ for cmsg_level, cmsg_type, cmsg_data in ancdata:
+ if cmsg_level != socket.SOL_SOCKET or cmsg_type != socket.SCM_RIGHTS:
+ raise RuntimeError(
+ f"Received unexpected ancillary data; message = {message!r}, "
+ f"cmsg_level = {cmsg_level}, cmsg_type = {cmsg_type}"
+ )
+
+ fds.frombytes(cmsg_data[: len(cmsg_data) - (len(cmsg_data) % fds.itemsize)])
+
+ return message, list(fds)
+
+ async def send_fds(self, message: bytes, fds: Collection[int | IOBase]) -> None:
+ if not message:
+ raise ValueError("message must not be empty")
+ if not fds:
+ raise ValueError("fds must not be empty")
+
+ filenos: list[int] = []
+ for fd in fds:
+ if isinstance(fd, int):
+ filenos.append(fd)
+ elif isinstance(fd, IOBase):
+ filenos.append(fd.fileno())
+
+ fdarray = array.array("i", filenos)
+ await checkpoint()
+ with self._send_guard:
+ while True:
+ try:
+ await self._trio_socket.sendmsg(
+ [message],
+ [
+ (
+ socket.SOL_SOCKET,
+ socket.SCM_RIGHTS, # type: ignore[list-item]
+ fdarray,
+ )
+ ],
+ )
+ break
+ except BaseException as exc:
+ self._convert_socket_error(exc)
+
+
+class TCPSocketListener(_TrioSocketMixin, abc.SocketListener):
+ def __init__(self, raw_socket: socket.socket):
+ super().__init__(trio.socket.from_stdlib_socket(raw_socket))
+ self._accept_guard = ResourceGuard("accepting connections from")
+
+ async def accept(self) -> SocketStream:
+ with self._accept_guard:
+ try:
+ trio_socket, _addr = await self._trio_socket.accept()
+ except BaseException as exc:
+ self._convert_socket_error(exc)
+
+ trio_socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
+ return SocketStream(trio_socket)
+
+
+class UNIXSocketListener(_TrioSocketMixin, abc.SocketListener):
+ def __init__(self, raw_socket: socket.socket):
+ super().__init__(trio.socket.from_stdlib_socket(raw_socket))
+ self._accept_guard = ResourceGuard("accepting connections from")
+
+ async def accept(self) -> UNIXSocketStream:
+ with self._accept_guard:
+ try:
+ trio_socket, _addr = await self._trio_socket.accept()
+ except BaseException as exc:
+ self._convert_socket_error(exc)
+
+ return UNIXSocketStream(trio_socket)
+
+
+class UDPSocket(_TrioSocketMixin[IPSockAddrType], abc.UDPSocket):
+ def __init__(self, trio_socket: TrioSocketType) -> None:
+ super().__init__(trio_socket)
+ self._receive_guard = ResourceGuard("reading from")
+ self._send_guard = ResourceGuard("writing to")
+
+ async def receive(self) -> tuple[bytes, IPSockAddrType]:
+ with self._receive_guard:
+ try:
+ data, addr = await self._trio_socket.recvfrom(65536)
+ return data, convert_ipv6_sockaddr(addr)
+ except BaseException as exc:
+ self._convert_socket_error(exc)
+
+ async def send(self, item: UDPPacketType) -> None:
+ with self._send_guard:
+ try:
+ await self._trio_socket.sendto(*item)
+ except BaseException as exc:
+ self._convert_socket_error(exc)
+
+
+class ConnectedUDPSocket(_TrioSocketMixin[IPSockAddrType], abc.ConnectedUDPSocket):
+ def __init__(self, trio_socket: TrioSocketType) -> None:
+ super().__init__(trio_socket)
+ self._receive_guard = ResourceGuard("reading from")
+ self._send_guard = ResourceGuard("writing to")
+
+ async def receive(self) -> bytes:
+ with self._receive_guard:
+ try:
+ return await self._trio_socket.recv(65536)
+ except BaseException as exc:
+ self._convert_socket_error(exc)
+
+ async def send(self, item: bytes) -> None:
+ with self._send_guard:
+ try:
+ await self._trio_socket.send(item)
+ except BaseException as exc:
+ self._convert_socket_error(exc)
+
+
+async def connect_tcp(
+ host: str, port: int, local_address: IPSockAddrType | None = None
+) -> SocketStream:
+ family = socket.AF_INET6 if ":" in host else socket.AF_INET
+ trio_socket = trio.socket.socket(family)
+ trio_socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
+ if local_address:
+ await trio_socket.bind(local_address)
+
+ try:
+ await trio_socket.connect((host, port))
+ except BaseException:
+ trio_socket.close()
+ raise
+
+ return SocketStream(trio_socket)
+
+
+async def connect_unix(path: str) -> UNIXSocketStream:
+ trio_socket = trio.socket.socket(socket.AF_UNIX)
+ try:
+ await trio_socket.connect(path)
+ except BaseException:
+ trio_socket.close()
+ raise
+
+ return UNIXSocketStream(trio_socket)
+
+
+async def create_udp_socket(
+ family: socket.AddressFamily,
+ local_address: IPSockAddrType | None,
+ remote_address: IPSockAddrType | None,
+ reuse_port: bool,
+) -> UDPSocket | ConnectedUDPSocket:
+ trio_socket = trio.socket.socket(family=family, type=socket.SOCK_DGRAM)
+
+ if reuse_port:
+ trio_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
+
+ if local_address:
+ await trio_socket.bind(local_address)
+
+ if remote_address:
+ await trio_socket.connect(remote_address)
+ return ConnectedUDPSocket(trio_socket)
+ else:
+ return UDPSocket(trio_socket)
+
+
+getaddrinfo = trio.socket.getaddrinfo
+getnameinfo = trio.socket.getnameinfo
+
+
+async def wait_socket_readable(sock: socket.socket) -> None:
+ try:
+ await wait_readable(sock)
+ except trio.ClosedResourceError as exc:
+ raise ClosedResourceError().with_traceback(exc.__traceback__) from None
+ except trio.BusyResourceError:
+ raise BusyResourceError("reading from") from None
+
+
+async def wait_socket_writable(sock: socket.socket) -> None:
+ try:
+ await wait_writable(sock)
+ except trio.ClosedResourceError as exc:
+ raise ClosedResourceError().with_traceback(exc.__traceback__) from None
+ except trio.BusyResourceError:
+ raise BusyResourceError("writing to") from None
+
+
+#
+# Synchronization
+#
+
+
+class Event(BaseEvent):
+ def __new__(cls) -> Event:
+ return object.__new__(cls)
+
+ def __init__(self) -> None:
+ self.__original = trio.Event()
+
+ def is_set(self) -> bool:
+ return self.__original.is_set()
+
+ async def wait(self) -> None:
+ return await self.__original.wait()
+
+ def statistics(self) -> EventStatistics:
+ orig_statistics = self.__original.statistics()
+ return EventStatistics(tasks_waiting=orig_statistics.tasks_waiting)
+
+ def set(self) -> DeprecatedAwaitable:
+ self.__original.set()
+ return DeprecatedAwaitable(self.set)
+
+
+class CapacityLimiter(BaseCapacityLimiter):
+ def __new__(cls, *args: object, **kwargs: object) -> CapacityLimiter:
+ return object.__new__(cls)
+
+ def __init__(
+ self, *args: Any, original: trio.CapacityLimiter | None = None
+ ) -> None:
+ self.__original = original or trio.CapacityLimiter(*args)
+
+ async def __aenter__(self) -> None:
+ return await self.__original.__aenter__()
+
+ async def __aexit__(
+ self,
+ exc_type: type[BaseException] | None,
+ exc_val: BaseException | None,
+ exc_tb: TracebackType | None,
+ ) -> None:
+ await self.__original.__aexit__(exc_type, exc_val, exc_tb)
+
+ @property
+ def total_tokens(self) -> float:
+ return self.__original.total_tokens
+
+ @total_tokens.setter
+ def total_tokens(self, value: float) -> None:
+ self.__original.total_tokens = value
+
+ @property
+ def borrowed_tokens(self) -> int:
+ return self.__original.borrowed_tokens
+
+ @property
+ def available_tokens(self) -> float:
+ return self.__original.available_tokens
+
+ def acquire_nowait(self) -> DeprecatedAwaitable:
+ self.__original.acquire_nowait()
+ return DeprecatedAwaitable(self.acquire_nowait)
+
+ def acquire_on_behalf_of_nowait(self, borrower: object) -> DeprecatedAwaitable:
+ self.__original.acquire_on_behalf_of_nowait(borrower)
+ return DeprecatedAwaitable(self.acquire_on_behalf_of_nowait)
+
+ async def acquire(self) -> None:
+ await self.__original.acquire()
+
+ async def acquire_on_behalf_of(self, borrower: object) -> None:
+ await self.__original.acquire_on_behalf_of(borrower)
+
+ def release(self) -> None:
+ return self.__original.release()
+
+ def release_on_behalf_of(self, borrower: object) -> None:
+ return self.__original.release_on_behalf_of(borrower)
+
+ def statistics(self) -> CapacityLimiterStatistics:
+ orig = self.__original.statistics()
+ return CapacityLimiterStatistics(
+ borrowed_tokens=orig.borrowed_tokens,
+ total_tokens=orig.total_tokens,
+ borrowers=orig.borrowers,
+ tasks_waiting=orig.tasks_waiting,
+ )
+
+
+_capacity_limiter_wrapper: RunVar = RunVar("_capacity_limiter_wrapper")
+
+
+def current_default_thread_limiter() -> CapacityLimiter:
+ try:
+ return _capacity_limiter_wrapper.get()
+ except LookupError:
+ limiter = CapacityLimiter(
+ original=trio.to_thread.current_default_thread_limiter()
+ )
+ _capacity_limiter_wrapper.set(limiter)
+ return limiter
+
+
+#
+# Signal handling
+#
+
+
+class _SignalReceiver(DeprecatedAsyncContextManager["_SignalReceiver"]):
+ _iterator: AsyncIterator[int]
+
+ def __init__(self, signals: tuple[Signals, ...]):
+ self._signals = signals
+
+ def __enter__(self) -> _SignalReceiver:
+ self._cm = trio.open_signal_receiver(*self._signals)
+ self._iterator = self._cm.__enter__()
+ return self
+
+ def __exit__(
+ self,
+ exc_type: type[BaseException] | None,
+ exc_val: BaseException | None,
+ exc_tb: TracebackType | None,
+ ) -> bool | None:
+ return self._cm.__exit__(exc_type, exc_val, exc_tb)
+
+ def __aiter__(self) -> _SignalReceiver:
+ return self
+
+ async def __anext__(self) -> Signals:
+ signum = await self._iterator.__anext__()
+ return Signals(signum)
+
+
+def open_signal_receiver(*signals: Signals) -> _SignalReceiver:
+ return _SignalReceiver(signals)
+
+
+#
+# Testing and debugging
+#
+
+
+def get_current_task() -> TaskInfo:
+ task = trio_lowlevel.current_task()
+
+ parent_id = None
+ if task.parent_nursery and task.parent_nursery.parent_task:
+ parent_id = id(task.parent_nursery.parent_task)
+
+ return TaskInfo(id(task), parent_id, task.name, task.coro)
+
+
+def get_running_tasks() -> list[TaskInfo]:
+ root_task = trio_lowlevel.current_root_task()
+ task_infos = [TaskInfo(id(root_task), None, root_task.name, root_task.coro)]
+ nurseries = root_task.child_nurseries
+ while nurseries:
+ new_nurseries: list[trio.Nursery] = []
+ for nursery in nurseries:
+ for task in nursery.child_tasks:
+ task_infos.append(
+ TaskInfo(id(task), id(nursery.parent_task), task.name, task.coro)
+ )
+ new_nurseries.extend(task.child_nurseries)
+
+ nurseries = new_nurseries
+
+ return task_infos
+
+
+def wait_all_tasks_blocked() -> Awaitable[None]:
+ import trio.testing
+
+ return trio.testing.wait_all_tasks_blocked()
+
+
+class TestRunner(abc.TestRunner):
+ def __init__(self, **options: Any) -> None:
+ from collections import deque
+ from queue import Queue
+
+ self._call_queue: Queue[Callable[..., object]] = Queue()
+ self._result_queue: deque[Outcome] = deque()
+ self._stop_event: trio.Event | None = None
+ self._nursery: trio.Nursery | None = None
+ self._options = options
+
+ async def _trio_main(self) -> None:
+ self._stop_event = trio.Event()
+ async with trio.open_nursery() as self._nursery:
+ await self._stop_event.wait()
+
+ async def _call_func(
+ self, func: Callable[..., Awaitable[object]], args: tuple, kwargs: dict
+ ) -> None:
+ try:
+ retval = await func(*args, **kwargs)
+ except BaseException as exc:
+ self._result_queue.append(Error(exc))
+ else:
+ self._result_queue.append(Value(retval))
+
+ def _main_task_finished(self, outcome: object) -> None:
+ self._nursery = None
+
+ def _get_nursery(self) -> trio.Nursery:
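+ # On first use, start trio in guest mode on this thread and pump the
+ # queued callbacks until the main task has opened its nursery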
+ if self._nursery is None:
+ trio.lowlevel.start_guest_run(
+ self._trio_main,
+ run_sync_soon_threadsafe=self._call_queue.put,
+ done_callback=self._main_task_finished,
+ **self._options,
+ )
+ while self._nursery is None:
+ self._call_queue.get()()
+
+ return self._nursery
+
+ def _call(
+ self, func: Callable[..., Awaitable[T_Retval]], *args: object, **kwargs: object
+ ) -> T_Retval:
+ self._get_nursery().start_soon(self._call_func, func, args, kwargs)
+ while not self._result_queue:
+ self._call_queue.get()()
+
+ outcome = self._result_queue.pop()
+ return outcome.unwrap()
+
+ def close(self) -> None:
+ if self._stop_event:
+ self._stop_event.set()
+ while self._nursery is not None:
+ self._call_queue.get()()
+
+ def run_asyncgen_fixture(
+ self,
+ fixture_func: Callable[..., AsyncGenerator[T_Retval, Any]],
+ kwargs: dict[str, Any],
+ ) -> Iterable[T_Retval]:
+ async def fixture_runner(*, task_status: TaskStatus[T_Retval]) -> None:
+ agen = fixture_func(**kwargs)
+ retval = await agen.asend(None)
+ task_status.started(retval)
+ await teardown_event.wait()
+ try:
+ await agen.asend(None)
+ except StopAsyncIteration:
+ pass
+ else:
+ await agen.aclose()
+ raise RuntimeError("Async generator fixture did not stop")
+
+ teardown_event = trio.Event()
+ fixture_value = self._call(lambda: self._get_nursery().start(fixture_runner))
+ yield fixture_value
+ teardown_event.set()
+
+ def run_fixture(
+ self,
+ fixture_func: Callable[..., Coroutine[Any, Any, T_Retval]],
+ kwargs: dict[str, Any],
+ ) -> T_Retval:
+ return self._call(fixture_func, **kwargs)
+
+ def run_test(
+ self, test_func: Callable[..., Coroutine[Any, Any, Any]], kwargs: dict[str, Any]
+ ) -> None:
+ self._call(test_func, **kwargs)
diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/_core/__init__.py b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/__init__.cpython-312.pyc
new file mode 100644
index 00000000..7fd6528b
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/__init__.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_compat.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_compat.cpython-312.pyc
new file mode 100644
index 00000000..b299ac70
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_compat.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_eventloop.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_eventloop.cpython-312.pyc
new file mode 100644
index 00000000..69b794da
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_eventloop.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_exceptions.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_exceptions.cpython-312.pyc
new file mode 100644
index 00000000..e7ad8596
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_exceptions.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_fileio.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_fileio.cpython-312.pyc
new file mode 100644
index 00000000..d4c12e35
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_fileio.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_resources.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_resources.cpython-312.pyc
new file mode 100644
index 00000000..93f3ab4b
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_resources.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_signals.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_signals.cpython-312.pyc
new file mode 100644
index 00000000..a345a263
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_signals.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_sockets.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_sockets.cpython-312.pyc
new file mode 100644
index 00000000..3f1e0263
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_sockets.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_streams.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_streams.cpython-312.pyc
new file mode 100644
index 00000000..d90b4e4f
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_streams.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_subprocesses.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_subprocesses.cpython-312.pyc
new file mode 100644
index 00000000..46bad2e8
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_subprocesses.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_synchronization.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_synchronization.cpython-312.pyc
new file mode 100644
index 00000000..e26147a1
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_synchronization.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_tasks.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_tasks.cpython-312.pyc
new file mode 100644
index 00000000..f26139b3
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_tasks.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_testing.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_testing.cpython-312.pyc
new file mode 100644
index 00000000..eaadc412
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_testing.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_typedattr.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_typedattr.cpython-312.pyc
new file mode 100644
index 00000000..9ce132e0
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_typedattr.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/_core/_compat.py b/Backend/venv/lib/python3.12/site-packages/anyio/_core/_compat.py
new file mode 100644
index 00000000..22d29ab8
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/anyio/_core/_compat.py
@@ -0,0 +1,217 @@
+from __future__ import annotations
+
+from abc import ABCMeta, abstractmethod
+from contextlib import AbstractContextManager
+from types import TracebackType
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ AsyncContextManager,
+ Callable,
+ ContextManager,
+ Generator,
+ Generic,
+ Iterable,
+ List,
+ TypeVar,
+ Union,
+ overload,
+)
+from warnings import warn
+
+if TYPE_CHECKING:
+ from ._testing import TaskInfo
+else:
+ TaskInfo = object
+
+T = TypeVar("T")
+AnyDeprecatedAwaitable = Union[
+ "DeprecatedAwaitable",
+ "DeprecatedAwaitableFloat",
+ "DeprecatedAwaitableList[T]",
+ TaskInfo,
+]
+
+
+@overload
+async def maybe_async(__obj: TaskInfo) -> TaskInfo:
+ ...
+
+
+@overload
+async def maybe_async(__obj: DeprecatedAwaitableFloat) -> float:
+ ...
+
+
+@overload
+async def maybe_async(__obj: DeprecatedAwaitableList[T]) -> list[T]:
+ ...
+
+
+@overload
+async def maybe_async(__obj: DeprecatedAwaitable) -> None:
+ ...
+
+
+async def maybe_async(
+ __obj: AnyDeprecatedAwaitable[T],
+) -> TaskInfo | float | list[T] | None:
+ """
+ Await on the given object if necessary.
+
+ This function is intended to bridge the gap between AnyIO 2.x and 3.x where some functions and
+ methods were converted from coroutine functions into regular functions.
+
+ Do **not** try to use this for any other purpose!
+
+ :return: the result of awaiting on the object if coroutine, or the object itself otherwise
+
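+ For example, ``current_time()`` had to be awaited on AnyIO 2.x but returns a
+ plain value on 3.x; this sketch works on both::
+
+     from anyio import current_time, maybe_async
+
+     now = await maybe_async(current_time())
+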
+ .. versionadded:: 2.2
+
+ """
+ return __obj._unwrap()
+
+
+class _ContextManagerWrapper:
+ def __init__(self, cm: ContextManager[T]):
+ self._cm = cm
+
+ async def __aenter__(self) -> T:
+ return self._cm.__enter__()
+
+ async def __aexit__(
+ self,
+ exc_type: type[BaseException] | None,
+ exc_val: BaseException | None,
+ exc_tb: TracebackType | None,
+ ) -> bool | None:
+ return self._cm.__exit__(exc_type, exc_val, exc_tb)
+
+
+def maybe_async_cm(
+ cm: ContextManager[T] | AsyncContextManager[T],
+) -> AsyncContextManager[T]:
+ """
+ Wrap a regular context manager as an async one if necessary.
+
+ This function is intended to bridge the gap between AnyIO 2.x and 3.x where some functions and
+ methods were changed to return regular context managers instead of async ones.
+
+ :param cm: a regular or async context manager
+ :return: an async context manager
+
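+ For example, with a timeout context (a regular context manager on AnyIO 3.x)::
+
+     from anyio import fail_after, maybe_async_cm
+
+     async with maybe_async_cm(fail_after(5)):
+         ...
+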
+ .. versionadded:: 2.2
+
+ """
+ if not isinstance(cm, AbstractContextManager):
+ raise TypeError("Given object is not a context manager")
+
+ return _ContextManagerWrapper(cm)
+
+
+def _warn_deprecation(
+ awaitable: AnyDeprecatedAwaitable[Any], stacklevel: int = 1
+) -> None:
+ warn(
+ f'Awaiting on {awaitable._name}() is deprecated. Use "await '
+ f'anyio.maybe_async({awaitable._name}(...))" if you have to support both AnyIO 2.x '
+ f'and 3.x, or just remove the "await" if you are completely migrating to AnyIO 3+.',
+ DeprecationWarning,
+ stacklevel=stacklevel + 1,
+ )
+
+
+class DeprecatedAwaitable:
+ def __init__(self, func: Callable[..., DeprecatedAwaitable]):
+ self._name = f"{func.__module__}.{func.__qualname__}"
+
+ def __await__(self) -> Generator[None, None, None]:
+ _warn_deprecation(self)
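+ # The unreachable "yield" below makes __await__ a generator function
+ # without ever producing a value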
+ if False:
+ yield
+
+ def __reduce__(self) -> tuple[type[None], tuple[()]]:
+ return type(None), ()
+
+ def _unwrap(self) -> None:
+ return None
+
+
+class DeprecatedAwaitableFloat(float):
+ def __new__(
+ cls, x: float, func: Callable[..., DeprecatedAwaitableFloat]
+ ) -> DeprecatedAwaitableFloat:
+ return super().__new__(cls, x)
+
+ def __init__(self, x: float, func: Callable[..., DeprecatedAwaitableFloat]):
+ self._name = f"{func.__module__}.{func.__qualname__}"
+
+ def __await__(self) -> Generator[None, None, float]:
+ _warn_deprecation(self)
+ if False:
+ yield
+
+ return float(self)
+
+ def __reduce__(self) -> tuple[type[float], tuple[float]]:
+ return float, (float(self),)
+
+ def _unwrap(self) -> float:
+ return float(self)
+
+
+class DeprecatedAwaitableList(List[T]):
+ def __init__(
+ self,
+ iterable: Iterable[T] = (),
+ *,
+ func: Callable[..., DeprecatedAwaitableList[T]],
+ ):
+ super().__init__(iterable)
+ self._name = f"{func.__module__}.{func.__qualname__}"
+
+ def __await__(self) -> Generator[None, None, list[T]]:
+ _warn_deprecation(self)
+ if False:
+ yield
+
+ return list(self)
+
+ def __reduce__(self) -> tuple[type[list[T]], tuple[list[T]]]:
+ return list, (list(self),)
+
+ def _unwrap(self) -> list[T]:
+ return list(self)
+
+
+class DeprecatedAsyncContextManager(Generic[T], metaclass=ABCMeta):
+ @abstractmethod
+ def __enter__(self) -> T:
+ pass
+
+ @abstractmethod
+ def __exit__(
+ self,
+ exc_type: type[BaseException] | None,
+ exc_val: BaseException | None,
+ exc_tb: TracebackType | None,
+ ) -> bool | None:
+ pass
+
+ async def __aenter__(self) -> T:
+ warn(
+ f"Using {self.__class__.__name__} as an async context manager has been deprecated. "
+ f'Use "async with anyio.maybe_async_cm(yourcontextmanager) as foo:" if you have to '
+ f'support both AnyIO 2.x and 3.x, or just remove the "async" from "async with" if '
+ f"you are completely migrating to AnyIO 3+.",
+ DeprecationWarning,
+ )
+ return self.__enter__()
+
+ async def __aexit__(
+ self,
+ exc_type: type[BaseException] | None,
+ exc_val: BaseException | None,
+ exc_tb: TracebackType | None,
+ ) -> bool | None:
+ return self.__exit__(exc_type, exc_val, exc_tb)
diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/_core/_eventloop.py b/Backend/venv/lib/python3.12/site-packages/anyio/_core/_eventloop.py
new file mode 100644
index 00000000..ae986485
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/anyio/_core/_eventloop.py
@@ -0,0 +1,153 @@
+from __future__ import annotations
+
+import math
+import sys
+import threading
+from contextlib import contextmanager
+from importlib import import_module
+from typing import (
+ Any,
+ Awaitable,
+ Callable,
+ Generator,
+ TypeVar,
+)
+
+import sniffio
+
+# This must be updated when new backends are introduced
+from ._compat import DeprecatedAwaitableFloat
+
+BACKENDS = "asyncio", "trio"
+
+T_Retval = TypeVar("T_Retval")
+threadlocals = threading.local()
+
+
+def run(
+ func: Callable[..., Awaitable[T_Retval]],
+ *args: object,
+ backend: str = "asyncio",
+ backend_options: dict[str, Any] | None = None,
+) -> T_Retval:
+ """
+ Run the given coroutine function in an asynchronous event loop.
+
+ The current thread must not be already running an event loop.
+
+ :param func: a coroutine function
+ :param args: positional arguments to ``func``
+ :param backend: name of the asynchronous event loop implementation – currently either
+ ``asyncio`` or ``trio``
+ :param backend_options: keyword arguments to call the backend ``run()`` implementation with
+ (documented :ref:`here <backend options>`)
+ :return: the return value of the coroutine function
+ :raises RuntimeError: if an asynchronous event loop is already running in this thread
+ :raises LookupError: if the named backend is not found
+
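+ A minimal usage sketch (the coroutine body and backend choice are illustrative)::
+
+     import anyio
+
+     async def main() -> None:
+         await anyio.sleep(1)
+
+     anyio.run(main, backend="trio")
+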
+ """
+ try:
+ asynclib_name = sniffio.current_async_library()
+ except sniffio.AsyncLibraryNotFoundError:
+ pass
+ else:
+ raise RuntimeError(f"Already running {asynclib_name} in this thread")
+
+ try:
+ asynclib = import_module(f"..._backends._{backend}", package=__name__)
+ except ImportError as exc:
+ raise LookupError(f"No such backend: {backend}") from exc
+
+ token = None
+ if sniffio.current_async_library_cvar.get(None) is None:
+ # Since we're in control of the event loop, we can cache the name of the async library
+ token = sniffio.current_async_library_cvar.set(backend)
+
+ try:
+ backend_options = backend_options or {}
+ return asynclib.run(func, *args, **backend_options)
+ finally:
+ if token:
+ sniffio.current_async_library_cvar.reset(token)
+
+
+async def sleep(delay: float) -> None:
+ """
+ Pause the current task for the specified duration.
+
+ :param delay: the duration, in seconds
+
+ """
+ return await get_asynclib().sleep(delay)
+
+
+async def sleep_forever() -> None:
+ """
+ Pause the current task until it's cancelled.
+
+ This is a shortcut for ``sleep(math.inf)``.
+
+ .. versionadded:: 3.1
+
+ """
+ await sleep(math.inf)
+
+
+async def sleep_until(deadline: float) -> None:
+ """
+ Pause the current task until the given time.
+
+ :param deadline: the absolute time to wake up at (according to the internal monotonic clock of
+ the event loop)
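+
+ For example, to wake up roughly five seconds from now::
+
+     await sleep_until(current_time() + 5)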
+
+ .. versionadded:: 3.1
+
+ """
+ now = current_time()
+ await sleep(max(deadline - now, 0))
+
+
+def current_time() -> DeprecatedAwaitableFloat:
+ """
+ Return the current value of the event loop's internal clock.
+
+ :return: the clock value (seconds)
+
+ """
+ return DeprecatedAwaitableFloat(get_asynclib().current_time(), current_time)
+
+
+def get_all_backends() -> tuple[str, ...]:
+ """Return a tuple of the names of all built-in backends."""
+ return BACKENDS
+
+
+def get_cancelled_exc_class() -> type[BaseException]:
+ """Return the current async library's cancellation exception class."""
+ return get_asynclib().CancelledError
+
+
+#
+# Private API
+#
+
+
+@contextmanager
+def claim_worker_thread(backend: str) -> Generator[Any, None, None]:
+ module = sys.modules["anyio._backends._" + backend]
+ threadlocals.current_async_module = module
+ try:
+ yield
+ finally:
+ del threadlocals.current_async_module
+
+
+def get_asynclib(asynclib_name: str | None = None) -> Any:
+ if asynclib_name is None:
+ asynclib_name = sniffio.current_async_library()
+
+ modulename = "anyio._backends._" + asynclib_name
+ try:
+ return sys.modules[modulename]
+ except KeyError:
+ return import_module(modulename)
diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/_core/_exceptions.py b/Backend/venv/lib/python3.12/site-packages/anyio/_core/_exceptions.py
new file mode 100644
index 00000000..92ccd77a
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/anyio/_core/_exceptions.py
@@ -0,0 +1,94 @@
+from __future__ import annotations
+
+from traceback import format_exception
+
+
+class BrokenResourceError(Exception):
+ """
+ Raised when trying to use a resource that has been rendered unusable due to external causes
+ (e.g. a send stream whose peer has disconnected).
+ """
+
+
+class BrokenWorkerProcess(Exception):
+ """
+ Raised by :func:`run_sync_in_process` if the worker process terminates abruptly or otherwise
+ misbehaves.
+ """
+
+
+class BusyResourceError(Exception):
+ """Raised when two tasks are trying to read from or write to the same resource concurrently."""
+
+ def __init__(self, action: str):
+ super().__init__(f"Another task is already {action} this resource")
+
+
+class ClosedResourceError(Exception):
+ """Raised when trying to use a resource that has been closed."""
+
+
+class DelimiterNotFound(Exception):
+ """
+ Raised during :meth:`~anyio.streams.buffered.BufferedByteReceiveStream.receive_until` if the
+ maximum number of bytes has been read without the delimiter being found.
+ """
+
+ def __init__(self, max_bytes: int) -> None:
+ super().__init__(
+ f"The delimiter was not found among the first {max_bytes} bytes"
+ )
+
+
+class EndOfStream(Exception):
+ """Raised when trying to read from a stream that has been closed from the other end."""
+
+
+class ExceptionGroup(BaseException):
+ """
+ Raised when multiple exceptions have been raised in a task group.
+
+ :var ~typing.Sequence[BaseException] exceptions: the sequence of exceptions raised together
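+
+ A typical handling sketch (the task group is illustrative)::
+
+     try:
+         async with create_task_group() as tg:
+             ...
+     except ExceptionGroup as excgrp:
+         for exc in excgrp.exceptions:
+             print(type(exc))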
+ """
+
+ SEPARATOR = "----------------------------\n"
+
+ exceptions: list[BaseException]
+
+ def __str__(self) -> str:
+ tracebacks = [
+ "".join(format_exception(type(exc), exc, exc.__traceback__))
+ for exc in self.exceptions
+ ]
+ return (
+ f"{len(self.exceptions)} exceptions were raised in the task group:\n"
+ f"{self.SEPARATOR}{self.SEPARATOR.join(tracebacks)}"
+ )
+
+ def __repr__(self) -> str:
+ exception_reprs = ", ".join(repr(exc) for exc in self.exceptions)
+ return f"<{self.__class__.__name__}: {exception_reprs}>"
+
+
+class IncompleteRead(Exception):
+ """
+ Raised during :meth:`~anyio.streams.buffered.BufferedByteReceiveStream.receive_exactly` or
+ :meth:`~anyio.streams.buffered.BufferedByteReceiveStream.receive_until` if the
+ connection is closed before the requested amount of bytes has been read.
+ """
+
+ def __init__(self) -> None:
+ super().__init__(
+ "The stream was closed before the read operation could be completed"
+ )
+
+
+class TypedAttributeLookupError(LookupError):
+ """
+ Raised by :meth:`~anyio.TypedAttributeProvider.extra` when the given typed attribute is not
+ found and no default value has been given.
+ """
+
+
+class WouldBlock(Exception):
+ """Raised by ``X_nowait`` functions if ``X()`` would block."""
diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/_core/_fileio.py b/Backend/venv/lib/python3.12/site-packages/anyio/_core/_fileio.py
new file mode 100644
index 00000000..35e8e8af
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/anyio/_core/_fileio.py
@@ -0,0 +1,603 @@
+from __future__ import annotations
+
+import os
+import pathlib
+import sys
+from dataclasses import dataclass
+from functools import partial
+from os import PathLike
+from typing import (
+ IO,
+ TYPE_CHECKING,
+ Any,
+ AnyStr,
+ AsyncIterator,
+ Callable,
+ Generic,
+ Iterable,
+ Iterator,
+ Sequence,
+ cast,
+ overload,
+)
+
+from .. import to_thread
+from ..abc import AsyncResource
+
+if sys.version_info >= (3, 8):
+ from typing import Final
+else:
+ from typing_extensions import Final
+
+if TYPE_CHECKING:
+ from _typeshed import OpenBinaryMode, OpenTextMode, ReadableBuffer, WriteableBuffer
+else:
+ ReadableBuffer = OpenBinaryMode = OpenTextMode = WriteableBuffer = object
+
+
+class AsyncFile(AsyncResource, Generic[AnyStr]):
+ """
+ An asynchronous file object.
+
+ This class wraps a standard file object and provides async friendly versions of the following
+ blocking methods (where available on the original file object):
+
+ * read
+ * read1
+ * readline
+ * readlines
+ * readinto
+ * readinto1
+ * write
+ * writelines
+ * truncate
+ * seek
+ * tell
+ * flush
+
+ All other methods are directly passed through.
+
+ This class supports the asynchronous context manager protocol which closes the underlying file
+ at the end of the context block.
+
+ This class also supports asynchronous iteration::
+
+ async with await open_file(...) as f:
+ async for line in f:
+ print(line)
+ """
+
+ def __init__(self, fp: IO[AnyStr]) -> None:
+ self._fp: Any = fp
+
+ def __getattr__(self, name: str) -> object:
+ return getattr(self._fp, name)
+
+ @property
+ def wrapped(self) -> IO[AnyStr]:
+ """The wrapped file object."""
+ return self._fp
+
+ async def __aiter__(self) -> AsyncIterator[AnyStr]:
+ while True:
+ line = await self.readline()
+ if line:
+ yield line
+ else:
+ break
+
+ async def aclose(self) -> None:
+ return await to_thread.run_sync(self._fp.close)
+
+ async def read(self, size: int = -1) -> AnyStr:
+ return await to_thread.run_sync(self._fp.read, size)
+
+ async def read1(self: AsyncFile[bytes], size: int = -1) -> bytes:
+ return await to_thread.run_sync(self._fp.read1, size)
+
+ async def readline(self) -> AnyStr:
+ return await to_thread.run_sync(self._fp.readline)
+
+ async def readlines(self) -> list[AnyStr]:
+ return await to_thread.run_sync(self._fp.readlines)
+
+ async def readinto(self: AsyncFile[bytes], b: WriteableBuffer) -> int:
+ return await to_thread.run_sync(self._fp.readinto, b)
+
+ async def readinto1(self: AsyncFile[bytes], b: WriteableBuffer) -> int:
+ return await to_thread.run_sync(self._fp.readinto1, b)
+
+ @overload
+ async def write(self: AsyncFile[bytes], b: ReadableBuffer) -> int:
+ ...
+
+ @overload
+ async def write(self: AsyncFile[str], b: str) -> int:
+ ...
+
+ async def write(self, b: ReadableBuffer | str) -> int:
+ return await to_thread.run_sync(self._fp.write, b)
+
+ @overload
+ async def writelines(
+ self: AsyncFile[bytes], lines: Iterable[ReadableBuffer]
+ ) -> None:
+ ...
+
+ @overload
+ async def writelines(self: AsyncFile[str], lines: Iterable[str]) -> None:
+ ...
+
+ async def writelines(self, lines: Iterable[ReadableBuffer] | Iterable[str]) -> None:
+ return await to_thread.run_sync(self._fp.writelines, lines)
+
+ async def truncate(self, size: int | None = None) -> int:
+ return await to_thread.run_sync(self._fp.truncate, size)
+
+ async def seek(self, offset: int, whence: int | None = os.SEEK_SET) -> int:
+ return await to_thread.run_sync(self._fp.seek, offset, whence)
+
+ async def tell(self) -> int:
+ return await to_thread.run_sync(self._fp.tell)
+
+ async def flush(self) -> None:
+ return await to_thread.run_sync(self._fp.flush)
+
+
+@overload
+async def open_file(
+ file: str | PathLike[str] | int,
+ mode: OpenBinaryMode,
+ buffering: int = ...,
+ encoding: str | None = ...,
+ errors: str | None = ...,
+ newline: str | None = ...,
+ closefd: bool = ...,
+ opener: Callable[[str, int], int] | None = ...,
+) -> AsyncFile[bytes]:
+ ...
+
+
+@overload
+async def open_file(
+ file: str | PathLike[str] | int,
+ mode: OpenTextMode = ...,
+ buffering: int = ...,
+ encoding: str | None = ...,
+ errors: str | None = ...,
+ newline: str | None = ...,
+ closefd: bool = ...,
+ opener: Callable[[str, int], int] | None = ...,
+) -> AsyncFile[str]:
+ ...
+
+
+async def open_file(
+ file: str | PathLike[str] | int,
+ mode: str = "r",
+ buffering: int = -1,
+ encoding: str | None = None,
+ errors: str | None = None,
+ newline: str | None = None,
+ closefd: bool = True,
+ opener: Callable[[str, int], int] | None = None,
+) -> AsyncFile[Any]:
+ """
+ Open a file asynchronously.
+
+ The arguments are exactly the same as for the builtin :func:`open`.
+
+ :return: an asynchronous file object
+
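+ A short usage sketch (the path is illustrative)::
+
+     async with await open_file("/tmp/example.txt", "w") as f:
+         await f.write("Hello")
+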
+ """
+ fp = await to_thread.run_sync(
+ open, file, mode, buffering, encoding, errors, newline, closefd, opener
+ )
+ return AsyncFile(fp)
+
+
+def wrap_file(file: IO[AnyStr]) -> AsyncFile[AnyStr]:
+ """
+ Wrap an existing file as an asynchronous file.
+
+ :param file: an existing file-like object
+ :return: an asynchronous file object
+
+ """
+ return AsyncFile(file)
+
+
+@dataclass(eq=False)
+class _PathIterator(AsyncIterator["Path"]):
+ iterator: Iterator[PathLike[str]]
+
+ async def __anext__(self) -> Path:
+ nextval = await to_thread.run_sync(next, self.iterator, None, cancellable=True)
+ if nextval is None:
+ raise StopAsyncIteration from None
+
+ return Path(cast("PathLike[str]", nextval))
+
+
+class Path:
+ """
+ An asynchronous version of :class:`pathlib.Path`.
+
+ This class cannot be substituted for :class:`pathlib.Path` or :class:`pathlib.PurePath`, but
+ it is compatible with the :class:`os.PathLike` interface.
+
+ It implements the Python 3.10 version of :class:`pathlib.Path` interface, except for the
+ deprecated :meth:`~pathlib.Path.link_to` method.
+
+ Any methods that do disk I/O need to be awaited on. These methods are:
+
+ * :meth:`~pathlib.Path.absolute`
+ * :meth:`~pathlib.Path.chmod`
+ * :meth:`~pathlib.Path.cwd`
+ * :meth:`~pathlib.Path.exists`
+ * :meth:`~pathlib.Path.expanduser`
+ * :meth:`~pathlib.Path.group`
+ * :meth:`~pathlib.Path.hardlink_to`
+ * :meth:`~pathlib.Path.home`
+ * :meth:`~pathlib.Path.is_block_device`
+ * :meth:`~pathlib.Path.is_char_device`
+ * :meth:`~pathlib.Path.is_dir`
+ * :meth:`~pathlib.Path.is_fifo`
+ * :meth:`~pathlib.Path.is_file`
+ * :meth:`~pathlib.Path.is_mount`
+ * :meth:`~pathlib.Path.lchmod`
+ * :meth:`~pathlib.Path.lstat`
+ * :meth:`~pathlib.Path.mkdir`
+ * :meth:`~pathlib.Path.open`
+ * :meth:`~pathlib.Path.owner`
+ * :meth:`~pathlib.Path.read_bytes`
+ * :meth:`~pathlib.Path.read_text`
+ * :meth:`~pathlib.Path.readlink`
+ * :meth:`~pathlib.Path.rename`
+ * :meth:`~pathlib.Path.replace`
+ * :meth:`~pathlib.Path.rmdir`
+ * :meth:`~pathlib.Path.samefile`
+ * :meth:`~pathlib.Path.stat`
+ * :meth:`~pathlib.Path.touch`
+ * :meth:`~pathlib.Path.unlink`
+ * :meth:`~pathlib.Path.write_bytes`
+ * :meth:`~pathlib.Path.write_text`
+
+ Additionally, the following methods return an async iterator yielding :class:`~.Path` objects:
+
+ * :meth:`~pathlib.Path.glob`
+ * :meth:`~pathlib.Path.iterdir`
+ * :meth:`~pathlib.Path.rglob`
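+
+ A short usage sketch (the directory is illustrative)::
+
+     async for entry in Path("/tmp").iterdir():
+         if await entry.is_file():
+             print(entry.name)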
+ """
+
+ __slots__ = "_path", "__weakref__"
+
+ __weakref__: Any
+
+ def __init__(self, *args: str | PathLike[str]) -> None:
+ self._path: Final[pathlib.Path] = pathlib.Path(*args)
+
+ def __fspath__(self) -> str:
+ return self._path.__fspath__()
+
+ def __str__(self) -> str:
+ return self._path.__str__()
+
+ def __repr__(self) -> str:
+ return f"{self.__class__.__name__}({self.as_posix()!r})"
+
+ def __bytes__(self) -> bytes:
+ return self._path.__bytes__()
+
+ def __hash__(self) -> int:
+ return self._path.__hash__()
+
+ def __eq__(self, other: object) -> bool:
+ target = other._path if isinstance(other, Path) else other
+ return self._path.__eq__(target)
+
+ def __lt__(self, other: Path) -> bool:
+ target = other._path if isinstance(other, Path) else other
+ return self._path.__lt__(target)
+
+ def __le__(self, other: Path) -> bool:
+ target = other._path if isinstance(other, Path) else other
+ return self._path.__le__(target)
+
+ def __gt__(self, other: Path) -> bool:
+ target = other._path if isinstance(other, Path) else other
+ return self._path.__gt__(target)
+
+ def __ge__(self, other: Path) -> bool:
+ target = other._path if isinstance(other, Path) else other
+ return self._path.__ge__(target)
+
+ def __truediv__(self, other: Any) -> Path:
+ return Path(self._path / other)
+
+ def __rtruediv__(self, other: Any) -> Path:
+ return Path(other) / self
+
+ @property
+ def parts(self) -> tuple[str, ...]:
+ return self._path.parts
+
+ @property
+ def drive(self) -> str:
+ return self._path.drive
+
+ @property
+ def root(self) -> str:
+ return self._path.root
+
+ @property
+ def anchor(self) -> str:
+ return self._path.anchor
+
+ @property
+ def parents(self) -> Sequence[Path]:
+ return tuple(Path(p) for p in self._path.parents)
+
+ @property
+ def parent(self) -> Path:
+ return Path(self._path.parent)
+
+ @property
+ def name(self) -> str:
+ return self._path.name
+
+ @property
+ def suffix(self) -> str:
+ return self._path.suffix
+
+ @property
+ def suffixes(self) -> list[str]:
+ return self._path.suffixes
+
+ @property
+ def stem(self) -> str:
+ return self._path.stem
+
+ async def absolute(self) -> Path:
+ path = await to_thread.run_sync(self._path.absolute)
+ return Path(path)
+
+ def as_posix(self) -> str:
+ return self._path.as_posix()
+
+ def as_uri(self) -> str:
+ return self._path.as_uri()
+
+ def match(self, path_pattern: str) -> bool:
+ return self._path.match(path_pattern)
+
+ def is_relative_to(self, *other: str | PathLike[str]) -> bool:
+ try:
+ self.relative_to(*other)
+ return True
+ except ValueError:
+ return False
+
+ async def chmod(self, mode: int, *, follow_symlinks: bool = True) -> None:
+ func = partial(os.chmod, follow_symlinks=follow_symlinks)
+ return await to_thread.run_sync(func, self._path, mode)
+
+ @classmethod
+ async def cwd(cls) -> Path:
+ path = await to_thread.run_sync(pathlib.Path.cwd)
+ return cls(path)
+
+ async def exists(self) -> bool:
+ return await to_thread.run_sync(self._path.exists, cancellable=True)
+
+ async def expanduser(self) -> Path:
+ return Path(await to_thread.run_sync(self._path.expanduser, cancellable=True))
+
+ def glob(self, pattern: str) -> AsyncIterator[Path]:
+ gen = self._path.glob(pattern)
+ return _PathIterator(gen)
+
+ async def group(self) -> str:
+ return await to_thread.run_sync(self._path.group, cancellable=True)
+
+ async def hardlink_to(self, target: str | pathlib.Path | Path) -> None:
+ if isinstance(target, Path):
+ target = target._path
+
+ await to_thread.run_sync(os.link, target, self)
+
+ @classmethod
+ async def home(cls) -> Path:
+ home_path = await to_thread.run_sync(pathlib.Path.home)
+ return cls(home_path)
+
+ def is_absolute(self) -> bool:
+ return self._path.is_absolute()
+
+ async def is_block_device(self) -> bool:
+ return await to_thread.run_sync(self._path.is_block_device, cancellable=True)
+
+ async def is_char_device(self) -> bool:
+ return await to_thread.run_sync(self._path.is_char_device, cancellable=True)
+
+ async def is_dir(self) -> bool:
+ return await to_thread.run_sync(self._path.is_dir, cancellable=True)
+
+ async def is_fifo(self) -> bool:
+ return await to_thread.run_sync(self._path.is_fifo, cancellable=True)
+
+ async def is_file(self) -> bool:
+ return await to_thread.run_sync(self._path.is_file, cancellable=True)
+
+ async def is_mount(self) -> bool:
+ return await to_thread.run_sync(os.path.ismount, self._path, cancellable=True)
+
+ def is_reserved(self) -> bool:
+ return self._path.is_reserved()
+
+ async def is_socket(self) -> bool:
+ return await to_thread.run_sync(self._path.is_socket, cancellable=True)
+
+ async def is_symlink(self) -> bool:
+ return await to_thread.run_sync(self._path.is_symlink, cancellable=True)
+
+ def iterdir(self) -> AsyncIterator[Path]:
+ gen = self._path.iterdir()
+ return _PathIterator(gen)
+
+ def joinpath(self, *args: str | PathLike[str]) -> Path:
+ return Path(self._path.joinpath(*args))
+
+ async def lchmod(self, mode: int) -> None:
+ await to_thread.run_sync(self._path.lchmod, mode)
+
+ async def lstat(self) -> os.stat_result:
+ return await to_thread.run_sync(self._path.lstat, cancellable=True)
+
+ async def mkdir(
+ self, mode: int = 0o777, parents: bool = False, exist_ok: bool = False
+ ) -> None:
+ await to_thread.run_sync(self._path.mkdir, mode, parents, exist_ok)
+
+ @overload
+ async def open(
+ self,
+ mode: OpenBinaryMode,
+ buffering: int = ...,
+ encoding: str | None = ...,
+ errors: str | None = ...,
+ newline: str | None = ...,
+ ) -> AsyncFile[bytes]:
+ ...
+
+ @overload
+ async def open(
+ self,
+ mode: OpenTextMode = ...,
+ buffering: int = ...,
+ encoding: str | None = ...,
+ errors: str | None = ...,
+ newline: str | None = ...,
+ ) -> AsyncFile[str]:
+ ...
+
+ async def open(
+ self,
+ mode: str = "r",
+ buffering: int = -1,
+ encoding: str | None = None,
+ errors: str | None = None,
+ newline: str | None = None,
+ ) -> AsyncFile[Any]:
+ fp = await to_thread.run_sync(
+ self._path.open, mode, buffering, encoding, errors, newline
+ )
+ return AsyncFile(fp)
+
+ async def owner(self) -> str:
+ return await to_thread.run_sync(self._path.owner, cancellable=True)
+
+ async def read_bytes(self) -> bytes:
+ return await to_thread.run_sync(self._path.read_bytes)
+
+ async def read_text(
+ self, encoding: str | None = None, errors: str | None = None
+ ) -> str:
+ return await to_thread.run_sync(self._path.read_text, encoding, errors)
+
+ def relative_to(self, *other: str | PathLike[str]) -> Path:
+ return Path(self._path.relative_to(*other))
+
+ async def readlink(self) -> Path:
+ target = await to_thread.run_sync(os.readlink, self._path)
+ return Path(cast(str, target))
+
+ async def rename(self, target: str | pathlib.PurePath | Path) -> Path:
+ if isinstance(target, Path):
+ target = target._path
+
+ await to_thread.run_sync(self._path.rename, target)
+ return Path(target)
+
+ async def replace(self, target: str | pathlib.PurePath | Path) -> Path:
+ if isinstance(target, Path):
+ target = target._path
+
+ await to_thread.run_sync(self._path.replace, target)
+ return Path(target)
+
+ async def resolve(self, strict: bool = False) -> Path:
+ func = partial(self._path.resolve, strict=strict)
+ return Path(await to_thread.run_sync(func, cancellable=True))
+
+ def rglob(self, pattern: str) -> AsyncIterator[Path]:
+ gen = self._path.rglob(pattern)
+ return _PathIterator(gen)
+
+ async def rmdir(self) -> None:
+ await to_thread.run_sync(self._path.rmdir)
+
+ async def samefile(
+ self, other_path: str | bytes | int | pathlib.Path | Path
+ ) -> bool:
+ if isinstance(other_path, Path):
+ other_path = other_path._path
+
+ return await to_thread.run_sync(
+ self._path.samefile, other_path, cancellable=True
+ )
+
+ async def stat(self, *, follow_symlinks: bool = True) -> os.stat_result:
+ func = partial(os.stat, follow_symlinks=follow_symlinks)
+ return await to_thread.run_sync(func, self._path, cancellable=True)
+
+ async def symlink_to(
+ self,
+ target: str | pathlib.Path | Path,
+ target_is_directory: bool = False,
+ ) -> None:
+ if isinstance(target, Path):
+ target = target._path
+
+ await to_thread.run_sync(self._path.symlink_to, target, target_is_directory)
+
+ async def touch(self, mode: int = 0o666, exist_ok: bool = True) -> None:
+ await to_thread.run_sync(self._path.touch, mode, exist_ok)
+
+ async def unlink(self, missing_ok: bool = False) -> None:
+ try:
+ await to_thread.run_sync(self._path.unlink)
+ except FileNotFoundError:
+ if not missing_ok:
+ raise
+
+ def with_name(self, name: str) -> Path:
+ return Path(self._path.with_name(name))
+
+    def with_stem(self, stem: str) -> Path:
+        # pathlib's with_stem() requires Python 3.9+, so emulate it via with_name()
+        return Path(self._path.with_name(stem + self._path.suffix))
+
+ def with_suffix(self, suffix: str) -> Path:
+ return Path(self._path.with_suffix(suffix))
+
+ async def write_bytes(self, data: bytes) -> int:
+ return await to_thread.run_sync(self._path.write_bytes, data)
+
+ async def write_text(
+ self,
+ data: str,
+ encoding: str | None = None,
+ errors: str | None = None,
+ newline: str | None = None,
+ ) -> int:
+ # Path.write_text() does not support the "newline" parameter before Python 3.10
+ def sync_write_text() -> int:
+ with self._path.open(
+ "w", encoding=encoding, errors=errors, newline=newline
+ ) as fp:
+ return fp.write(data)
+
+ return await to_thread.run_sync(sync_write_text)
+
+
+PathLike.register(Path)
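+
+# A minimal usage sketch (editor's illustration, not part of this file; it
+# assumes the class is re-exported as ``anyio.Path`` and that the path below
+# is writable):
+#
+#     import anyio
+#
+#     async def main() -> None:
+#         path = anyio.Path("/tmp/example.txt")
+#         await path.write_text("hello")        # disk I/O, so it is awaited
+#         print(await path.read_text())
+#
+#     anyio.run(main)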
diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/_core/_resources.py b/Backend/venv/lib/python3.12/site-packages/anyio/_core/_resources.py
new file mode 100644
index 00000000..b9a5344a
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/anyio/_core/_resources.py
@@ -0,0 +1,18 @@
+from __future__ import annotations
+
+from ..abc import AsyncResource
+from ._tasks import CancelScope
+
+
+async def aclose_forcefully(resource: AsyncResource) -> None:
+ """
+ Close an asynchronous resource in a cancelled scope.
+
+ Doing this closes the resource without waiting on anything.
+
+ :param resource: the resource to close
+
+ """
+ with CancelScope() as scope:
+ scope.cancel()
+ await resource.aclose()
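+
+# A hedged usage sketch (editor's illustration; assumes ``aclose_forcefully``
+# and ``connect_tcp`` are re-exported from the top-level ``anyio`` package,
+# and the host name is hypothetical):
+#
+#     import anyio
+#
+#     async def main() -> None:
+#         stream = await anyio.connect_tcp("example.com", 80)
+#         await anyio.aclose_forcefully(stream)  # close without a graceful shutdown
+#
+#     anyio.run(main)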
diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/_core/_signals.py b/Backend/venv/lib/python3.12/site-packages/anyio/_core/_signals.py
new file mode 100644
index 00000000..8ea54af8
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/anyio/_core/_signals.py
@@ -0,0 +1,26 @@
+from __future__ import annotations
+
+from typing import AsyncIterator
+
+from ._compat import DeprecatedAsyncContextManager
+from ._eventloop import get_asynclib
+
+
+def open_signal_receiver(
+ *signals: int,
+) -> DeprecatedAsyncContextManager[AsyncIterator[int]]:
+ """
+ Start receiving operating system signals.
+
+ :param signals: signals to receive (e.g. ``signal.SIGINT``)
+ :return: an asynchronous context manager for an asynchronous iterator which yields signal
+ numbers
+
+ .. warning:: Windows does not support signals natively so it is best to avoid relying on this
+ in cross-platform applications.
+
+ .. warning:: On asyncio, this permanently replaces any previous signal handler for the given
+ signals, as set via :meth:`~asyncio.loop.add_signal_handler`.
+
+ """
+ return get_asynclib().open_signal_receiver(*signals)
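+
+# An illustrative sketch (editor's example; POSIX only, per the warning above):
+#
+#     import signal
+#     import anyio
+#
+#     async def main() -> None:
+#         with anyio.open_signal_receiver(signal.SIGINT, signal.SIGTERM) as signals:
+#             async for signum in signals:
+#                 print(f"received signal {signum}")
+#                 return
+#
+#     anyio.run(main)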
diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/_core/_sockets.py b/Backend/venv/lib/python3.12/site-packages/anyio/_core/_sockets.py
new file mode 100644
index 00000000..e6970bee
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/anyio/_core/_sockets.py
@@ -0,0 +1,607 @@
+from __future__ import annotations
+
+import socket
+import ssl
+import sys
+from ipaddress import IPv6Address, ip_address
+from os import PathLike, chmod
+from pathlib import Path
+from socket import AddressFamily, SocketKind
+from typing import Awaitable, List, Tuple, cast, overload
+
+from .. import to_thread
+from ..abc import (
+ ConnectedUDPSocket,
+ IPAddressType,
+ IPSockAddrType,
+ SocketListener,
+ SocketStream,
+ UDPSocket,
+ UNIXSocketStream,
+)
+from ..streams.stapled import MultiListener
+from ..streams.tls import TLSStream
+from ._eventloop import get_asynclib
+from ._resources import aclose_forcefully
+from ._synchronization import Event
+from ._tasks import create_task_group, move_on_after
+
+if sys.version_info >= (3, 8):
+ from typing import Literal
+else:
+ from typing_extensions import Literal
+
+IPPROTO_IPV6 = getattr(socket, "IPPROTO_IPV6", 41) # https://bugs.python.org/issue29515
+
+GetAddrInfoReturnType = List[
+ Tuple[AddressFamily, SocketKind, int, str, Tuple[str, int]]
+]
+AnyIPAddressFamily = Literal[
+ AddressFamily.AF_UNSPEC, AddressFamily.AF_INET, AddressFamily.AF_INET6
+]
+IPAddressFamily = Literal[AddressFamily.AF_INET, AddressFamily.AF_INET6]
+
+
+# tls_hostname given
+@overload
+async def connect_tcp(
+ remote_host: IPAddressType,
+ remote_port: int,
+ *,
+ local_host: IPAddressType | None = ...,
+ ssl_context: ssl.SSLContext | None = ...,
+ tls_standard_compatible: bool = ...,
+ tls_hostname: str,
+ happy_eyeballs_delay: float = ...,
+) -> TLSStream:
+ ...
+
+
+# ssl_context given
+@overload
+async def connect_tcp(
+ remote_host: IPAddressType,
+ remote_port: int,
+ *,
+ local_host: IPAddressType | None = ...,
+ ssl_context: ssl.SSLContext,
+ tls_standard_compatible: bool = ...,
+ tls_hostname: str | None = ...,
+ happy_eyeballs_delay: float = ...,
+) -> TLSStream:
+ ...
+
+
+# tls=True
+@overload
+async def connect_tcp(
+ remote_host: IPAddressType,
+ remote_port: int,
+ *,
+ local_host: IPAddressType | None = ...,
+ tls: Literal[True],
+ ssl_context: ssl.SSLContext | None = ...,
+ tls_standard_compatible: bool = ...,
+ tls_hostname: str | None = ...,
+ happy_eyeballs_delay: float = ...,
+) -> TLSStream:
+ ...
+
+
+# tls=False
+@overload
+async def connect_tcp(
+ remote_host: IPAddressType,
+ remote_port: int,
+ *,
+ local_host: IPAddressType | None = ...,
+ tls: Literal[False],
+ ssl_context: ssl.SSLContext | None = ...,
+ tls_standard_compatible: bool = ...,
+ tls_hostname: str | None = ...,
+ happy_eyeballs_delay: float = ...,
+) -> SocketStream:
+ ...
+
+
+# No TLS arguments
+@overload
+async def connect_tcp(
+ remote_host: IPAddressType,
+ remote_port: int,
+ *,
+ local_host: IPAddressType | None = ...,
+ happy_eyeballs_delay: float = ...,
+) -> SocketStream:
+ ...
+
+
+async def connect_tcp(
+ remote_host: IPAddressType,
+ remote_port: int,
+ *,
+ local_host: IPAddressType | None = None,
+ tls: bool = False,
+ ssl_context: ssl.SSLContext | None = None,
+ tls_standard_compatible: bool = True,
+ tls_hostname: str | None = None,
+ happy_eyeballs_delay: float = 0.25,
+) -> SocketStream | TLSStream:
+ """
+ Connect to a host using the TCP protocol.
+
+ This function implements the stateless version of the Happy Eyeballs algorithm (RFC
+ 6555). If ``remote_host`` is a host name that resolves to multiple IP addresses,
+ each one is tried until one connection attempt succeeds. If the first attempt does
+    not connect within 250 milliseconds, a second attempt is started using the next
+ address in the list, and so on. On IPv6 enabled systems, an IPv6 address (if
+ available) is tried first.
+
+ When the connection has been established, a TLS handshake will be done if either
+ ``ssl_context`` or ``tls_hostname`` is not ``None``, or if ``tls`` is ``True``.
+
+ :param remote_host: the IP address or host name to connect to
+ :param remote_port: port on the target host to connect to
+ :param local_host: the interface address or name to bind the socket to before connecting
+ :param tls: ``True`` to do a TLS handshake with the connected stream and return a
+ :class:`~anyio.streams.tls.TLSStream` instead
+ :param ssl_context: the SSL context object to use (if omitted, a default context is created)
+ :param tls_standard_compatible: If ``True``, performs the TLS shutdown handshake before closing
+ the stream and requires that the server does this as well. Otherwise,
+ :exc:`~ssl.SSLEOFError` may be raised during reads from the stream.
+ Some protocols, such as HTTP, require this option to be ``False``.
+ See :meth:`~ssl.SSLContext.wrap_socket` for details.
+ :param tls_hostname: host name to check the server certificate against (defaults to the value
+ of ``remote_host``)
+ :param happy_eyeballs_delay: delay (in seconds) before starting the next connection attempt
+ :return: a socket stream object if no TLS handshake was done, otherwise a TLS stream
+ :raises OSError: if the connection attempt fails
+
+ """
+ # Placed here due to https://github.com/python/mypy/issues/7057
+ connected_stream: SocketStream | None = None
+
+ async def try_connect(remote_host: str, event: Event) -> None:
+ nonlocal connected_stream
+ try:
+ stream = await asynclib.connect_tcp(remote_host, remote_port, local_address)
+ except OSError as exc:
+ oserrors.append(exc)
+ return
+ else:
+ if connected_stream is None:
+ connected_stream = stream
+ tg.cancel_scope.cancel()
+ else:
+ await stream.aclose()
+ finally:
+ event.set()
+
+ asynclib = get_asynclib()
+ local_address: IPSockAddrType | None = None
+ family = socket.AF_UNSPEC
+ if local_host:
+ gai_res = await getaddrinfo(str(local_host), None)
+ family, *_, local_address = gai_res[0]
+
+ target_host = str(remote_host)
+ try:
+ addr_obj = ip_address(remote_host)
+ except ValueError:
+ # getaddrinfo() will raise an exception if name resolution fails
+ gai_res = await getaddrinfo(
+ target_host, remote_port, family=family, type=socket.SOCK_STREAM
+ )
+
+ # Organize the list so that the first address is an IPv6 address (if available) and the
+        # second one is an IPv4 address. The rest can be in whatever order.
+ v6_found = v4_found = False
+ target_addrs: list[tuple[socket.AddressFamily, str]] = []
+ for af, *rest, sa in gai_res:
+ if af == socket.AF_INET6 and not v6_found:
+ v6_found = True
+ target_addrs.insert(0, (af, sa[0]))
+ elif af == socket.AF_INET and not v4_found and v6_found:
+ v4_found = True
+ target_addrs.insert(1, (af, sa[0]))
+ else:
+ target_addrs.append((af, sa[0]))
+ else:
+ if isinstance(addr_obj, IPv6Address):
+ target_addrs = [(socket.AF_INET6, addr_obj.compressed)]
+ else:
+ target_addrs = [(socket.AF_INET, addr_obj.compressed)]
+
+ oserrors: list[OSError] = []
+ async with create_task_group() as tg:
+ for i, (af, addr) in enumerate(target_addrs):
+ event = Event()
+ tg.start_soon(try_connect, addr, event)
+ with move_on_after(happy_eyeballs_delay):
+ await event.wait()
+
+ if connected_stream is None:
+ cause = oserrors[0] if len(oserrors) == 1 else asynclib.ExceptionGroup(oserrors)
+ raise OSError("All connection attempts failed") from cause
+
+ if tls or tls_hostname or ssl_context:
+ try:
+ return await TLSStream.wrap(
+ connected_stream,
+ server_side=False,
+ hostname=tls_hostname or str(remote_host),
+ ssl_context=ssl_context,
+ standard_compatible=tls_standard_compatible,
+ )
+ except BaseException:
+ await aclose_forcefully(connected_stream)
+ raise
+
+ return connected_stream
+
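+# A hedged usage sketch (editor's illustration; the host is hypothetical and
+# the function is assumed to be re-exported as ``anyio.connect_tcp``):
+#
+#     import anyio
+#
+#     async def main() -> None:
+#         stream = await anyio.connect_tcp("example.com", 80)
+#         async with stream:
+#             await stream.send(b"GET / HTTP/1.0\r\nHost: example.com\r\n\r\n")
+#             print(await stream.receive())
+#
+#     anyio.run(main)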
+
+async def connect_unix(path: str | PathLike[str]) -> UNIXSocketStream:
+ """
+ Connect to the given UNIX socket.
+
+ Not available on Windows.
+
+ :param path: path to the socket
+ :return: a socket stream object
+
+ """
+ path = str(Path(path))
+ return await get_asynclib().connect_unix(path)
+
+
+async def create_tcp_listener(
+ *,
+ local_host: IPAddressType | None = None,
+ local_port: int = 0,
+ family: AnyIPAddressFamily = socket.AddressFamily.AF_UNSPEC,
+ backlog: int = 65536,
+ reuse_port: bool = False,
+) -> MultiListener[SocketStream]:
+ """
+ Create a TCP socket listener.
+
+ :param local_port: port number to listen on
+ :param local_host: IP address of the interface to listen on. If omitted, listen on
+ all IPv4 and IPv6 interfaces. To listen on all interfaces on a specific address
+ family, use ``0.0.0.0`` for IPv4 or ``::`` for IPv6.
+ :param family: address family (used if ``local_host`` was omitted)
+ :param backlog: maximum number of queued incoming connections (up to a maximum of
+ 2**16, or 65536)
+ :param reuse_port: ``True`` to allow multiple sockets to bind to the same
+ address/port (not supported on Windows)
+ :return: a list of listener objects
+
+ """
+ asynclib = get_asynclib()
+ backlog = min(backlog, 65536)
+ local_host = str(local_host) if local_host is not None else None
+ gai_res = await getaddrinfo(
+ local_host, # type: ignore[arg-type]
+ local_port,
+ family=family,
+ type=socket.SocketKind.SOCK_STREAM if sys.platform == "win32" else 0,
+ flags=socket.AI_PASSIVE | socket.AI_ADDRCONFIG,
+ )
+ listeners: list[SocketListener] = []
+ try:
+ # The set() is here to work around a glibc bug:
+ # https://sourceware.org/bugzilla/show_bug.cgi?id=14969
+ sockaddr: tuple[str, int] | tuple[str, int, int, int]
+ for fam, kind, *_, sockaddr in sorted(set(gai_res)):
+ # Workaround for an uvloop bug where we don't get the correct scope ID for
+ # IPv6 link-local addresses when passing type=socket.SOCK_STREAM to
+ # getaddrinfo(): https://github.com/MagicStack/uvloop/issues/539
+ if sys.platform != "win32" and kind is not SocketKind.SOCK_STREAM:
+ continue
+
+ raw_socket = socket.socket(fam)
+ raw_socket.setblocking(False)
+
+ # For Windows, enable exclusive address use. For others, enable address reuse.
+ if sys.platform == "win32":
+ raw_socket.setsockopt(socket.SOL_SOCKET, socket.SO_EXCLUSIVEADDRUSE, 1)
+ else:
+ raw_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
+
+ if reuse_port:
+ raw_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
+
+ # If only IPv6 was requested, disable dual stack operation
+ if fam == socket.AF_INET6:
+ raw_socket.setsockopt(IPPROTO_IPV6, socket.IPV6_V6ONLY, 1)
+
+ # Workaround for #554
+ if "%" in sockaddr[0]:
+ addr, scope_id = sockaddr[0].split("%", 1)
+ sockaddr = (addr, sockaddr[1], 0, int(scope_id))
+
+ raw_socket.bind(sockaddr)
+ raw_socket.listen(backlog)
+ listener = asynclib.TCPSocketListener(raw_socket)
+ listeners.append(listener)
+ except BaseException:
+ for listener in listeners:
+ await listener.aclose()
+
+ raise
+
+ return MultiListener(listeners)
+
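+# A serving sketch (editor's illustration; ``handle`` is a hypothetical
+# handler coroutine, and ``serve()`` runs until cancelled):
+#
+#     import anyio
+#
+#     async def handle(stream) -> None:
+#         async with stream:
+#             await stream.send(b"hello\n")
+#
+#     async def main() -> None:
+#         listener = await anyio.create_tcp_listener(local_port=8000)
+#         await listener.serve(handle)
+#
+#     anyio.run(main)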
+
+async def create_unix_listener(
+ path: str | PathLike[str],
+ *,
+ mode: int | None = None,
+ backlog: int = 65536,
+) -> SocketListener:
+ """
+ Create a UNIX socket listener.
+
+ Not available on Windows.
+
+ :param path: path of the socket
+ :param mode: permissions to set on the socket
+ :param backlog: maximum number of queued incoming connections (up to a maximum of 2**16, or
+ 65536)
+ :return: a listener object
+
+ .. versionchanged:: 3.0
+        If a socket already exists on the file system at the given path, it will be removed first.
+
+ """
+ path_str = str(path)
+ path = Path(path)
+ if path.is_socket():
+ path.unlink()
+
+ backlog = min(backlog, 65536)
+ raw_socket = socket.socket(socket.AF_UNIX)
+ raw_socket.setblocking(False)
+ try:
+ await to_thread.run_sync(raw_socket.bind, path_str, cancellable=True)
+ if mode is not None:
+ await to_thread.run_sync(chmod, path_str, mode, cancellable=True)
+
+ raw_socket.listen(backlog)
+ return get_asynclib().UNIXSocketListener(raw_socket)
+ except BaseException:
+ raw_socket.close()
+ raise
+
+
+async def create_udp_socket(
+ family: AnyIPAddressFamily = AddressFamily.AF_UNSPEC,
+ *,
+ local_host: IPAddressType | None = None,
+ local_port: int = 0,
+ reuse_port: bool = False,
+) -> UDPSocket:
+ """
+ Create a UDP socket.
+
+ If ``local_port`` has been given, the socket will be bound to this port on the local
+ machine, making this socket suitable for providing UDP based services.
+
+ :param family: address family (``AF_INET`` or ``AF_INET6``) – automatically determined from
+ ``local_host`` if omitted
+ :param local_host: IP address or host name of the local interface to bind to
+ :param local_port: local port to bind to
+ :param reuse_port: ``True`` to allow multiple sockets to bind to the same address/port
+ (not supported on Windows)
+ :return: a UDP socket
+
+ """
+ if family is AddressFamily.AF_UNSPEC and not local_host:
+ raise ValueError('Either "family" or "local_host" must be given')
+
+ if local_host:
+ gai_res = await getaddrinfo(
+ str(local_host),
+ local_port,
+ family=family,
+ type=socket.SOCK_DGRAM,
+ flags=socket.AI_PASSIVE | socket.AI_ADDRCONFIG,
+ )
+ family = cast(AnyIPAddressFamily, gai_res[0][0])
+ local_address = gai_res[0][-1]
+ elif family is AddressFamily.AF_INET6:
+ local_address = ("::", 0)
+ else:
+ local_address = ("0.0.0.0", 0)
+
+ return await get_asynclib().create_udp_socket(
+ family, local_address, None, reuse_port
+ )
+
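+# A UDP echo sketch (editor's illustration; the port number is arbitrary):
+#
+#     import anyio
+#
+#     async def main() -> None:
+#         async with await anyio.create_udp_socket(
+#             local_host="127.0.0.1", local_port=9999
+#         ) as udp:
+#             data, (host, port) = await udp.receive()
+#             await udp.sendto(data, host, port)
+#
+#     anyio.run(main)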
+
+async def create_connected_udp_socket(
+ remote_host: IPAddressType,
+ remote_port: int,
+ *,
+ family: AnyIPAddressFamily = AddressFamily.AF_UNSPEC,
+ local_host: IPAddressType | None = None,
+ local_port: int = 0,
+ reuse_port: bool = False,
+) -> ConnectedUDPSocket:
+ """
+ Create a connected UDP socket.
+
+ Connected UDP sockets can only communicate with the specified remote host/port, and any packets
+ sent from other sources are dropped.
+
+ :param remote_host: remote host to set as the default target
+ :param remote_port: port on the remote host to set as the default target
+ :param family: address family (``AF_INET`` or ``AF_INET6``) – automatically determined from
+ ``local_host`` or ``remote_host`` if omitted
+ :param local_host: IP address or host name of the local interface to bind to
+ :param local_port: local port to bind to
+ :param reuse_port: ``True`` to allow multiple sockets to bind to the same address/port
+ (not supported on Windows)
+ :return: a connected UDP socket
+
+ """
+ local_address = None
+ if local_host:
+ gai_res = await getaddrinfo(
+ str(local_host),
+ local_port,
+ family=family,
+ type=socket.SOCK_DGRAM,
+ flags=socket.AI_PASSIVE | socket.AI_ADDRCONFIG,
+ )
+ family = cast(AnyIPAddressFamily, gai_res[0][0])
+ local_address = gai_res[0][-1]
+
+ gai_res = await getaddrinfo(
+ str(remote_host), remote_port, family=family, type=socket.SOCK_DGRAM
+ )
+ family = cast(AnyIPAddressFamily, gai_res[0][0])
+ remote_address = gai_res[0][-1]
+
+ return await get_asynclib().create_udp_socket(
+ family, local_address, remote_address, reuse_port
+ )
+
+
+async def getaddrinfo(
+ host: bytearray | bytes | str,
+ port: str | int | None,
+ *,
+ family: int | AddressFamily = 0,
+ type: int | SocketKind = 0,
+ proto: int = 0,
+ flags: int = 0,
+) -> GetAddrInfoReturnType:
+ """
+ Look up a numeric IP address given a host name.
+
+ Internationalized domain names are translated according to the (non-transitional) IDNA 2008
+ standard.
+
+ .. note:: 4-tuple IPv6 socket addresses are automatically converted to 2-tuples of
+ (host, port), unlike what :func:`socket.getaddrinfo` does.
+
+ :param host: host name
+ :param port: port number
+    :param family: socket family (``AF_INET``, ...)
+ :param type: socket type (``SOCK_STREAM``, ...)
+ :param proto: protocol number
+ :param flags: flags to pass to upstream ``getaddrinfo()``
+ :return: list of tuples containing (family, type, proto, canonname, sockaddr)
+
+ .. seealso:: :func:`socket.getaddrinfo`
+
+ """
+ # Handle unicode hostnames
+ if isinstance(host, str):
+ try:
+ encoded_host = host.encode("ascii")
+ except UnicodeEncodeError:
+ import idna
+
+ encoded_host = idna.encode(host, uts46=True)
+ else:
+ encoded_host = host
+
+ gai_res = await get_asynclib().getaddrinfo(
+ encoded_host, port, family=family, type=type, proto=proto, flags=flags
+ )
+ return [
+ (family, type, proto, canonname, convert_ipv6_sockaddr(sockaddr))
+ for family, type, proto, canonname, sockaddr in gai_res
+ ]
+
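+# A resolution sketch (editor's illustration; results depend on the local
+# resolver):
+#
+#     import socket
+#     import anyio
+#
+#     async def main() -> None:
+#         results = await anyio.getaddrinfo("localhost", 80, type=socket.SOCK_STREAM)
+#         for family, type_, proto, canonname, sockaddr in results:
+#             print(family, sockaddr)  # IPv6 entries arrive here as 2-tuples
+#
+#     anyio.run(main)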
+
+def getnameinfo(sockaddr: IPSockAddrType, flags: int = 0) -> Awaitable[tuple[str, str]]:
+ """
+ Look up the host name of an IP address.
+
+ :param sockaddr: socket address (e.g. (ipaddress, port) for IPv4)
+ :param flags: flags to pass to upstream ``getnameinfo()``
+ :return: a tuple of (host name, service name)
+
+ .. seealso:: :func:`socket.getnameinfo`
+
+ """
+ return get_asynclib().getnameinfo(sockaddr, flags)
+
+
+def wait_socket_readable(sock: socket.socket) -> Awaitable[None]:
+ """
+ Wait until the given socket has data to be read.
+
+ This does **NOT** work on Windows when using the asyncio backend with a proactor event loop
+ (default on py3.8+).
+
+ .. warning:: Only use this on raw sockets that have not been wrapped by any higher level
+ constructs like socket streams!
+
+ :param sock: a socket object
+ :raises ~anyio.ClosedResourceError: if the socket was closed while waiting for the
+ socket to become readable
+ :raises ~anyio.BusyResourceError: if another task is already waiting for the socket
+ to become readable
+
+ """
+ return get_asynclib().wait_socket_readable(sock)
+
+
+def wait_socket_writable(sock: socket.socket) -> Awaitable[None]:
+ """
+ Wait until the given socket can be written to.
+
+ This does **NOT** work on Windows when using the asyncio backend with a proactor event loop
+ (default on py3.8+).
+
+ .. warning:: Only use this on raw sockets that have not been wrapped by any higher level
+ constructs like socket streams!
+
+ :param sock: a socket object
+ :raises ~anyio.ClosedResourceError: if the socket was closed while waiting for the
+ socket to become writable
+ :raises ~anyio.BusyResourceError: if another task is already waiting for the socket
+ to become writable
+
+ """
+ return get_asynclib().wait_socket_writable(sock)
+
+
+#
+# Private API
+#
+
+
+def convert_ipv6_sockaddr(
+ sockaddr: tuple[str, int, int, int] | tuple[str, int]
+) -> tuple[str, int]:
+ """
+ Convert a 4-tuple IPv6 socket address to a 2-tuple (address, port) format.
+
+ If the scope ID is nonzero, it is added to the address, separated with ``%``.
+ Otherwise the flow id and scope id are simply cut off from the tuple.
+ Any other kinds of socket addresses are returned as-is.
+
+ :param sockaddr: the result of :meth:`~socket.socket.getsockname`
+ :return: the converted socket address
+
+ """
+ # This is more complicated than it should be because of MyPy
+ if isinstance(sockaddr, tuple) and len(sockaddr) == 4:
+ host, port, flowinfo, scope_id = cast(Tuple[str, int, int, int], sockaddr)
+ if scope_id:
+ # PyPy (as of v7.3.11) leaves the interface name in the result, so
+ # we discard it and only get the scope ID from the end
+ # (https://foss.heptapod.net/pypy/pypy/-/issues/3938)
+ host = host.split("%")[0]
+
+ # Add scope_id to the address
+ return f"{host}%{scope_id}", port
+ else:
+ return host, port
+ else:
+ return cast(Tuple[str, int], sockaddr)
diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/_core/_streams.py b/Backend/venv/lib/python3.12/site-packages/anyio/_core/_streams.py
new file mode 100644
index 00000000..54ea2b2b
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/anyio/_core/_streams.py
@@ -0,0 +1,47 @@
+from __future__ import annotations
+
+import math
+from typing import Any, TypeVar, overload
+
+from ..streams.memory import (
+ MemoryObjectReceiveStream,
+ MemoryObjectSendStream,
+ MemoryObjectStreamState,
+)
+
+T_Item = TypeVar("T_Item")
+
+
+@overload
+def create_memory_object_stream(
+ max_buffer_size: float = ...,
+) -> tuple[MemoryObjectSendStream[Any], MemoryObjectReceiveStream[Any]]:
+ ...
+
+
+@overload
+def create_memory_object_stream(
+ max_buffer_size: float = ..., item_type: type[T_Item] = ...
+) -> tuple[MemoryObjectSendStream[T_Item], MemoryObjectReceiveStream[T_Item]]:
+ ...
+
+
+def create_memory_object_stream(
+ max_buffer_size: float = 0, item_type: type[T_Item] | None = None
+) -> tuple[MemoryObjectSendStream[Any], MemoryObjectReceiveStream[Any]]:
+ """
+ Create a memory object stream.
+
+ :param max_buffer_size: number of items held in the buffer until ``send()`` starts blocking
+ :param item_type: type of item, for marking the streams with the right generic type for
+ static typing (not used at run time)
+ :return: a tuple of (send stream, receive stream)
+
+ """
+ if max_buffer_size != math.inf and not isinstance(max_buffer_size, int):
+ raise ValueError("max_buffer_size must be either an integer or math.inf")
+ if max_buffer_size < 0:
+ raise ValueError("max_buffer_size cannot be negative")
+
+ state: MemoryObjectStreamState = MemoryObjectStreamState(max_buffer_size)
+ return MemoryObjectSendStream(state), MemoryObjectReceiveStream(state)
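+
+# A producer/consumer sketch (editor's illustration of the returned pair):
+#
+#     import anyio
+#
+#     async def main() -> None:
+#         send, receive = anyio.create_memory_object_stream(max_buffer_size=5)
+#         async with send, receive:
+#             await send.send("item")
+#             print(await receive.receive())
+#
+#     anyio.run(main)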
diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/_core/_subprocesses.py b/Backend/venv/lib/python3.12/site-packages/anyio/_core/_subprocesses.py
new file mode 100644
index 00000000..1a26ac8c
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/anyio/_core/_subprocesses.py
@@ -0,0 +1,135 @@
+from __future__ import annotations
+
+from io import BytesIO
+from os import PathLike
+from subprocess import DEVNULL, PIPE, CalledProcessError, CompletedProcess
+from typing import (
+ IO,
+ Any,
+ AsyncIterable,
+ Mapping,
+ Sequence,
+ cast,
+)
+
+from ..abc import Process
+from ._eventloop import get_asynclib
+from ._tasks import create_task_group
+
+
+async def run_process(
+ command: str | bytes | Sequence[str | bytes],
+ *,
+ input: bytes | None = None,
+ stdout: int | IO[Any] | None = PIPE,
+ stderr: int | IO[Any] | None = PIPE,
+ check: bool = True,
+ cwd: str | bytes | PathLike[str] | None = None,
+ env: Mapping[str, str] | None = None,
+ start_new_session: bool = False,
+) -> CompletedProcess[bytes]:
+ """
+ Run an external command in a subprocess and wait until it completes.
+
+ .. seealso:: :func:`subprocess.run`
+
+ :param command: either a string to pass to the shell, or an iterable of strings containing the
+ executable name or path and its arguments
+ :param input: bytes passed to the standard input of the subprocess
+ :param stdout: either :data:`subprocess.PIPE` or :data:`subprocess.DEVNULL`
+ :param stderr: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL` or
+ :data:`subprocess.STDOUT`
+ :param check: if ``True``, raise :exc:`~subprocess.CalledProcessError` if the process
+ terminates with a return code other than 0
+ :param cwd: If not ``None``, change the working directory to this before running the command
+ :param env: if not ``None``, this mapping replaces the inherited environment variables from the
+ parent process
+    :param start_new_session: if ``True``, the setsid() system call will be made in the child
+ process prior to the execution of the subprocess. (POSIX only)
+ :return: an object representing the completed process
+ :raises ~subprocess.CalledProcessError: if ``check`` is ``True`` and the process exits with a
+ nonzero return code
+
+ """
+
+ async def drain_stream(stream: AsyncIterable[bytes], index: int) -> None:
+ buffer = BytesIO()
+ async for chunk in stream:
+ buffer.write(chunk)
+
+ stream_contents[index] = buffer.getvalue()
+
+ async with await open_process(
+ command,
+ stdin=PIPE if input else DEVNULL,
+ stdout=stdout,
+ stderr=stderr,
+ cwd=cwd,
+ env=env,
+ start_new_session=start_new_session,
+ ) as process:
+ stream_contents: list[bytes | None] = [None, None]
+ try:
+ async with create_task_group() as tg:
+ if process.stdout:
+ tg.start_soon(drain_stream, process.stdout, 0)
+ if process.stderr:
+ tg.start_soon(drain_stream, process.stderr, 1)
+ if process.stdin and input:
+ await process.stdin.send(input)
+ await process.stdin.aclose()
+
+ await process.wait()
+ except BaseException:
+ process.kill()
+ raise
+
+ output, errors = stream_contents
+ if check and process.returncode != 0:
+ raise CalledProcessError(cast(int, process.returncode), command, output, errors)
+
+ return CompletedProcess(command, cast(int, process.returncode), output, errors)
+
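+# A usage sketch (editor's illustration; the command is chosen arbitrarily):
+#
+#     import anyio
+#
+#     async def main() -> None:
+#         result = await anyio.run_process(["echo", "hello"])
+#         print(result.stdout.decode())
+#
+#     anyio.run(main)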
+
+async def open_process(
+ command: str | bytes | Sequence[str | bytes],
+ *,
+ stdin: int | IO[Any] | None = PIPE,
+ stdout: int | IO[Any] | None = PIPE,
+ stderr: int | IO[Any] | None = PIPE,
+ cwd: str | bytes | PathLike[str] | None = None,
+ env: Mapping[str, str] | None = None,
+ start_new_session: bool = False,
+) -> Process:
+ """
+ Start an external command in a subprocess.
+
+ .. seealso:: :class:`subprocess.Popen`
+
+ :param command: either a string to pass to the shell, or an iterable of strings containing the
+ executable name or path and its arguments
+ :param stdin: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`, a
+ file-like object, or ``None``
+ :param stdout: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`,
+ a file-like object, or ``None``
+ :param stderr: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`,
+ :data:`subprocess.STDOUT`, a file-like object, or ``None``
+ :param cwd: If not ``None``, the working directory is changed before executing
+ :param env: If env is not ``None``, it must be a mapping that defines the environment
+ variables for the new process
+    :param start_new_session: if ``True``, the setsid() system call will be made in the child
+ process prior to the execution of the subprocess. (POSIX only)
+ :return: an asynchronous process object
+
+ """
+ shell = isinstance(command, str)
+ return await get_asynclib().open_process(
+ command,
+ shell=shell,
+ stdin=stdin,
+ stdout=stdout,
+ stderr=stderr,
+ cwd=cwd,
+ env=env,
+ start_new_session=start_new_session,
+ )
diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/_core/_synchronization.py b/Backend/venv/lib/python3.12/site-packages/anyio/_core/_synchronization.py
new file mode 100644
index 00000000..783570c7
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/anyio/_core/_synchronization.py
@@ -0,0 +1,596 @@
+from __future__ import annotations
+
+from collections import deque
+from dataclasses import dataclass
+from types import TracebackType
+from warnings import warn
+
+from ..lowlevel import cancel_shielded_checkpoint, checkpoint, checkpoint_if_cancelled
+from ._compat import DeprecatedAwaitable
+from ._eventloop import get_asynclib
+from ._exceptions import BusyResourceError, WouldBlock
+from ._tasks import CancelScope
+from ._testing import TaskInfo, get_current_task
+
+
+@dataclass(frozen=True)
+class EventStatistics:
+ """
+ :ivar int tasks_waiting: number of tasks waiting on :meth:`~.Event.wait`
+ """
+
+ tasks_waiting: int
+
+
+@dataclass(frozen=True)
+class CapacityLimiterStatistics:
+ """
+ :ivar int borrowed_tokens: number of tokens currently borrowed by tasks
+ :ivar float total_tokens: total number of available tokens
+ :ivar tuple borrowers: tasks or other objects currently holding tokens borrowed from this
+ limiter
+ :ivar int tasks_waiting: number of tasks waiting on :meth:`~.CapacityLimiter.acquire` or
+ :meth:`~.CapacityLimiter.acquire_on_behalf_of`
+ """
+
+ borrowed_tokens: int
+ total_tokens: float
+ borrowers: tuple[object, ...]
+ tasks_waiting: int
+
+
+@dataclass(frozen=True)
+class LockStatistics:
+ """
+ :ivar bool locked: flag indicating if this lock is locked or not
+ :ivar ~anyio.TaskInfo owner: task currently holding the lock (or ``None`` if the lock is not
+ held by any task)
+ :ivar int tasks_waiting: number of tasks waiting on :meth:`~.Lock.acquire`
+ """
+
+ locked: bool
+ owner: TaskInfo | None
+ tasks_waiting: int
+
+
+@dataclass(frozen=True)
+class ConditionStatistics:
+ """
+ :ivar int tasks_waiting: number of tasks blocked on :meth:`~.Condition.wait`
+ :ivar ~anyio.LockStatistics lock_statistics: statistics of the underlying :class:`~.Lock`
+ """
+
+ tasks_waiting: int
+ lock_statistics: LockStatistics
+
+
+@dataclass(frozen=True)
+class SemaphoreStatistics:
+ """
+ :ivar int tasks_waiting: number of tasks waiting on :meth:`~.Semaphore.acquire`
+
+ """
+
+ tasks_waiting: int
+
+
+class Event:
+ def __new__(cls) -> Event:
+ return get_asynclib().Event()
+
+ def set(self) -> DeprecatedAwaitable:
+ """Set the flag, notifying all listeners."""
+ raise NotImplementedError
+
+ def is_set(self) -> bool:
+ """Return ``True`` if the flag is set, ``False`` if not."""
+ raise NotImplementedError
+
+ async def wait(self) -> None:
+ """
+ Wait until the flag has been set.
+
+ If the flag has already been set when this method is called, it returns immediately.
+
+ """
+ raise NotImplementedError
+
+ def statistics(self) -> EventStatistics:
+ """Return statistics about the current state of this event."""
+ raise NotImplementedError
+
+
+class Lock:
+ _owner_task: TaskInfo | None = None
+
+ def __init__(self) -> None:
+ self._waiters: deque[tuple[TaskInfo, Event]] = deque()
+
+ async def __aenter__(self) -> None:
+ await self.acquire()
+
+ async def __aexit__(
+ self,
+ exc_type: type[BaseException] | None,
+ exc_val: BaseException | None,
+ exc_tb: TracebackType | None,
+ ) -> None:
+ self.release()
+
+ async def acquire(self) -> None:
+ """Acquire the lock."""
+ await checkpoint_if_cancelled()
+ try:
+ self.acquire_nowait()
+ except WouldBlock:
+ task = get_current_task()
+ event = Event()
+ token = task, event
+ self._waiters.append(token)
+ try:
+ await event.wait()
+ except BaseException:
+ if not event.is_set():
+ self._waiters.remove(token)
+ elif self._owner_task == task:
+ self.release()
+
+ raise
+
+ assert self._owner_task == task
+ else:
+ try:
+ await cancel_shielded_checkpoint()
+ except BaseException:
+ self.release()
+ raise
+
+ def acquire_nowait(self) -> None:
+ """
+ Acquire the lock, without blocking.
+
+ :raises ~anyio.WouldBlock: if the operation would block
+
+ """
+ task = get_current_task()
+ if self._owner_task == task:
+ raise RuntimeError("Attempted to acquire an already held Lock")
+
+ if self._owner_task is not None:
+ raise WouldBlock
+
+ self._owner_task = task
+
+ def release(self) -> DeprecatedAwaitable:
+ """Release the lock."""
+ if self._owner_task != get_current_task():
+ raise RuntimeError("The current task is not holding this lock")
+
+ if self._waiters:
+ self._owner_task, event = self._waiters.popleft()
+ event.set()
+ else:
+ del self._owner_task
+
+ return DeprecatedAwaitable(self.release)
+
+ def locked(self) -> bool:
+ """Return True if the lock is currently held."""
+ return self._owner_task is not None
+
+ def statistics(self) -> LockStatistics:
+ """
+ Return statistics about the current state of this lock.
+
+ .. versionadded:: 3.0
+ """
+ return LockStatistics(self.locked(), self._owner_task, len(self._waiters))
+
+
+class Condition:
+ _owner_task: TaskInfo | None = None
+
+ def __init__(self, lock: Lock | None = None):
+ self._lock = lock or Lock()
+ self._waiters: deque[Event] = deque()
+
+ async def __aenter__(self) -> None:
+ await self.acquire()
+
+ async def __aexit__(
+ self,
+ exc_type: type[BaseException] | None,
+ exc_val: BaseException | None,
+ exc_tb: TracebackType | None,
+ ) -> None:
+ self.release()
+
+ def _check_acquired(self) -> None:
+ if self._owner_task != get_current_task():
+ raise RuntimeError("The current task is not holding the underlying lock")
+
+ async def acquire(self) -> None:
+ """Acquire the underlying lock."""
+ await self._lock.acquire()
+ self._owner_task = get_current_task()
+
+ def acquire_nowait(self) -> None:
+ """
+ Acquire the underlying lock, without blocking.
+
+ :raises ~anyio.WouldBlock: if the operation would block
+
+ """
+ self._lock.acquire_nowait()
+ self._owner_task = get_current_task()
+
+ def release(self) -> DeprecatedAwaitable:
+ """Release the underlying lock."""
+ self._lock.release()
+ return DeprecatedAwaitable(self.release)
+
+ def locked(self) -> bool:
+ """Return True if the lock is set."""
+ return self._lock.locked()
+
+ def notify(self, n: int = 1) -> None:
+ """Notify exactly n listeners."""
+ self._check_acquired()
+ for _ in range(n):
+ try:
+ event = self._waiters.popleft()
+ except IndexError:
+ break
+
+ event.set()
+
+ def notify_all(self) -> None:
+ """Notify all the listeners."""
+ self._check_acquired()
+ for event in self._waiters:
+ event.set()
+
+ self._waiters.clear()
+
+ async def wait(self) -> None:
+ """Wait for a notification."""
+ await checkpoint()
+ event = Event()
+ self._waiters.append(event)
+ self.release()
+ try:
+ await event.wait()
+ except BaseException:
+ if not event.is_set():
+ self._waiters.remove(event)
+
+ raise
+ finally:
+ with CancelScope(shield=True):
+ await self.acquire()
+
+ def statistics(self) -> ConditionStatistics:
+ """
+ Return statistics about the current state of this condition.
+
+ .. versionadded:: 3.0
+ """
+ return ConditionStatistics(len(self._waiters), self._lock.statistics())
+
+
+class Semaphore:
+ def __init__(self, initial_value: int, *, max_value: int | None = None):
+ if not isinstance(initial_value, int):
+ raise TypeError("initial_value must be an integer")
+ if initial_value < 0:
+ raise ValueError("initial_value must be >= 0")
+ if max_value is not None:
+ if not isinstance(max_value, int):
+ raise TypeError("max_value must be an integer or None")
+ if max_value < initial_value:
+ raise ValueError(
+ "max_value must be equal to or higher than initial_value"
+ )
+
+ self._value = initial_value
+ self._max_value = max_value
+ self._waiters: deque[Event] = deque()
+
+ async def __aenter__(self) -> Semaphore:
+ await self.acquire()
+ return self
+
+ async def __aexit__(
+ self,
+ exc_type: type[BaseException] | None,
+ exc_val: BaseException | None,
+ exc_tb: TracebackType | None,
+ ) -> None:
+ self.release()
+
+ async def acquire(self) -> None:
+ """Decrement the semaphore value, blocking if necessary."""
+ await checkpoint_if_cancelled()
+ try:
+ self.acquire_nowait()
+ except WouldBlock:
+ event = Event()
+ self._waiters.append(event)
+ try:
+ await event.wait()
+ except BaseException:
+ if not event.is_set():
+ self._waiters.remove(event)
+ else:
+ self.release()
+
+ raise
+ else:
+ try:
+ await cancel_shielded_checkpoint()
+ except BaseException:
+ self.release()
+ raise
+
+ def acquire_nowait(self) -> None:
+ """
+        Acquire the semaphore without blocking.
+
+ :raises ~anyio.WouldBlock: if the operation would block
+
+ """
+ if self._value == 0:
+ raise WouldBlock
+
+ self._value -= 1
+
+ def release(self) -> DeprecatedAwaitable:
+ """Increment the semaphore value."""
+ if self._max_value is not None and self._value == self._max_value:
+ raise ValueError("semaphore released too many times")
+
+ if self._waiters:
+ self._waiters.popleft().set()
+ else:
+ self._value += 1
+
+ return DeprecatedAwaitable(self.release)
+
+ @property
+ def value(self) -> int:
+ """The current value of the semaphore."""
+ return self._value
+
+ @property
+ def max_value(self) -> int | None:
+ """The maximum value of the semaphore."""
+ return self._max_value
+
+ def statistics(self) -> SemaphoreStatistics:
+ """
+ Return statistics about the current state of this semaphore.
+
+ .. versionadded:: 3.0
+ """
+ return SemaphoreStatistics(len(self._waiters))
+
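+# A sketch contrasting Semaphore and Lock (editor's illustration):
+#
+#     import anyio
+#
+#     async def main() -> None:
+#         lock = anyio.Lock()
+#         semaphore = anyio.Semaphore(2)
+#
+#         async def worker(n: int) -> None:
+#             async with semaphore:    # at most two workers run concurrently
+#                 async with lock:     # only one of them holds the lock
+#                     print("worker", n)
+#
+#         async with anyio.create_task_group() as tg:
+#             for n in range(4):
+#                 tg.start_soon(worker, n)
+#
+#     anyio.run(main)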
+
+class CapacityLimiter:
+ def __new__(cls, total_tokens: float) -> CapacityLimiter:
+ return get_asynclib().CapacityLimiter(total_tokens)
+
+ async def __aenter__(self) -> None:
+ raise NotImplementedError
+
+ async def __aexit__(
+ self,
+ exc_type: type[BaseException] | None,
+ exc_val: BaseException | None,
+ exc_tb: TracebackType | None,
+ ) -> bool | None:
+ raise NotImplementedError
+
+ @property
+ def total_tokens(self) -> float:
+ """
+ The total number of tokens available for borrowing.
+
+ This is a read-write property. If the total number of tokens is increased, the
+ proportionate number of tasks waiting on this limiter will be granted their tokens.
+
+ .. versionchanged:: 3.0
+ The property is now writable.
+
+ """
+ raise NotImplementedError
+
+ @total_tokens.setter
+ def total_tokens(self, value: float) -> None:
+ raise NotImplementedError
+
+ async def set_total_tokens(self, value: float) -> None:
+ warn(
+ "CapacityLimiter.set_total_tokens has been deprecated. Set the value of the"
+ '"total_tokens" attribute directly.',
+ DeprecationWarning,
+ )
+ self.total_tokens = value
+
+ @property
+ def borrowed_tokens(self) -> int:
+ """The number of tokens that have currently been borrowed."""
+ raise NotImplementedError
+
+ @property
+ def available_tokens(self) -> float:
+ """The number of tokens currently available to be borrowed"""
+ raise NotImplementedError
+
+ def acquire_nowait(self) -> DeprecatedAwaitable:
+ """
+ Acquire a token for the current task without waiting for one to become available.
+
+ :raises ~anyio.WouldBlock: if there are no tokens available for borrowing
+
+ """
+ raise NotImplementedError
+
+ def acquire_on_behalf_of_nowait(self, borrower: object) -> DeprecatedAwaitable:
+ """
+ Acquire a token without waiting for one to become available.
+
+ :param borrower: the entity borrowing a token
+ :raises ~anyio.WouldBlock: if there are no tokens available for borrowing
+
+ """
+ raise NotImplementedError
+
+ async def acquire(self) -> None:
+ """
+ Acquire a token for the current task, waiting if necessary for one to become available.
+
+ """
+ raise NotImplementedError
+
+ async def acquire_on_behalf_of(self, borrower: object) -> None:
+ """
+ Acquire a token, waiting if necessary for one to become available.
+
+ :param borrower: the entity borrowing a token
+
+ """
+ raise NotImplementedError
+
+ def release(self) -> None:
+ """
+        Release the token held by the current task.
+
+        :raises RuntimeError: if the current task has not borrowed a token from this limiter.
+
+ """
+ raise NotImplementedError
+
+ def release_on_behalf_of(self, borrower: object) -> None:
+ """
+ Release the token held by the given borrower.
+
+ :raises RuntimeError: if the borrower has not borrowed a token from this limiter.
+
+ """
+ raise NotImplementedError
+
+ def statistics(self) -> CapacityLimiterStatistics:
+ """
+ Return statistics about the current state of this limiter.
+
+ .. versionadded:: 3.0
+
+ """
+ raise NotImplementedError
+
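+# A usage sketch (editor's illustration; the limiter caps concurrent borrowers):
+#
+#     import anyio
+#
+#     async def main() -> None:
+#         limiter = anyio.CapacityLimiter(2)
+#
+#         async def job(n: int) -> None:
+#             async with limiter:      # waits until a token is available
+#                 await anyio.sleep(0.1)
+#
+#         async with anyio.create_task_group() as tg:
+#             for n in range(10):
+#                 tg.start_soon(job, n)
+#
+#     anyio.run(main)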
+
+def create_lock() -> Lock:
+ """
+ Create an asynchronous lock.
+
+ :return: a lock object
+
+ .. deprecated:: 3.0
+ Use :class:`~Lock` directly.
+
+ """
+ warn("create_lock() is deprecated -- use Lock() directly", DeprecationWarning)
+ return Lock()
+
+
+def create_condition(lock: Lock | None = None) -> Condition:
+ """
+ Create an asynchronous condition.
+
+ :param lock: the lock to base the condition object on
+ :return: a condition object
+
+ .. deprecated:: 3.0
+ Use :class:`~Condition` directly.
+
+ """
+ warn(
+ "create_condition() is deprecated -- use Condition() directly",
+ DeprecationWarning,
+ )
+ return Condition(lock=lock)
+
+
+def create_event() -> Event:
+ """
+ Create an asynchronous event object.
+
+ :return: an event object
+
+ .. deprecated:: 3.0
+ Use :class:`~Event` directly.
+
+ """
+ warn("create_event() is deprecated -- use Event() directly", DeprecationWarning)
+ return get_asynclib().Event()
+
+
+def create_semaphore(value: int, *, max_value: int | None = None) -> Semaphore:
+ """
+ Create an asynchronous semaphore.
+
+ :param value: the semaphore's initial value
+ :param max_value: if set, makes this a "bounded" semaphore that raises :exc:`ValueError` if the
+ semaphore's value would exceed this number
+ :return: a semaphore object
+
+ .. deprecated:: 3.0
+ Use :class:`~Semaphore` directly.
+
+ """
+ warn(
+ "create_semaphore() is deprecated -- use Semaphore() directly",
+ DeprecationWarning,
+ )
+ return Semaphore(value, max_value=max_value)
+
+
+def create_capacity_limiter(total_tokens: float) -> CapacityLimiter:
+ """
+ Create a capacity limiter.
+
+ :param total_tokens: the total number of tokens available for borrowing (can be an integer or
+ :data:`math.inf`)
+ :return: a capacity limiter object
+
+ .. deprecated:: 3.0
+ Use :class:`~CapacityLimiter` directly.
+
+ """
+ warn(
+ "create_capacity_limiter() is deprecated -- use CapacityLimiter() directly",
+ DeprecationWarning,
+ )
+ return get_asynclib().CapacityLimiter(total_tokens)
+
+
+class ResourceGuard:
+ __slots__ = "action", "_guarded"
+
+ def __init__(self, action: str):
+ self.action = action
+ self._guarded = False
+
+ def __enter__(self) -> None:
+ if self._guarded:
+ raise BusyResourceError(self.action)
+
+ self._guarded = True
+
+ def __exit__(
+ self,
+ exc_type: type[BaseException] | None,
+ exc_val: BaseException | None,
+ exc_tb: TracebackType | None,
+ ) -> bool | None:
+ self._guarded = False
+ return None
diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/_core/_tasks.py b/Backend/venv/lib/python3.12/site-packages/anyio/_core/_tasks.py
new file mode 100644
index 00000000..e9d9c2bd
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/anyio/_core/_tasks.py
@@ -0,0 +1,180 @@
+from __future__ import annotations
+
+import math
+from types import TracebackType
+from warnings import warn
+
+from ..abc._tasks import TaskGroup, TaskStatus
+from ._compat import (
+ DeprecatedAsyncContextManager,
+ DeprecatedAwaitable,
+ DeprecatedAwaitableFloat,
+)
+from ._eventloop import get_asynclib
+
+
+class _IgnoredTaskStatus(TaskStatus[object]):
+ def started(self, value: object = None) -> None:
+ pass
+
+
+TASK_STATUS_IGNORED = _IgnoredTaskStatus()
+
+
+class CancelScope(DeprecatedAsyncContextManager["CancelScope"]):
+ """
+ Wraps a unit of work that can be made separately cancellable.
+
+ :param deadline: The time (clock value) when this scope is cancelled automatically
+ :param shield: ``True`` to shield the cancel scope from external cancellation
+ """
+
+ def __new__(
+ cls, *, deadline: float = math.inf, shield: bool = False
+ ) -> CancelScope:
+ return get_asynclib().CancelScope(shield=shield, deadline=deadline)
+
+ def cancel(self) -> DeprecatedAwaitable:
+ """Cancel this scope immediately."""
+ raise NotImplementedError
+
+ @property
+ def deadline(self) -> float:
+ """
+ The time (clock value) when this scope is cancelled automatically.
+
+ Will be ``float('inf')`` if no timeout has been set.
+
+ """
+ raise NotImplementedError
+
+ @deadline.setter
+ def deadline(self, value: float) -> None:
+ raise NotImplementedError
+
+ @property
+ def cancel_called(self) -> bool:
+ """``True`` if :meth:`cancel` has been called."""
+ raise NotImplementedError
+
+ @property
+ def shield(self) -> bool:
+ """
+ ``True`` if this scope is shielded from external cancellation.
+
+ While a scope is shielded, it will not receive cancellations from outside.
+
+ """
+ raise NotImplementedError
+
+ @shield.setter
+ def shield(self, value: bool) -> None:
+ raise NotImplementedError
+
+ def __enter__(self) -> CancelScope:
+ raise NotImplementedError
+
+ def __exit__(
+ self,
+ exc_type: type[BaseException] | None,
+ exc_val: BaseException | None,
+ exc_tb: TracebackType | None,
+ ) -> bool | None:
+ raise NotImplementedError
+
+
+def open_cancel_scope(*, shield: bool = False) -> CancelScope:
+ """
+ Open a cancel scope.
+
+ :param shield: ``True`` to shield the cancel scope from external cancellation
+ :return: a cancel scope
+
+ .. deprecated:: 3.0
+ Use :class:`~CancelScope` directly.
+
+ """
+ warn(
+ "open_cancel_scope() is deprecated -- use CancelScope() directly",
+ DeprecationWarning,
+ )
+ return get_asynclib().CancelScope(shield=shield)
+
+
+class FailAfterContextManager(DeprecatedAsyncContextManager[CancelScope]):
+ def __init__(self, cancel_scope: CancelScope):
+ self._cancel_scope = cancel_scope
+
+ def __enter__(self) -> CancelScope:
+ return self._cancel_scope.__enter__()
+
+ def __exit__(
+ self,
+ exc_type: type[BaseException] | None,
+ exc_val: BaseException | None,
+ exc_tb: TracebackType | None,
+ ) -> bool | None:
+ retval = self._cancel_scope.__exit__(exc_type, exc_val, exc_tb)
+ if self._cancel_scope.cancel_called:
+ raise TimeoutError
+
+ return retval
+
+
+def fail_after(delay: float | None, shield: bool = False) -> FailAfterContextManager:
+ """
+    Create a context manager which raises a :class:`TimeoutError` if the enclosed block
+    does not finish in time.
+
+ :param delay: maximum allowed time (in seconds) before raising the exception, or ``None`` to
+ disable the timeout
+ :param shield: ``True`` to shield the cancel scope from external cancellation
+ :return: a context manager that yields a cancel scope
+ :rtype: :class:`~typing.ContextManager`\\[:class:`~anyio.CancelScope`\\]
+
+ """
+ deadline = (
+ (get_asynclib().current_time() + delay) if delay is not None else math.inf
+ )
+ cancel_scope = get_asynclib().CancelScope(deadline=deadline, shield=shield)
+ return FailAfterContextManager(cancel_scope)
+
+
+def move_on_after(delay: float | None, shield: bool = False) -> CancelScope:
+ """
+ Create a cancel scope with a deadline that expires after the given delay.
+
+ :param delay: maximum allowed time (in seconds) before exiting the context block, or ``None``
+ to disable the timeout
+ :param shield: ``True`` to shield the cancel scope from external cancellation
+ :return: a cancel scope
+
+ """
+ deadline = (
+ (get_asynclib().current_time() + delay) if delay is not None else math.inf
+ )
+ return get_asynclib().CancelScope(deadline=deadline, shield=shield)
+
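+# A sketch contrasting the two timeout helpers (editor's illustration):
+#
+#     import anyio
+#
+#     async def main() -> None:
+#         with anyio.move_on_after(0.1) as scope:
+#             await anyio.sleep(1)     # silently cancelled after 0.1 seconds
+#         print("timed out:", scope.cancel_called)
+#
+#         try:
+#             with anyio.fail_after(0.1):
+#                 await anyio.sleep(1)
+#         except TimeoutError:
+#             print("fail_after raised")
+#
+#     anyio.run(main)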
+
+def current_effective_deadline() -> DeprecatedAwaitableFloat:
+ """
+ Return the nearest deadline among all the cancel scopes effective for the current task.
+
+ :return: a clock value from the event loop's internal clock (or ``float('inf')`` if
+ there is no deadline in effect, or ``float('-inf')`` if the current scope has
+ been cancelled)
+ :rtype: float
+
+ """
+ return DeprecatedAwaitableFloat(
+ get_asynclib().current_effective_deadline(), current_effective_deadline
+ )
+
+
+def create_task_group() -> TaskGroup:
+ """
+ Create a task group.
+
+ :return: a task group
+
+ """
+ return get_asynclib().TaskGroup()
diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/_core/_testing.py b/Backend/venv/lib/python3.12/site-packages/anyio/_core/_testing.py
new file mode 100644
index 00000000..c8191b38
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/anyio/_core/_testing.py
@@ -0,0 +1,82 @@
+from __future__ import annotations
+
+from typing import Any, Awaitable, Generator
+
+from ._compat import DeprecatedAwaitableList, _warn_deprecation
+from ._eventloop import get_asynclib
+
+
+class TaskInfo:
+ """
+ Represents an asynchronous task.
+
+ :ivar int id: the unique identifier of the task
+ :ivar parent_id: the identifier of the parent task, if any
+ :vartype parent_id: Optional[int]
+ :ivar str name: the description of the task (if any)
+ :ivar ~collections.abc.Coroutine coro: the coroutine object of the task
+ """
+
+ __slots__ = "_name", "id", "parent_id", "name", "coro"
+
+ def __init__(
+ self,
+ id: int,
+ parent_id: int | None,
+ name: str | None,
+ coro: Generator[Any, Any, Any] | Awaitable[Any],
+ ):
+ func = get_current_task
+ self._name = f"{func.__module__}.{func.__qualname__}"
+ self.id: int = id
+ self.parent_id: int | None = parent_id
+ self.name: str | None = name
+ self.coro: Generator[Any, Any, Any] | Awaitable[Any] = coro
+
+ def __eq__(self, other: object) -> bool:
+ if isinstance(other, TaskInfo):
+ return self.id == other.id
+
+ return NotImplemented
+
+ def __hash__(self) -> int:
+ return hash(self.id)
+
+ def __repr__(self) -> str:
+ return f"{self.__class__.__name__}(id={self.id!r}, name={self.name!r})"
+
+ def __await__(self) -> Generator[None, None, TaskInfo]:
+ _warn_deprecation(self)
+ if False:
+ yield
+
+ return self
+
+ def _unwrap(self) -> TaskInfo:
+ return self
+
+
+def get_current_task() -> TaskInfo:
+ """
+ Return the current task.
+
+ :return: a representation of the current task
+
+ """
+ return get_asynclib().get_current_task()
+
+
+def get_running_tasks() -> DeprecatedAwaitableList[TaskInfo]:
+ """
+ Return a list of running tasks in the current event loop.
+
+ :return: a list of task info objects
+
+ """
+ tasks = get_asynclib().get_running_tasks()
+ return DeprecatedAwaitableList(tasks, func=get_running_tasks)
+
+
+async def wait_all_tasks_blocked() -> None:
+ """Wait until all other tasks are waiting for something."""
+ await get_asynclib().wait_all_tasks_blocked()
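
A small sketch of how these introspection helpers are typically combined in tests, assuming the top-level `anyio` re-exports of `get_current_task`, `get_running_tasks` and `wait_all_tasks_blocked`:

```python
import anyio
from anyio import get_current_task, get_running_tasks, wait_all_tasks_blocked

async def main() -> None:
    async with anyio.create_task_group() as tg:
        tg.start_soon(anyio.sleep, 1, name="sleeper")
        # Let the child task reach its await point before inspecting it
        await wait_all_tasks_blocked()
        print(get_current_task().id)
        print([task.name for task in get_running_tasks()])
        tg.cancel_scope.cancel()

anyio.run(main)
```
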
diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/_core/_typedattr.py b/Backend/venv/lib/python3.12/site-packages/anyio/_core/_typedattr.py
new file mode 100644
index 00000000..bf9202ee
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/anyio/_core/_typedattr.py
@@ -0,0 +1,83 @@
+from __future__ import annotations
+
+import sys
+from typing import Any, Callable, Mapping, TypeVar, overload
+
+from ._exceptions import TypedAttributeLookupError
+
+if sys.version_info >= (3, 8):
+ from typing import final
+else:
+ from typing_extensions import final
+
+T_Attr = TypeVar("T_Attr")
+T_Default = TypeVar("T_Default")
+undefined = object()
+
+
+def typed_attribute() -> Any:
+ """Return a unique object, used to mark typed attributes."""
+ return object()
+
+
+class TypedAttributeSet:
+ """
+ Superclass for typed attribute collections.
+
+ Checks that every public attribute of every subclass has a type annotation.
+ """
+
+ def __init_subclass__(cls) -> None:
+ annotations: dict[str, Any] = getattr(cls, "__annotations__", {})
+ for attrname in dir(cls):
+ if not attrname.startswith("_") and attrname not in annotations:
+ raise TypeError(
+ f"Attribute {attrname!r} is missing its type annotation"
+ )
+
+ super().__init_subclass__()
+
+
+class TypedAttributeProvider:
+ """Base class for classes that wish to provide typed extra attributes."""
+
+ @property
+ def extra_attributes(self) -> Mapping[T_Attr, Callable[[], T_Attr]]:
+ """
+ A mapping of the extra attributes to callables that return the corresponding values.
+
+ If the provider wraps another provider, the attributes from that wrapper should also be
+ included in the returned mapping (but the wrapper may override the callables from the
+ wrapped instance).
+
+ """
+ return {}
+
+ @overload
+ def extra(self, attribute: T_Attr) -> T_Attr:
+ ...
+
+ @overload
+ def extra(self, attribute: T_Attr, default: T_Default) -> T_Attr | T_Default:
+ ...
+
+ @final
+ def extra(self, attribute: Any, default: object = undefined) -> object:
+ """
+ extra(attribute, default=undefined)
+
+ Return the value of the given typed extra attribute.
+
+ :param attribute: the attribute (member of a :class:`~TypedAttributeSet`) to look for
+ :param default: the value that should be returned if no value is found for the attribute
+ :raises ~anyio.TypedAttributeLookupError: if the search failed and no default value was
+ given
+
+ """
+ try:
+ return self.extra_attributes[attribute]()
+ except KeyError:
+ if default is undefined:
+ raise TypedAttributeLookupError("Attribute not found") from None
+ else:
+ return default
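
A sketch of the typed-attribute machinery with a hypothetical attribute set and provider (the names `ExampleAttribute`, `peer_name` and `ExampleProvider` are illustrative, not part of anyio), assuming the top-level re-exports of `TypedAttributeSet`, `TypedAttributeProvider` and `typed_attribute`:

```python
from anyio import TypedAttributeProvider, TypedAttributeSet, typed_attribute

class ExampleAttribute(TypedAttributeSet):  # hypothetical attribute set
    #: a made-up attribute for demonstration only
    peer_name: str = typed_attribute()

class ExampleProvider(TypedAttributeProvider):  # hypothetical provider
    @property
    def extra_attributes(self):
        # Map attribute markers to zero-argument callables
        return {ExampleAttribute.peer_name: lambda: "demo-peer"}

provider = ExampleProvider()
print(provider.extra(ExampleAttribute.peer_name))     # "demo-peer"
print(provider.extra(typed_attribute(), "fallback"))  # unknown key -> default
```
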
diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/abc/__init__.py b/Backend/venv/lib/python3.12/site-packages/anyio/abc/__init__.py
new file mode 100644
index 00000000..72c34e54
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/anyio/abc/__init__.py
@@ -0,0 +1,90 @@
+from __future__ import annotations
+
+__all__ = (
+ "AsyncResource",
+ "IPAddressType",
+ "IPSockAddrType",
+ "SocketAttribute",
+ "SocketStream",
+ "SocketListener",
+ "UDPSocket",
+ "UNIXSocketStream",
+ "UDPPacketType",
+ "ConnectedUDPSocket",
+ "UnreliableObjectReceiveStream",
+ "UnreliableObjectSendStream",
+ "UnreliableObjectStream",
+ "ObjectReceiveStream",
+ "ObjectSendStream",
+ "ObjectStream",
+ "ByteReceiveStream",
+ "ByteSendStream",
+ "ByteStream",
+ "AnyUnreliableByteReceiveStream",
+ "AnyUnreliableByteSendStream",
+ "AnyUnreliableByteStream",
+ "AnyByteReceiveStream",
+ "AnyByteSendStream",
+ "AnyByteStream",
+ "Listener",
+ "Process",
+ "Event",
+ "Condition",
+ "Lock",
+ "Semaphore",
+ "CapacityLimiter",
+ "CancelScope",
+ "TaskGroup",
+ "TaskStatus",
+ "TestRunner",
+ "BlockingPortal",
+)
+
+from typing import Any
+
+from ._resources import AsyncResource
+from ._sockets import (
+ ConnectedUDPSocket,
+ IPAddressType,
+ IPSockAddrType,
+ SocketAttribute,
+ SocketListener,
+ SocketStream,
+ UDPPacketType,
+ UDPSocket,
+ UNIXSocketStream,
+)
+from ._streams import (
+ AnyByteReceiveStream,
+ AnyByteSendStream,
+ AnyByteStream,
+ AnyUnreliableByteReceiveStream,
+ AnyUnreliableByteSendStream,
+ AnyUnreliableByteStream,
+ ByteReceiveStream,
+ ByteSendStream,
+ ByteStream,
+ Listener,
+ ObjectReceiveStream,
+ ObjectSendStream,
+ ObjectStream,
+ UnreliableObjectReceiveStream,
+ UnreliableObjectSendStream,
+ UnreliableObjectStream,
+)
+from ._subprocesses import Process
+from ._tasks import TaskGroup, TaskStatus
+from ._testing import TestRunner
+
+# Re-exported here, for backwards compatibility
+# isort: off
+from .._core._synchronization import CapacityLimiter, Condition, Event, Lock, Semaphore
+from .._core._tasks import CancelScope
+from ..from_thread import BlockingPortal
+
+# Re-export imports so they look like they live directly in this package
+key: str
+value: Any
+for key, value in list(locals().items()):
+ if getattr(value, "__module__", "").startswith("anyio.abc."):
+ value.__module__ = __name__
diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/__init__.cpython-312.pyc
new file mode 100644
index 00000000..996a0ff2
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/__init__.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_resources.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_resources.cpython-312.pyc
new file mode 100644
index 00000000..19e9b749
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_resources.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_sockets.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_sockets.cpython-312.pyc
new file mode 100644
index 00000000..3eed92b4
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_sockets.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_streams.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_streams.cpython-312.pyc
new file mode 100644
index 00000000..1c6c35ac
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_streams.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_subprocesses.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_subprocesses.cpython-312.pyc
new file mode 100644
index 00000000..f5876557
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_subprocesses.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_tasks.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_tasks.cpython-312.pyc
new file mode 100644
index 00000000..6506a893
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_tasks.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_testing.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_testing.cpython-312.pyc
new file mode 100644
index 00000000..92167213
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_testing.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/abc/_resources.py b/Backend/venv/lib/python3.12/site-packages/anyio/abc/_resources.py
new file mode 100644
index 00000000..e0a283fc
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/anyio/abc/_resources.py
@@ -0,0 +1,31 @@
+from __future__ import annotations
+
+from abc import ABCMeta, abstractmethod
+from types import TracebackType
+from typing import TypeVar
+
+T = TypeVar("T")
+
+
+class AsyncResource(metaclass=ABCMeta):
+ """
+ Abstract base class for all closeable asynchronous resources.
+
+ Works as an asynchronous context manager which returns the instance itself on enter, and calls
+ :meth:`aclose` on exit.
+ """
+
+ async def __aenter__(self: T) -> T:
+ return self
+
+ async def __aexit__(
+ self,
+ exc_type: type[BaseException] | None,
+ exc_val: BaseException | None,
+ exc_tb: TracebackType | None,
+ ) -> None:
+ await self.aclose()
+
+ @abstractmethod
+ async def aclose(self) -> None:
+ """Close the resource."""
diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/abc/_sockets.py b/Backend/venv/lib/python3.12/site-packages/anyio/abc/_sockets.py
new file mode 100644
index 00000000..6aac5f7c
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/anyio/abc/_sockets.py
@@ -0,0 +1,160 @@
+from __future__ import annotations
+
+import socket
+from abc import abstractmethod
+from contextlib import AsyncExitStack
+from io import IOBase
+from ipaddress import IPv4Address, IPv6Address
+from socket import AddressFamily
+from typing import (
+ Any,
+ Callable,
+ Collection,
+ Mapping,
+ Tuple,
+ TypeVar,
+ Union,
+)
+
+from .._core._tasks import create_task_group
+from .._core._typedattr import (
+ TypedAttributeProvider,
+ TypedAttributeSet,
+ typed_attribute,
+)
+from ._streams import ByteStream, Listener, UnreliableObjectStream
+from ._tasks import TaskGroup
+
+IPAddressType = Union[str, IPv4Address, IPv6Address]
+IPSockAddrType = Tuple[str, int]
+SockAddrType = Union[IPSockAddrType, str]
+UDPPacketType = Tuple[bytes, IPSockAddrType]
+T_Retval = TypeVar("T_Retval")
+
+
+class SocketAttribute(TypedAttributeSet):
+ #: the address family of the underlying socket
+ family: AddressFamily = typed_attribute()
+ #: the local socket address of the underlying socket
+ local_address: SockAddrType = typed_attribute()
+ #: for IP addresses, the local port the underlying socket is bound to
+ local_port: int = typed_attribute()
+ #: the underlying stdlib socket object
+ raw_socket: socket.socket = typed_attribute()
+ #: the remote address the underlying socket is connected to
+ remote_address: SockAddrType = typed_attribute()
+ #: for IP addresses, the remote port the underlying socket is connected to
+ remote_port: int = typed_attribute()
+
+
+class _SocketProvider(TypedAttributeProvider):
+ @property
+ def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]:
+ from .._core._sockets import convert_ipv6_sockaddr as convert
+
+ attributes: dict[Any, Callable[[], Any]] = {
+ SocketAttribute.family: lambda: self._raw_socket.family,
+ SocketAttribute.local_address: lambda: convert(
+ self._raw_socket.getsockname()
+ ),
+ SocketAttribute.raw_socket: lambda: self._raw_socket,
+ }
+ try:
+ peername: tuple[str, int] | None = convert(self._raw_socket.getpeername())
+ except OSError:
+ peername = None
+
+ # Provide the remote address for connected sockets
+ if peername is not None:
+ attributes[SocketAttribute.remote_address] = lambda: peername
+
+ # Provide local and remote ports for IP based sockets
+ if self._raw_socket.family in (AddressFamily.AF_INET, AddressFamily.AF_INET6):
+ attributes[
+ SocketAttribute.local_port
+ ] = lambda: self._raw_socket.getsockname()[1]
+ if peername is not None:
+ remote_port = peername[1]
+ attributes[SocketAttribute.remote_port] = lambda: remote_port
+
+ return attributes
+
+ @property
+ @abstractmethod
+ def _raw_socket(self) -> socket.socket:
+ pass
+
+
+class SocketStream(ByteStream, _SocketProvider):
+ """
+ Transports bytes over a socket.
+
+ Supports all relevant extra attributes from :class:`~SocketAttribute`.
+ """
+
+
+class UNIXSocketStream(SocketStream):
+ @abstractmethod
+ async def send_fds(self, message: bytes, fds: Collection[int | IOBase]) -> None:
+ """
+ Send file descriptors along with a message to the peer.
+
+ :param message: a non-empty bytestring
+ :param fds: a collection of files (either numeric file descriptors or open file or socket
+ objects)
+ """
+
+ @abstractmethod
+ async def receive_fds(self, msglen: int, maxfds: int) -> tuple[bytes, list[int]]:
+ """
+ Receive file descriptors along with a message from the peer.
+
+ :param msglen: length of the message to expect from the peer
+ :param maxfds: maximum number of file descriptors to expect from the peer
+ :return: a tuple of (message, file descriptors)
+ """
+
+
+class SocketListener(Listener[SocketStream], _SocketProvider):
+ """
+ Listens to incoming socket connections.
+
+ Supports all relevant extra attributes from :class:`~SocketAttribute`.
+ """
+
+ @abstractmethod
+ async def accept(self) -> SocketStream:
+ """Accept an incoming connection."""
+
+ async def serve(
+ self,
+ handler: Callable[[SocketStream], Any],
+ task_group: TaskGroup | None = None,
+ ) -> None:
+ async with AsyncExitStack() as exit_stack:
+ if task_group is None:
+ task_group = await exit_stack.enter_async_context(create_task_group())
+
+ while True:
+ stream = await self.accept()
+ task_group.start_soon(handler, stream)
+
+
+class UDPSocket(UnreliableObjectStream[UDPPacketType], _SocketProvider):
+ """
+ Represents an unconnected UDP socket.
+
+ Supports all relevant extra attributes from :class:`~SocketAttribute`.
+ """
+
+ async def sendto(self, data: bytes, host: str, port: int) -> None:
+ """Alias for :meth:`~.UnreliableObjectSendStream.send` ((data, (host, port)))."""
+ return await self.send((data, (host, port)))
+
+
+class ConnectedUDPSocket(UnreliableObjectStream[bytes], _SocketProvider):
+ """
+ Represents a connected UDP socket.
+
+ Supports all relevant extra attributes from :class:`~SocketAttribute`.
+ """
diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/abc/_streams.py b/Backend/venv/lib/python3.12/site-packages/anyio/abc/_streams.py
new file mode 100644
index 00000000..4fa7ccc9
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/anyio/abc/_streams.py
@@ -0,0 +1,203 @@
+from __future__ import annotations
+
+from abc import abstractmethod
+from typing import Any, Callable, Generic, TypeVar, Union
+
+from .._core._exceptions import EndOfStream
+from .._core._typedattr import TypedAttributeProvider
+from ._resources import AsyncResource
+from ._tasks import TaskGroup
+
+T_Item = TypeVar("T_Item")
+T_co = TypeVar("T_co", covariant=True)
+T_contra = TypeVar("T_contra", contravariant=True)
+
+
+class UnreliableObjectReceiveStream(
+ Generic[T_co], AsyncResource, TypedAttributeProvider
+):
+ """
+ An interface for receiving objects.
+
+ This interface makes no guarantees that the received messages arrive in the order in which they
+ were sent, or that no messages are missed.
+
+ Asynchronously iterating over objects of this type will yield objects matching the given type
+ parameter.
+ """
+
+ def __aiter__(self) -> UnreliableObjectReceiveStream[T_co]:
+ return self
+
+ async def __anext__(self) -> T_co:
+ try:
+ return await self.receive()
+ except EndOfStream:
+ raise StopAsyncIteration
+
+ @abstractmethod
+ async def receive(self) -> T_co:
+ """
+ Receive the next item.
+
+ :raises ~anyio.ClosedResourceError: if the receive stream has been explicitly
+ closed
+ :raises ~anyio.EndOfStream: if this stream has been closed from the other end
+ :raises ~anyio.BrokenResourceError: if this stream has been rendered unusable
+ due to external causes
+ """
+
+
+class UnreliableObjectSendStream(
+ Generic[T_contra], AsyncResource, TypedAttributeProvider
+):
+ """
+ An interface for sending objects.
+
+ This interface makes no guarantees that the messages sent will reach the recipient(s) in the
+ same order in which they were sent, or at all.
+ """
+
+ @abstractmethod
+ async def send(self, item: T_contra) -> None:
+ """
+ Send an item to the peer(s).
+
+ :param item: the item to send
+ :raises ~anyio.ClosedResourceError: if the send stream has been explicitly
+ closed
+ :raises ~anyio.BrokenResourceError: if this stream has been rendered unusable
+ due to external causes
+ """
+
+
+class UnreliableObjectStream(
+ UnreliableObjectReceiveStream[T_Item], UnreliableObjectSendStream[T_Item]
+):
+ """
+ A bidirectional message stream which does not guarantee the order or reliability of message
+ delivery.
+ """
+
+
+class ObjectReceiveStream(UnreliableObjectReceiveStream[T_co]):
+ """
+ A receive message stream which guarantees that messages are received in the same order in
+ which they were sent, and that no messages are missed.
+ """
+
+
+class ObjectSendStream(UnreliableObjectSendStream[T_contra]):
+ """
+ A send message stream which guarantees that messages are delivered in the same order in which
+ they were sent, without missing any messages in the middle.
+ """
+
+
+class ObjectStream(
+ ObjectReceiveStream[T_Item],
+ ObjectSendStream[T_Item],
+ UnreliableObjectStream[T_Item],
+):
+ """
+ A bidirectional message stream which guarantees the order and reliability of message delivery.
+ """
+
+ @abstractmethod
+ async def send_eof(self) -> None:
+ """
+ Send an end-of-file indication to the peer.
+
+ You should not try to send any further data to this stream after calling this method.
+ This method is idempotent (does nothing on successive calls).
+ """
+
+
+class ByteReceiveStream(AsyncResource, TypedAttributeProvider):
+ """
+ An interface for receiving bytes from a single peer.
+
+ Iterating this byte stream will yield a byte string of arbitrary length, but no more than
+ 65536 bytes.
+ """
+
+ def __aiter__(self) -> ByteReceiveStream:
+ return self
+
+ async def __anext__(self) -> bytes:
+ try:
+ return await self.receive()
+ except EndOfStream:
+ raise StopAsyncIteration
+
+ @abstractmethod
+ async def receive(self, max_bytes: int = 65536) -> bytes:
+ """
+ Receive at most ``max_bytes`` bytes from the peer.
+
+ .. note:: Implementors of this interface should not return an empty :class:`bytes` object,
+ and users should ignore them.
+
+ :param max_bytes: maximum number of bytes to receive
+ :return: the received bytes
+ :raises ~anyio.EndOfStream: if this stream has been closed from the other end
+ """
+
+
+class ByteSendStream(AsyncResource, TypedAttributeProvider):
+ """An interface for sending bytes to a single peer."""
+
+ @abstractmethod
+ async def send(self, item: bytes) -> None:
+ """
+ Send the given bytes to the peer.
+
+ :param item: the bytes to send
+ """
+
+
+class ByteStream(ByteReceiveStream, ByteSendStream):
+ """A bidirectional byte stream."""
+
+ @abstractmethod
+ async def send_eof(self) -> None:
+ """
+ Send an end-of-file indication to the peer.
+
+ You should not try to send any further data to this stream after calling this method.
+ This method is idempotent (does nothing on successive calls).
+ """
+
+
+#: Type alias for all unreliable bytes-oriented receive streams.
+AnyUnreliableByteReceiveStream = Union[
+ UnreliableObjectReceiveStream[bytes], ByteReceiveStream
+]
+#: Type alias for all unreliable bytes-oriented send streams.
+AnyUnreliableByteSendStream = Union[UnreliableObjectSendStream[bytes], ByteSendStream]
+#: Type alias for all unreliable bytes-oriented streams.
+AnyUnreliableByteStream = Union[UnreliableObjectStream[bytes], ByteStream]
+#: Type alias for all bytes-oriented receive streams.
+AnyByteReceiveStream = Union[ObjectReceiveStream[bytes], ByteReceiveStream]
+#: Type alias for all bytes-oriented send streams.
+AnyByteSendStream = Union[ObjectSendStream[bytes], ByteSendStream]
+#: Type alias for all bytes-oriented streams.
+AnyByteStream = Union[ObjectStream[bytes], ByteStream]
+
+
+class Listener(Generic[T_co], AsyncResource, TypedAttributeProvider):
+ """An interface for objects that let you accept incoming connections."""
+
+ @abstractmethod
+ async def serve(
+ self,
+ handler: Callable[[T_co], Any],
+ task_group: TaskGroup | None = None,
+ ) -> None:
+ """
+ Accept incoming connections as they come in and start tasks to handle them.
+
+ :param handler: a callable that will be used to handle each accepted connection
+ :param task_group: the task group that will be used to start tasks for handling each
+ accepted connection (if omitted, an ad-hoc task group will be created)
+ """
diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/abc/_subprocesses.py b/Backend/venv/lib/python3.12/site-packages/anyio/abc/_subprocesses.py
new file mode 100644
index 00000000..704b44a2
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/anyio/abc/_subprocesses.py
@@ -0,0 +1,79 @@
+from __future__ import annotations
+
+from abc import abstractmethod
+from signal import Signals
+
+from ._resources import AsyncResource
+from ._streams import ByteReceiveStream, ByteSendStream
+
+
+class Process(AsyncResource):
+ """An asynchronous version of :class:`subprocess.Popen`."""
+
+ @abstractmethod
+ async def wait(self) -> int:
+ """
+ Wait until the process exits.
+
+ :return: the exit code of the process
+ """
+
+ @abstractmethod
+ def terminate(self) -> None:
+ """
+ Terminates the process, gracefully if possible.
+
+ On Windows, this calls ``TerminateProcess()``.
+ On POSIX systems, this sends ``SIGTERM`` to the process.
+
+ .. seealso:: :meth:`subprocess.Popen.terminate`
+ """
+
+ @abstractmethod
+ def kill(self) -> None:
+ """
+ Kills the process.
+
+ On Windows, this calls ``TerminateProcess()``.
+ On POSIX systems, this sends ``SIGKILL`` to the process.
+
+ .. seealso:: :meth:`subprocess.Popen.kill`
+ """
+
+ @abstractmethod
+ def send_signal(self, signal: Signals) -> None:
+ """
+ Send a signal to the subprocess.
+
+ .. seealso:: :meth:`subprocess.Popen.send_signal`
+
+ :param signal: the signal number (e.g. :data:`signal.SIGHUP`)
+ """
+
+ @property
+ @abstractmethod
+ def pid(self) -> int:
+ """The process ID of the process."""
+
+ @property
+ @abstractmethod
+ def returncode(self) -> int | None:
+ """
+ The return code of the process. If the process has not yet terminated, this will be
+ ``None``.
+ """
+
+ @property
+ @abstractmethod
+ def stdin(self) -> ByteSendStream | None:
+ """The stream for the standard input of the process."""
+
+ @property
+ @abstractmethod
+ def stdout(self) -> ByteReceiveStream | None:
+ """The stream for the standard output of the process."""
+
+ @property
+ @abstractmethod
+ def stderr(self) -> ByteReceiveStream | None:
+ """The stream for the standard error output of the process."""
diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/abc/_tasks.py b/Backend/venv/lib/python3.12/site-packages/anyio/abc/_tasks.py
new file mode 100644
index 00000000..e48d3c1e
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/anyio/abc/_tasks.py
@@ -0,0 +1,119 @@
+from __future__ import annotations
+
+import sys
+from abc import ABCMeta, abstractmethod
+from types import TracebackType
+from typing import TYPE_CHECKING, Any, Awaitable, Callable, TypeVar, overload
+from warnings import warn
+
+if sys.version_info >= (3, 8):
+ from typing import Protocol
+else:
+ from typing_extensions import Protocol
+
+if TYPE_CHECKING:
+ from anyio._core._tasks import CancelScope
+
+T_Retval = TypeVar("T_Retval")
+T_contra = TypeVar("T_contra", contravariant=True)
+
+
+class TaskStatus(Protocol[T_contra]):
+ @overload
+ def started(self: TaskStatus[None]) -> None:
+ ...
+
+ @overload
+ def started(self, value: T_contra) -> None:
+ ...
+
+ def started(self, value: T_contra | None = None) -> None:
+ """
+ Signal that the task has started.
+
+ :param value: object passed back to the starter of the task
+ """
+
+
+class TaskGroup(metaclass=ABCMeta):
+ """
+ Groups several asynchronous tasks together.
+
+ :ivar cancel_scope: the cancel scope inherited by all child tasks
+ :vartype cancel_scope: CancelScope
+ """
+
+ cancel_scope: CancelScope
+
+ async def spawn(
+ self,
+ func: Callable[..., Awaitable[Any]],
+ *args: object,
+ name: object = None,
+ ) -> None:
+ """
+ Start a new task in this task group.
+
+ :param func: a coroutine function
+ :param args: positional arguments to call the function with
+ :param name: name of the task, for the purposes of introspection and debugging
+
+ .. deprecated:: 3.0
+ Use :meth:`start_soon` instead. If your code needs AnyIO 2 compatibility, you
+ can keep using this until AnyIO 4.
+
+ """
+ warn(
+ 'spawn() is deprecated -- use start_soon() (without the "await") instead',
+ DeprecationWarning,
+ )
+ self.start_soon(func, *args, name=name)
+
+ @abstractmethod
+ def start_soon(
+ self,
+ func: Callable[..., Awaitable[Any]],
+ *args: object,
+ name: object = None,
+ ) -> None:
+ """
+ Start a new task in this task group.
+
+ :param func: a coroutine function
+ :param args: positional arguments to call the function with
+ :param name: name of the task, for the purposes of introspection and debugging
+
+ .. versionadded:: 3.0
+ """
+
+ @abstractmethod
+ async def start(
+ self,
+ func: Callable[..., Awaitable[Any]],
+ *args: object,
+ name: object = None,
+ ) -> Any:
+ """
+ Start a new task and wait until it signals for readiness.
+
+ :param func: a coroutine function
+ :param args: positional arguments to call the function with
+ :param name: name of the task, for the purposes of introspection and debugging
+ :return: the value passed to ``task_status.started()``
+ :raises RuntimeError: if the task finishes without calling ``task_status.started()``
+
+ .. versionadded:: 3.0
+ """
+
+ @abstractmethod
+ async def __aenter__(self) -> TaskGroup:
+ """Enter the task group context and allow starting new tasks."""
+
+ @abstractmethod
+ async def __aexit__(
+ self,
+ exc_type: type[BaseException] | None,
+ exc_val: BaseException | None,
+ exc_tb: TracebackType | None,
+ ) -> bool | None:
+ """Exit the task group context waiting for all tasks to finish."""
diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/abc/_testing.py b/Backend/venv/lib/python3.12/site-packages/anyio/abc/_testing.py
new file mode 100644
index 00000000..ee2cff5c
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/anyio/abc/_testing.py
@@ -0,0 +1,70 @@
+from __future__ import annotations
+
+import types
+from abc import ABCMeta, abstractmethod
+from collections.abc import AsyncGenerator, Iterable
+from typing import Any, Callable, Coroutine, TypeVar
+
+_T = TypeVar("_T")
+
+
+class TestRunner(metaclass=ABCMeta):
+ """
+ Encapsulates a running event loop. Every call made through this object will use the same event
+ loop.
+ """
+
+ def __enter__(self) -> TestRunner:
+ return self
+
+ def __exit__(
+ self,
+ exc_type: type[BaseException] | None,
+ exc_val: BaseException | None,
+ exc_tb: types.TracebackType | None,
+ ) -> bool | None:
+ self.close()
+ return None
+
+ @abstractmethod
+ def close(self) -> None:
+ """Close the event loop."""
+
+ @abstractmethod
+ def run_asyncgen_fixture(
+ self,
+ fixture_func: Callable[..., AsyncGenerator[_T, Any]],
+ kwargs: dict[str, Any],
+ ) -> Iterable[_T]:
+ """
+ Run an async generator fixture.
+
+ :param fixture_func: the fixture function
+ :param kwargs: keyword arguments to call the fixture function with
+ :return: an iterator yielding the value yielded from the async generator
+ """
+
+ @abstractmethod
+ def run_fixture(
+ self,
+ fixture_func: Callable[..., Coroutine[Any, Any, _T]],
+ kwargs: dict[str, Any],
+ ) -> _T:
+ """
+ Run an async fixture.
+
+ :param fixture_func: the fixture function
+ :param kwargs: keyword arguments to call the fixture function with
+ :return: the return value of the fixture function
+ """
+
+ @abstractmethod
+ def run_test(
+ self, test_func: Callable[..., Coroutine[Any, Any, Any]], kwargs: dict[str, Any]
+ ) -> None:
+ """
+ Run an async test function.
+
+ :param test_func: the test function
+ :param kwargs: keyword arguments to call the test function with
+ """
diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/from_thread.py b/Backend/venv/lib/python3.12/site-packages/anyio/from_thread.py
new file mode 100644
index 00000000..6b76861c
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/anyio/from_thread.py
@@ -0,0 +1,500 @@
+from __future__ import annotations
+
+import threading
+from asyncio import iscoroutine
+from concurrent.futures import FIRST_COMPLETED, Future, ThreadPoolExecutor, wait
+from contextlib import AbstractContextManager, contextmanager
+from types import TracebackType
+from typing import (
+ Any,
+ AsyncContextManager,
+ Awaitable,
+ Callable,
+ ContextManager,
+ Generator,
+ Generic,
+ Iterable,
+ TypeVar,
+ cast,
+ overload,
+)
+from warnings import warn
+
+from ._core import _eventloop
+from ._core._eventloop import get_asynclib, get_cancelled_exc_class, threadlocals
+from ._core._synchronization import Event
+from ._core._tasks import CancelScope, create_task_group
+from .abc._tasks import TaskStatus
+
+T_Retval = TypeVar("T_Retval")
+T_co = TypeVar("T_co")
+
+
+def run(func: Callable[..., Awaitable[T_Retval]], *args: object) -> T_Retval:
+ """
+ Call a coroutine function from a worker thread.
+
+ :param func: a coroutine function
+ :param args: positional arguments for the callable
+ :return: the return value of the coroutine function
+
+ """
+ try:
+ asynclib = threadlocals.current_async_module
+ except AttributeError:
+ raise RuntimeError("This function can only be run from an AnyIO worker thread")
+
+ return asynclib.run_async_from_thread(func, *args)
+
+
+def run_async_from_thread(
+ func: Callable[..., Awaitable[T_Retval]], *args: object
+) -> T_Retval:
+ warn(
+ "run_async_from_thread() has been deprecated, use anyio.from_thread.run() instead",
+ DeprecationWarning,
+ )
+ return run(func, *args)
+
+
+def run_sync(func: Callable[..., T_Retval], *args: object) -> T_Retval:
+ """
+ Call a function in the event loop thread from a worker thread.
+
+ :param func: a callable
+ :param args: positional arguments for the callable
+ :return: the return value of the callable
+
+ """
+ try:
+ asynclib = threadlocals.current_async_module
+ except AttributeError:
+ raise RuntimeError("This function can only be run from an AnyIO worker thread")
+
+ return asynclib.run_sync_from_thread(func, *args)
+
+
+def run_sync_from_thread(func: Callable[..., T_Retval], *args: object) -> T_Retval:
+ warn(
+ "run_sync_from_thread() has been deprecated, use anyio.from_thread.run_sync() instead",
+ DeprecationWarning,
+ )
+ return run_sync(func, *args)
+
+
+class _BlockingAsyncContextManager(Generic[T_co], AbstractContextManager):
+ _enter_future: Future
+ _exit_future: Future
+ _exit_event: Event
+ _exit_exc_info: tuple[
+ type[BaseException] | None, BaseException | None, TracebackType | None
+ ] = (None, None, None)
+
+ def __init__(self, async_cm: AsyncContextManager[T_co], portal: BlockingPortal):
+ self._async_cm = async_cm
+ self._portal = portal
+
+ async def run_async_cm(self) -> bool | None:
+ try:
+ self._exit_event = Event()
+ value = await self._async_cm.__aenter__()
+ except BaseException as exc:
+ self._enter_future.set_exception(exc)
+ raise
+ else:
+ self._enter_future.set_result(value)
+
+ try:
+ # Wait for the sync context manager to exit.
+ # This next statement can raise `get_cancelled_exc_class()` if
+ # something went wrong in a task group in this async context
+ # manager.
+ await self._exit_event.wait()
+ finally:
+ # In case of cancellation, it could be that we end up here before
+ # `_BlockingAsyncContextManager.__exit__` is called, and an
+ # `_exit_exc_info` has been set.
+ result = await self._async_cm.__aexit__(*self._exit_exc_info)
+ return result
+
+ def __enter__(self) -> T_co:
+ self._enter_future = Future()
+ self._exit_future = self._portal.start_task_soon(self.run_async_cm)
+ cm = self._enter_future.result()
+ return cast(T_co, cm)
+
+ def __exit__(
+ self,
+ __exc_type: type[BaseException] | None,
+ __exc_value: BaseException | None,
+ __traceback: TracebackType | None,
+ ) -> bool | None:
+ self._exit_exc_info = __exc_type, __exc_value, __traceback
+ self._portal.call(self._exit_event.set)
+ return self._exit_future.result()
+
+
+class _BlockingPortalTaskStatus(TaskStatus):
+ def __init__(self, future: Future):
+ self._future = future
+
+ def started(self, value: object = None) -> None:
+ self._future.set_result(value)
+
+
+class BlockingPortal:
+ """An object that lets external threads run code in an asynchronous event loop."""
+
+ def __new__(cls) -> BlockingPortal:
+ return get_asynclib().BlockingPortal()
+
+ def __init__(self) -> None:
+ self._event_loop_thread_id: int | None = threading.get_ident()
+ self._stop_event = Event()
+ self._task_group = create_task_group()
+ self._cancelled_exc_class = get_cancelled_exc_class()
+
+ async def __aenter__(self) -> BlockingPortal:
+ await self._task_group.__aenter__()
+ return self
+
+ async def __aexit__(
+ self,
+ exc_type: type[BaseException] | None,
+ exc_val: BaseException | None,
+ exc_tb: TracebackType | None,
+ ) -> bool | None:
+ await self.stop()
+ return await self._task_group.__aexit__(exc_type, exc_val, exc_tb)
+
+ def _check_running(self) -> None:
+ if self._event_loop_thread_id is None:
+ raise RuntimeError("This portal is not running")
+ if self._event_loop_thread_id == threading.get_ident():
+ raise RuntimeError(
+ "This method cannot be called from the event loop thread"
+ )
+
+ async def sleep_until_stopped(self) -> None:
+ """Sleep until :meth:`stop` is called."""
+ await self._stop_event.wait()
+
+ async def stop(self, cancel_remaining: bool = False) -> None:
+ """
+ Signal the portal to shut down.
+
+ This marks the portal as no longer accepting new calls and exits from
+ :meth:`sleep_until_stopped`.
+
+ :param cancel_remaining: ``True`` to cancel all the remaining tasks, ``False`` to let them
+ finish before returning
+
+ """
+ self._event_loop_thread_id = None
+ self._stop_event.set()
+ if cancel_remaining:
+ self._task_group.cancel_scope.cancel()
+
+ async def _call_func(
+ self, func: Callable, args: tuple, kwargs: dict[str, Any], future: Future
+ ) -> None:
+ def callback(f: Future) -> None:
+ if f.cancelled() and self._event_loop_thread_id not in (
+ None,
+ threading.get_ident(),
+ ):
+ self.call(scope.cancel)
+
+ try:
+ retval = func(*args, **kwargs)
+ if iscoroutine(retval):
+ with CancelScope() as scope:
+ if future.cancelled():
+ scope.cancel()
+ else:
+ future.add_done_callback(callback)
+
+ retval = await retval
+ except self._cancelled_exc_class:
+ future.cancel()
+ except BaseException as exc:
+ if not future.cancelled():
+ future.set_exception(exc)
+
+ # Let base exceptions fall through
+ if not isinstance(exc, Exception):
+ raise
+ else:
+ if not future.cancelled():
+ future.set_result(retval)
+ finally:
+ scope = None # type: ignore[assignment]
+
+ def _spawn_task_from_thread(
+ self,
+ func: Callable,
+ args: tuple,
+ kwargs: dict[str, Any],
+ name: object,
+ future: Future,
+ ) -> None:
+ """
+ Spawn a new task using the given callable.
+
+ Implementors must ensure that the future is resolved when the task finishes.
+
+ :param func: a callable
+ :param args: positional arguments to be passed to the callable
+ :param kwargs: keyword arguments to be passed to the callable
+ :param name: name of the task (will be coerced to a string if not ``None``)
+ :param future: a future that will resolve to the return value of the callable, or the
+ exception raised during its execution
+
+ """
+ raise NotImplementedError
+
+ @overload
+ def call(self, func: Callable[..., Awaitable[T_Retval]], *args: object) -> T_Retval:
+ ...
+
+ @overload
+ def call(self, func: Callable[..., T_Retval], *args: object) -> T_Retval:
+ ...
+
+ def call(
+ self, func: Callable[..., Awaitable[T_Retval] | T_Retval], *args: object
+ ) -> T_Retval:
+ """
+ Call the given function in the event loop thread.
+
+ If the callable returns a coroutine object, it is awaited on.
+
+ :param func: any callable
+ :raises RuntimeError: if the portal is not running or if this method is called from within
+ the event loop thread
+
+ """
+ return cast(T_Retval, self.start_task_soon(func, *args).result())
+
+ @overload
+ def spawn_task(
+ self,
+ func: Callable[..., Awaitable[T_Retval]],
+ *args: object,
+ name: object = None,
+ ) -> Future[T_Retval]:
+ ...
+
+ @overload
+ def spawn_task(
+ self, func: Callable[..., T_Retval], *args: object, name: object = None
+ ) -> Future[T_Retval]:
+ ...
+
+ def spawn_task(
+ self,
+ func: Callable[..., Awaitable[T_Retval] | T_Retval],
+ *args: object,
+ name: object = None,
+ ) -> Future[T_Retval]:
+ """
+ Start a task in the portal's task group.
+
+ :param func: the target coroutine function
+ :param args: positional arguments passed to ``func``
+ :param name: name of the task (will be coerced to a string if not ``None``)
+ :return: a future that resolves with the return value of the callable if the task completes
+ successfully, or with the exception raised in the task
+ :raises RuntimeError: if the portal is not running or if this method is called from within
+ the event loop thread
+
+ .. versionadded:: 2.1
+ .. deprecated:: 3.0
+ Use :meth:`start_task_soon` instead. If your code needs AnyIO 2 compatibility, you
+ can keep using this until AnyIO 4.
+
+ """
+ warn(
+ "spawn_task() is deprecated -- use start_task_soon() instead",
+ DeprecationWarning,
+ )
+ return self.start_task_soon(func, *args, name=name) # type: ignore[arg-type]
+
+ @overload
+ def start_task_soon(
+ self,
+ func: Callable[..., Awaitable[T_Retval]],
+ *args: object,
+ name: object = None,
+ ) -> Future[T_Retval]:
+ ...
+
+ @overload
+ def start_task_soon(
+ self, func: Callable[..., T_Retval], *args: object, name: object = None
+ ) -> Future[T_Retval]:
+ ...
+
+ def start_task_soon(
+ self,
+ func: Callable[..., Awaitable[T_Retval] | T_Retval],
+ *args: object,
+ name: object = None,
+ ) -> Future[T_Retval]:
+ """
+ Start a task in the portal's task group.
+
+ The task will be run inside a cancel scope which can be cancelled by cancelling the
+ returned future.
+
+ :param func: the target function
+ :param args: positional arguments passed to ``func``
+ :param name: name of the task (will be coerced to a string if not ``None``)
+ :return: a future that resolves with the return value of the callable if the
+ task completes successfully, or with the exception raised in the task
+ :raises RuntimeError: if the portal is not running or if this method is called
+ from within the event loop thread
+ :rtype: concurrent.futures.Future[T_Retval]
+
+ .. versionadded:: 3.0
+
+ """
+ self._check_running()
+ f: Future = Future()
+ self._spawn_task_from_thread(func, args, {}, name, f)
+ return f
+
+ def start_task(
+ self, func: Callable[..., Awaitable[Any]], *args: object, name: object = None
+ ) -> tuple[Future[Any], Any]:
+ """
+ Start a task in the portal's task group and wait until it signals for readiness.
+
+ This method works the same way as :meth:`.abc.TaskGroup.start`.
+
+ :param func: the target function
+ :param args: positional arguments passed to ``func``
+ :param name: name of the task (will be coerced to a string if not ``None``)
+ :return: a tuple of (future, task_status_value) where the ``task_status_value``
+ is the value passed to ``task_status.started()`` from within the target
+ function
+ :rtype: tuple[concurrent.futures.Future[Any], Any]
+
+ .. versionadded:: 3.0
+
+ """
+
+ def task_done(future: Future) -> None:
+ if not task_status_future.done():
+ if future.cancelled():
+ task_status_future.cancel()
+ elif future.exception():
+ task_status_future.set_exception(future.exception())
+ else:
+ exc = RuntimeError(
+ "Task exited without calling task_status.started()"
+ )
+ task_status_future.set_exception(exc)
+
+ self._check_running()
+ task_status_future: Future = Future()
+ task_status = _BlockingPortalTaskStatus(task_status_future)
+ f: Future = Future()
+ f.add_done_callback(task_done)
+ self._spawn_task_from_thread(func, args, {"task_status": task_status}, name, f)
+ return f, task_status_future.result()
+
+ def wrap_async_context_manager(
+ self, cm: AsyncContextManager[T_co]
+ ) -> ContextManager[T_co]:
+ """
+ Wrap an async context manager as a synchronous context manager via this portal.
+
+ Spawns a task that will call both ``__aenter__()`` and ``__aexit__()``, stopping in the
+ middle until the synchronous context manager exits.
+
+ :param cm: an asynchronous context manager
+ :return: a synchronous context manager
+
+ .. versionadded:: 2.1
+
+ """
+ return _BlockingAsyncContextManager(cm, self)
+
+
+def create_blocking_portal() -> BlockingPortal:
+ """
+ Create a portal for running functions in the event loop thread from external threads.
+
+ Use this function in asynchronous code when you need to allow external threads access to the
+ event loop where your asynchronous code is currently running.
+
+ .. deprecated:: 3.0
+ Use :class:`.BlockingPortal` directly.
+
+ """
+ warn(
+ "create_blocking_portal() has been deprecated -- use anyio.from_thread.BlockingPortal() "
+ "directly",
+ DeprecationWarning,
+ )
+ return BlockingPortal()
+
+
+@contextmanager
+def start_blocking_portal(
+ backend: str = "asyncio", backend_options: dict[str, Any] | None = None
+) -> Generator[BlockingPortal, Any, None]:
+ """
+ Start a new event loop in a new thread and run a blocking portal in its main task.
+
+ The parameters are the same as for :func:`~anyio.run`.
+
+ :param backend: name of the backend
+ :param backend_options: backend options
+ :return: a context manager that yields a blocking portal
+
+ .. versionchanged:: 3.0
+ Usage as a context manager is now required.
+
+ """
+
+ async def run_portal() -> None:
+ async with BlockingPortal() as portal_:
+ if future.set_running_or_notify_cancel():
+ future.set_result(portal_)
+ await portal_.sleep_until_stopped()
+
+ future: Future[BlockingPortal] = Future()
+ with ThreadPoolExecutor(1) as executor:
+ run_future = executor.submit(
+ _eventloop.run,
+ run_portal, # type: ignore[arg-type]
+ backend=backend,
+ backend_options=backend_options,
+ )
+ try:
+ wait(
+ cast(Iterable[Future], [run_future, future]),
+ return_when=FIRST_COMPLETED,
+ )
+ except BaseException:
+ future.cancel()
+ run_future.cancel()
+ raise
+
+ if future.done():
+ portal = future.result()
+ cancel_remaining_tasks = False
+ try:
+ yield portal
+ except BaseException:
+ cancel_remaining_tasks = True
+ raise
+ finally:
+ try:
+ portal.call(portal.stop, cancel_remaining_tasks)
+ except RuntimeError:
+ pass
+
+ run_future.result()
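
A sketch of the blocking-portal workflow from synchronous code, assuming the default asyncio backend:

```python
import anyio
from anyio.from_thread import start_blocking_portal

async def add(x: int, y: int) -> int:
    await anyio.sleep(0)
    return x + y

# Run an event loop in a worker thread and call into it synchronously
with start_blocking_portal() as portal:
    print(portal.call(add, 1, 2))               # 3
    future = portal.start_task_soon(add, 3, 4)  # concurrent.futures.Future
    print(future.result())                      # 7
```
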
diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/lowlevel.py b/Backend/venv/lib/python3.12/site-packages/anyio/lowlevel.py
new file mode 100644
index 00000000..0e908c65
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/anyio/lowlevel.py
@@ -0,0 +1,174 @@
+from __future__ import annotations
+
+import enum
+import sys
+from dataclasses import dataclass
+from typing import Any, Generic, TypeVar, overload
+from weakref import WeakKeyDictionary
+
+from ._core._eventloop import get_asynclib
+
+if sys.version_info >= (3, 8):
+ from typing import Literal
+else:
+ from typing_extensions import Literal
+
+T = TypeVar("T")
+D = TypeVar("D")
+
+
+async def checkpoint() -> None:
+ """
+ Check for cancellation and allow the scheduler to switch to another task.
+
+ Equivalent to (but more efficient than)::
+
+ await checkpoint_if_cancelled()
+ await cancel_shielded_checkpoint()
+
+
+ .. versionadded:: 3.0
+
+ """
+ await get_asynclib().checkpoint()
+
+
+async def checkpoint_if_cancelled() -> None:
+ """
+ Enter a checkpoint if the enclosing cancel scope has been cancelled.
+
+ This does not allow the scheduler to switch to a different task.
+
+ .. versionadded:: 3.0
+
+ """
+ await get_asynclib().checkpoint_if_cancelled()
+
+
+async def cancel_shielded_checkpoint() -> None:
+ """
+ Allow the scheduler to switch to another task but without checking for cancellation.
+
+ Equivalent to (but potentially more efficient than)::
+
+ with CancelScope(shield=True):
+ await checkpoint()
+
+
+ .. versionadded:: 3.0
+
+ """
+ await get_asynclib().cancel_shielded_checkpoint()
+
+
+def current_token() -> object:
+ """Return a backend specific token object that can be used to get back to the event loop."""
+ return get_asynclib().current_token()
+
+
+_run_vars: WeakKeyDictionary[Any, dict[str, Any]] = WeakKeyDictionary()
+_token_wrappers: dict[Any, _TokenWrapper] = {}
+
+
+@dataclass(frozen=True)
+class _TokenWrapper:
+ __slots__ = "_token", "__weakref__"
+ _token: object
+
+
+class _NoValueSet(enum.Enum):
+ NO_VALUE_SET = enum.auto()
+
+
+class RunvarToken(Generic[T]):
+ __slots__ = "_var", "_value", "_redeemed"
+
+ def __init__(self, var: RunVar[T], value: T | Literal[_NoValueSet.NO_VALUE_SET]):
+ self._var = var
+ self._value: T | Literal[_NoValueSet.NO_VALUE_SET] = value
+ self._redeemed = False
+
+
+class RunVar(Generic[T]):
+ """
+ Like a :class:`~contextvars.ContextVar`, except scoped to the running event loop.
+ """
+
+ __slots__ = "_name", "_default"
+
+ NO_VALUE_SET: Literal[_NoValueSet.NO_VALUE_SET] = _NoValueSet.NO_VALUE_SET
+
+ _token_wrappers: set[_TokenWrapper] = set()
+
+ def __init__(
+ self,
+ name: str,
+ default: T | Literal[_NoValueSet.NO_VALUE_SET] = NO_VALUE_SET,
+ ):
+ self._name = name
+ self._default = default
+
+ @property
+ def _current_vars(self) -> dict[str, T]:
+ token = current_token()
+ while True:
+ try:
+ return _run_vars[token]
+ except TypeError:
+ # Happens when the token isn't weakly referenceable (TrioToken).
+ # This workaround does mean that some memory will leak on Trio until the problem
+ # is fixed on their end.
+ token = _TokenWrapper(token)
+ self._token_wrappers.add(token)
+ except KeyError:
+ run_vars = _run_vars[token] = {}
+ return run_vars
+
+ @overload
+ def get(self, default: D) -> T | D:
+ ...
+
+ @overload
+ def get(self) -> T:
+ ...
+
+ def get(
+ self, default: D | Literal[_NoValueSet.NO_VALUE_SET] = NO_VALUE_SET
+ ) -> T | D:
+ try:
+ return self._current_vars[self._name]
+ except KeyError:
+ if default is not RunVar.NO_VALUE_SET:
+ return default
+ elif self._default is not RunVar.NO_VALUE_SET:
+ return self._default
+
+ raise LookupError(
+ f'Run variable "{self._name}" has no value and no default set'
+ )
+
+ def set(self, value: T) -> RunvarToken[T]:
+ current_vars = self._current_vars
+ token = RunvarToken(self, current_vars.get(self._name, RunVar.NO_VALUE_SET))
+ current_vars[self._name] = value
+ return token
+
+ def reset(self, token: RunvarToken[T]) -> None:
+ if token._var is not self:
+ raise ValueError("This token does not belong to this RunVar")
+
+ if token._redeemed:
+ raise ValueError("This token has already been used")
+
+ if token._value is _NoValueSet.NO_VALUE_SET:
+ try:
+ del self._current_vars[self._name]
+ except KeyError:
+ pass
+ else:
+ self._current_vars[self._name] = token._value
+
+ token._redeemed = True
+
+ def __repr__(self) -> str:
+ return f""
diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/py.typed b/Backend/venv/lib/python3.12/site-packages/anyio/py.typed
new file mode 100644
index 00000000..e69de29b
diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/pytest_plugin.py b/Backend/venv/lib/python3.12/site-packages/anyio/pytest_plugin.py
new file mode 100644
index 00000000..044ce691
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/anyio/pytest_plugin.py
@@ -0,0 +1,142 @@
+from __future__ import annotations
+
+from contextlib import contextmanager
+from inspect import isasyncgenfunction, iscoroutinefunction
+from typing import Any, Dict, Generator, Tuple, cast
+
+import pytest
+import sniffio
+
+from ._core._eventloop import get_all_backends, get_asynclib
+from .abc import TestRunner
+
+_current_runner: TestRunner | None = None
+
+
+def extract_backend_and_options(backend: object) -> tuple[str, dict[str, Any]]:
+ if isinstance(backend, str):
+ return backend, {}
+ elif isinstance(backend, tuple) and len(backend) == 2:
+ if isinstance(backend[0], str) and isinstance(backend[1], dict):
+ return cast(Tuple[str, Dict[str, Any]], backend)
+
+ raise TypeError("anyio_backend must be either a string or tuple of (string, dict)")
+
+
+@contextmanager
+def get_runner(
+ backend_name: str, backend_options: dict[str, Any]
+) -> Generator[TestRunner, object, None]:
+ global _current_runner
+ if _current_runner:
+ yield _current_runner
+ return
+
+ asynclib = get_asynclib(backend_name)
+ token = None
+ if sniffio.current_async_library_cvar.get(None) is None:
+ # Since we're in control of the event loop, we can cache the name of the async library
+ token = sniffio.current_async_library_cvar.set(backend_name)
+
+ try:
+ backend_options = backend_options or {}
+ with asynclib.TestRunner(**backend_options) as runner:
+ _current_runner = runner
+ yield runner
+ finally:
+ _current_runner = None
+ if token:
+ sniffio.current_async_library_cvar.reset(token)
+
+
+def pytest_configure(config: Any) -> None:
+ config.addinivalue_line(
+ "markers",
+ "anyio: mark the (coroutine function) test to be run "
+ "asynchronously via anyio.",
+ )
+
+
+def pytest_fixture_setup(fixturedef: Any, request: Any) -> None:
+ def wrapper(*args, anyio_backend, **kwargs): # type: ignore[no-untyped-def]
+ backend_name, backend_options = extract_backend_and_options(anyio_backend)
+ if has_backend_arg:
+ kwargs["anyio_backend"] = anyio_backend
+
+ with get_runner(backend_name, backend_options) as runner:
+ if isasyncgenfunction(func):
+ yield from runner.run_asyncgen_fixture(func, kwargs)
+ else:
+ yield runner.run_fixture(func, kwargs)
+
+ # Only apply this to coroutine functions and async generator functions in requests that involve
+ # the anyio_backend fixture
+ func = fixturedef.func
+ if isasyncgenfunction(func) or iscoroutinefunction(func):
+ if "anyio_backend" in request.fixturenames:
+ has_backend_arg = "anyio_backend" in fixturedef.argnames
+ fixturedef.func = wrapper
+ if not has_backend_arg:
+ fixturedef.argnames += ("anyio_backend",)
+
+
+@pytest.hookimpl(tryfirst=True)
+def pytest_pycollect_makeitem(collector: Any, name: Any, obj: Any) -> None:
+ if collector.istestfunction(obj, name):
+ inner_func = obj.hypothesis.inner_test if hasattr(obj, "hypothesis") else obj
+ if iscoroutinefunction(inner_func):
+ marker = collector.get_closest_marker("anyio")
+ own_markers = getattr(obj, "pytestmark", ())
+ if marker or any(marker.name == "anyio" for marker in own_markers):
+ pytest.mark.usefixtures("anyio_backend")(obj)
+
+
+@pytest.hookimpl(tryfirst=True)
+def pytest_pyfunc_call(pyfuncitem: Any) -> bool | None:
+ def run_with_hypothesis(**kwargs: Any) -> None:
+ with get_runner(backend_name, backend_options) as runner:
+ runner.run_test(original_func, kwargs)
+
+ backend = pyfuncitem.funcargs.get("anyio_backend")
+ if backend:
+ backend_name, backend_options = extract_backend_and_options(backend)
+
+ if hasattr(pyfuncitem.obj, "hypothesis"):
+ # Wrap the inner test function unless it's already wrapped
+ original_func = pyfuncitem.obj.hypothesis.inner_test
+ if original_func.__qualname__ != run_with_hypothesis.__qualname__:
+ if iscoroutinefunction(original_func):
+ pyfuncitem.obj.hypothesis.inner_test = run_with_hypothesis
+
+ return None
+
+ if iscoroutinefunction(pyfuncitem.obj):
+ funcargs = pyfuncitem.funcargs
+ testargs = {arg: funcargs[arg] for arg in pyfuncitem._fixtureinfo.argnames}
+ with get_runner(backend_name, backend_options) as runner:
+ runner.run_test(pyfuncitem.obj, testargs)
+
+ return True
+
+ return None
+
+
+@pytest.fixture(params=get_all_backends())
+def anyio_backend(request: Any) -> Any:
+ return request.param
+
+
+@pytest.fixture
+def anyio_backend_name(anyio_backend: Any) -> str:
+ if isinstance(anyio_backend, str):
+ return anyio_backend
+ else:
+ return anyio_backend[0]
+
+
+@pytest.fixture
+def anyio_backend_options(anyio_backend: Any) -> dict[str, Any]:
+ if isinstance(anyio_backend, str):
+ return {}
+ else:
+ return anyio_backend[1]
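
A sketch of what this plugin enables in a test module: the `anyio` marker runs the coroutine test through the runner, and overriding the parametrized `anyio_backend` fixture pins tests to one backend.

```python
import anyio
import pytest

# Override the parametrized fixture to run only on asyncio
@pytest.fixture
def anyio_backend():
    return "asyncio"

@pytest.mark.anyio
async def test_sleep():
    await anyio.sleep(0)
```
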
diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/streams/__init__.py b/Backend/venv/lib/python3.12/site-packages/anyio/streams/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/__init__.cpython-312.pyc
new file mode 100644
index 00000000..f5c5151d
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/__init__.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/buffered.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/buffered.cpython-312.pyc
new file mode 100644
index 00000000..b8f382c1
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/buffered.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/file.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/file.cpython-312.pyc
new file mode 100644
index 00000000..7adf87bf
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/file.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/memory.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/memory.cpython-312.pyc
new file mode 100644
index 00000000..fde8fc5a
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/memory.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/stapled.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/stapled.cpython-312.pyc
new file mode 100644
index 00000000..52edbeff
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/stapled.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/text.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/text.cpython-312.pyc
new file mode 100644
index 00000000..dfe57d2d
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/text.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/tls.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/tls.cpython-312.pyc
new file mode 100644
index 00000000..6afc6ea3
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/tls.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/streams/buffered.py b/Backend/venv/lib/python3.12/site-packages/anyio/streams/buffered.py
new file mode 100644
index 00000000..11474c16
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/anyio/streams/buffered.py
@@ -0,0 +1,118 @@
+from __future__ import annotations
+
+from dataclasses import dataclass, field
+from typing import Any, Callable, Mapping
+
+from .. import ClosedResourceError, DelimiterNotFound, EndOfStream, IncompleteRead
+from ..abc import AnyByteReceiveStream, ByteReceiveStream
+
+
+@dataclass(eq=False)
+class BufferedByteReceiveStream(ByteReceiveStream):
+ """
+ Wraps any bytes-based receive stream and uses a buffer to provide sophisticated receiving
+ capabilities in the form of a byte stream.
+ """
+
+ receive_stream: AnyByteReceiveStream
+ _buffer: bytearray = field(init=False, default_factory=bytearray)
+ _closed: bool = field(init=False, default=False)
+
+ async def aclose(self) -> None:
+ await self.receive_stream.aclose()
+ self._closed = True
+
+ @property
+ def buffer(self) -> bytes:
+ """The bytes currently in the buffer."""
+ return bytes(self._buffer)
+
+ @property
+ def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]:
+ return self.receive_stream.extra_attributes
+
+ async def receive(self, max_bytes: int = 65536) -> bytes:
+ if self._closed:
+ raise ClosedResourceError
+
+ if self._buffer:
+ chunk = bytes(self._buffer[:max_bytes])
+ del self._buffer[:max_bytes]
+ return chunk
+ elif isinstance(self.receive_stream, ByteReceiveStream):
+ return await self.receive_stream.receive(max_bytes)
+ else:
+ # With a bytes-oriented object stream, we need to handle any surplus bytes we get from
+ # the receive() call
+ chunk = await self.receive_stream.receive()
+ if len(chunk) > max_bytes:
+ # Save the surplus bytes in the buffer
+ self._buffer.extend(chunk[max_bytes:])
+ return chunk[:max_bytes]
+ else:
+ return chunk
+
+ async def receive_exactly(self, nbytes: int) -> bytes:
+ """
+ Read exactly the given amount of bytes from the stream.
+
+ :param nbytes: the number of bytes to read
+ :return: the bytes read
+        :raises ~anyio.IncompleteRead: if the stream was closed before the requested
+            number of bytes could be read from the stream
+
+ """
+ while True:
+ remaining = nbytes - len(self._buffer)
+ if remaining <= 0:
+ retval = self._buffer[:nbytes]
+ del self._buffer[:nbytes]
+ return bytes(retval)
+
+ try:
+ if isinstance(self.receive_stream, ByteReceiveStream):
+ chunk = await self.receive_stream.receive(remaining)
+ else:
+ chunk = await self.receive_stream.receive()
+ except EndOfStream as exc:
+ raise IncompleteRead from exc
+
+ self._buffer.extend(chunk)
+
+ async def receive_until(self, delimiter: bytes, max_bytes: int) -> bytes:
+ """
+ Read from the stream until the delimiter is found or max_bytes have been read.
+
+ :param delimiter: the marker to look for in the stream
+ :param max_bytes: maximum number of bytes that will be read before raising
+ :exc:`~anyio.DelimiterNotFound`
+ :return: the bytes read (not including the delimiter)
+ :raises ~anyio.IncompleteRead: if the stream was closed before the delimiter
+ was found
+ :raises ~anyio.DelimiterNotFound: if the delimiter is not found within the
+ bytes read up to the maximum allowed
+
+ """
+ delimiter_size = len(delimiter)
+ offset = 0
+ while True:
+ # Check if the delimiter can be found in the current buffer
+ index = self._buffer.find(delimiter, offset)
+ if index >= 0:
+ found = self._buffer[:index]
+            del self._buffer[: index + len(delimiter)]
+ return bytes(found)
+
+ # Check if the buffer is already at or over the limit
+ if len(self._buffer) >= max_bytes:
+ raise DelimiterNotFound(max_bytes)
+
+ # Read more data into the buffer from the socket
+ try:
+ data = await self.receive_stream.receive()
+ except EndOfStream as exc:
+ raise IncompleteRead from exc
+
+ # Move the offset forward and add the new data to the buffer
+ offset = max(len(self._buffer) - delimiter_size + 1, 0)
+ self._buffer.extend(data)
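+
+
+# --- Editor's usage sketch (an assumption, not part of upstream anyio) ------
+# Demonstrates delimiter framing and exact-length reads over an in-memory
+# byte stream; runnable via "python -m anyio.streams.buffered".
+if __name__ == "__main__":
+    import anyio
+
+    async def _demo() -> None:
+        send, receive = anyio.create_memory_object_stream(10)
+        buffered = BufferedByteReceiveStream(receive)
+        await send.send(b"hello\nworld")
+        # receive_until strips the delimiter and keeps the surplus buffered
+        assert await buffered.receive_until(b"\n", max_bytes=1024) == b"hello"
+        assert await buffered.receive_exactly(5) == b"world"
+
+    anyio.run(_demo)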
diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/streams/file.py b/Backend/venv/lib/python3.12/site-packages/anyio/streams/file.py
new file mode 100644
index 00000000..2840d40a
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/anyio/streams/file.py
@@ -0,0 +1,147 @@
+from __future__ import annotations
+
+from io import SEEK_SET, UnsupportedOperation
+from os import PathLike
+from pathlib import Path
+from typing import Any, BinaryIO, Callable, Mapping, cast
+
+from .. import (
+ BrokenResourceError,
+ ClosedResourceError,
+ EndOfStream,
+ TypedAttributeSet,
+ to_thread,
+ typed_attribute,
+)
+from ..abc import ByteReceiveStream, ByteSendStream
+
+
+class FileStreamAttribute(TypedAttributeSet):
+ #: the open file descriptor
+ file: BinaryIO = typed_attribute()
+ #: the path of the file on the file system, if available (file must be a real file)
+ path: Path = typed_attribute()
+ #: the file number, if available (file must be a real file or a TTY)
+ fileno: int = typed_attribute()
+
+
+class _BaseFileStream:
+ def __init__(self, file: BinaryIO):
+ self._file = file
+
+ async def aclose(self) -> None:
+ await to_thread.run_sync(self._file.close)
+
+ @property
+ def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]:
+ attributes: dict[Any, Callable[[], Any]] = {
+ FileStreamAttribute.file: lambda: self._file,
+ }
+
+ if hasattr(self._file, "name"):
+ attributes[FileStreamAttribute.path] = lambda: Path(self._file.name)
+
+ try:
+ self._file.fileno()
+ except UnsupportedOperation:
+ pass
+ else:
+ attributes[FileStreamAttribute.fileno] = lambda: self._file.fileno()
+
+ return attributes
+
+
+class FileReadStream(_BaseFileStream, ByteReceiveStream):
+ """
+ A byte stream that reads from a file in the file system.
+
+ :param file: a file that has been opened for reading in binary mode
+
+ .. versionadded:: 3.0
+ """
+
+ @classmethod
+ async def from_path(cls, path: str | PathLike[str]) -> FileReadStream:
+ """
+ Create a file read stream by opening the given file.
+
+ :param path: path of the file to read from
+
+ """
+ file = await to_thread.run_sync(Path(path).open, "rb")
+ return cls(cast(BinaryIO, file))
+
+ async def receive(self, max_bytes: int = 65536) -> bytes:
+ try:
+ data = await to_thread.run_sync(self._file.read, max_bytes)
+ except ValueError:
+ raise ClosedResourceError from None
+ except OSError as exc:
+ raise BrokenResourceError from exc
+
+ if data:
+ return data
+ else:
+ raise EndOfStream
+
+ async def seek(self, position: int, whence: int = SEEK_SET) -> int:
+ """
+ Seek the file to the given position.
+
+ .. seealso:: :meth:`io.IOBase.seek`
+
+ .. note:: Not all file descriptors are seekable.
+
+ :param position: position to seek the file to
+ :param whence: controls how ``position`` is interpreted
+ :return: the new absolute position
+ :raises OSError: if the file is not seekable
+
+ """
+ return await to_thread.run_sync(self._file.seek, position, whence)
+
+ async def tell(self) -> int:
+ """
+ Return the current stream position.
+
+ .. note:: Not all file descriptors are seekable.
+
+ :return: the current absolute position
+ :raises OSError: if the file is not seekable
+
+ """
+ return await to_thread.run_sync(self._file.tell)
+
+
+class FileWriteStream(_BaseFileStream, ByteSendStream):
+ """
+ A byte stream that writes to a file in the file system.
+
+ :param file: a file that has been opened for writing in binary mode
+
+ .. versionadded:: 3.0
+ """
+
+ @classmethod
+ async def from_path(
+ cls, path: str | PathLike[str], append: bool = False
+ ) -> FileWriteStream:
+ """
+ Create a file write stream by opening the given file for writing.
+
+ :param path: path of the file to write to
+ :param append: if ``True``, open the file for appending; if ``False``, any existing file
+ at the given path will be truncated
+
+ """
+ mode = "ab" if append else "wb"
+ file = await to_thread.run_sync(Path(path).open, mode)
+ return cls(cast(BinaryIO, file))
+
+ async def send(self, item: bytes) -> None:
+ try:
+ await to_thread.run_sync(self._file.write, item)
+ except ValueError:
+ raise ClosedResourceError from None
+ except OSError as exc:
+ raise BrokenResourceError from exc
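+
+
+# --- Editor's usage sketch (an assumption, not part of upstream anyio) ------
+# Round-trips a few bytes through FileWriteStream and FileReadStream; both
+# classes inherit async context manager support from anyio's AsyncResource.
+if __name__ == "__main__":
+    import tempfile
+    import anyio
+
+    async def _demo() -> None:
+        with tempfile.NamedTemporaryFile(delete=False) as tmp:
+            path = tmp.name
+        async with await FileWriteStream.from_path(path) as stream:
+            await stream.send(b"hello")
+        async with await FileReadStream.from_path(path) as stream:
+            assert await stream.receive() == b"hello"
+
+    anyio.run(_demo)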
diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/streams/memory.py b/Backend/venv/lib/python3.12/site-packages/anyio/streams/memory.py
new file mode 100644
index 00000000..a6499c13
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/anyio/streams/memory.py
@@ -0,0 +1,279 @@
+from __future__ import annotations
+
+from collections import OrderedDict, deque
+from dataclasses import dataclass, field
+from types import TracebackType
+from typing import Generic, NamedTuple, TypeVar
+
+from .. import (
+ BrokenResourceError,
+ ClosedResourceError,
+ EndOfStream,
+ WouldBlock,
+ get_cancelled_exc_class,
+)
+from .._core._compat import DeprecatedAwaitable
+from ..abc import Event, ObjectReceiveStream, ObjectSendStream
+from ..lowlevel import checkpoint
+
+T_Item = TypeVar("T_Item")
+T_co = TypeVar("T_co", covariant=True)
+T_contra = TypeVar("T_contra", contravariant=True)
+
+
+class MemoryObjectStreamStatistics(NamedTuple):
+ current_buffer_used: int #: number of items stored in the buffer
+ #: maximum number of items that can be stored on this stream (or :data:`math.inf`)
+ max_buffer_size: float
+ open_send_streams: int #: number of unclosed clones of the send stream
+ open_receive_streams: int #: number of unclosed clones of the receive stream
+ tasks_waiting_send: int #: number of tasks blocked on :meth:`MemoryObjectSendStream.send`
+ #: number of tasks blocked on :meth:`MemoryObjectReceiveStream.receive`
+ tasks_waiting_receive: int
+
+
+@dataclass(eq=False)
+class MemoryObjectStreamState(Generic[T_Item]):
+ max_buffer_size: float = field()
+ buffer: deque[T_Item] = field(init=False, default_factory=deque)
+ open_send_channels: int = field(init=False, default=0)
+ open_receive_channels: int = field(init=False, default=0)
+ waiting_receivers: OrderedDict[Event, list[T_Item]] = field(
+ init=False, default_factory=OrderedDict
+ )
+ waiting_senders: OrderedDict[Event, T_Item] = field(
+ init=False, default_factory=OrderedDict
+ )
+
+ def statistics(self) -> MemoryObjectStreamStatistics:
+ return MemoryObjectStreamStatistics(
+ len(self.buffer),
+ self.max_buffer_size,
+ self.open_send_channels,
+ self.open_receive_channels,
+ len(self.waiting_senders),
+ len(self.waiting_receivers),
+ )
+
+
+@dataclass(eq=False)
+class MemoryObjectReceiveStream(Generic[T_co], ObjectReceiveStream[T_co]):
+ _state: MemoryObjectStreamState[T_co]
+ _closed: bool = field(init=False, default=False)
+
+ def __post_init__(self) -> None:
+ self._state.open_receive_channels += 1
+
+ def receive_nowait(self) -> T_co:
+ """
+ Receive the next item if it can be done without waiting.
+
+ :return: the received item
+        :raises ~anyio.ClosedResourceError: if this receive stream has been closed
+ :raises ~anyio.EndOfStream: if the buffer is empty and this stream has been
+ closed from the sending end
+ :raises ~anyio.WouldBlock: if there are no items in the buffer and no tasks
+ waiting to send
+
+ """
+ if self._closed:
+ raise ClosedResourceError
+
+ if self._state.waiting_senders:
+ # Get the item from the next sender
+ send_event, item = self._state.waiting_senders.popitem(last=False)
+ self._state.buffer.append(item)
+ send_event.set()
+
+ if self._state.buffer:
+ return self._state.buffer.popleft()
+ elif not self._state.open_send_channels:
+ raise EndOfStream
+
+ raise WouldBlock
+
+ async def receive(self) -> T_co:
+ await checkpoint()
+ try:
+ return self.receive_nowait()
+ except WouldBlock:
+ # Add ourselves in the queue
+ receive_event = Event()
+ container: list[T_co] = []
+ self._state.waiting_receivers[receive_event] = container
+
+ try:
+ await receive_event.wait()
+ except get_cancelled_exc_class():
+ # Ignore the immediate cancellation if we already received an item, so as not to
+ # lose it
+ if not container:
+ raise
+ finally:
+ self._state.waiting_receivers.pop(receive_event, None)
+
+ if container:
+ return container[0]
+ else:
+ raise EndOfStream
+
+ def clone(self) -> MemoryObjectReceiveStream[T_co]:
+ """
+ Create a clone of this receive stream.
+
+ Each clone can be closed separately. Only when all clones have been closed will the
+ receiving end of the memory stream be considered closed by the sending ends.
+
+ :return: the cloned stream
+
+ """
+ if self._closed:
+ raise ClosedResourceError
+
+ return MemoryObjectReceiveStream(_state=self._state)
+
+ def close(self) -> None:
+ """
+ Close the stream.
+
+ This works the exact same way as :meth:`aclose`, but is provided as a special case for the
+ benefit of synchronous callbacks.
+
+ """
+ if not self._closed:
+ self._closed = True
+ self._state.open_receive_channels -= 1
+ if self._state.open_receive_channels == 0:
+ send_events = list(self._state.waiting_senders.keys())
+ for event in send_events:
+ event.set()
+
+ async def aclose(self) -> None:
+ self.close()
+
+ def statistics(self) -> MemoryObjectStreamStatistics:
+ """
+ Return statistics about the current state of this stream.
+
+ .. versionadded:: 3.0
+ """
+ return self._state.statistics()
+
+ def __enter__(self) -> MemoryObjectReceiveStream[T_co]:
+ return self
+
+ def __exit__(
+ self,
+ exc_type: type[BaseException] | None,
+ exc_val: BaseException | None,
+ exc_tb: TracebackType | None,
+ ) -> None:
+ self.close()
+
+
+@dataclass(eq=False)
+class MemoryObjectSendStream(Generic[T_contra], ObjectSendStream[T_contra]):
+ _state: MemoryObjectStreamState[T_contra]
+ _closed: bool = field(init=False, default=False)
+
+ def __post_init__(self) -> None:
+ self._state.open_send_channels += 1
+
+ def send_nowait(self, item: T_contra) -> DeprecatedAwaitable:
+ """
+ Send an item immediately if it can be done without waiting.
+
+ :param item: the item to send
+ :raises ~anyio.ClosedResourceError: if this send stream has been closed
+ :raises ~anyio.BrokenResourceError: if the stream has been closed from the
+ receiving end
+ :raises ~anyio.WouldBlock: if the buffer is full and there are no tasks waiting
+ to receive
+
+ """
+ if self._closed:
+ raise ClosedResourceError
+ if not self._state.open_receive_channels:
+ raise BrokenResourceError
+
+ if self._state.waiting_receivers:
+ receive_event, container = self._state.waiting_receivers.popitem(last=False)
+ container.append(item)
+ receive_event.set()
+ elif len(self._state.buffer) < self._state.max_buffer_size:
+ self._state.buffer.append(item)
+ else:
+ raise WouldBlock
+
+ return DeprecatedAwaitable(self.send_nowait)
+
+ async def send(self, item: T_contra) -> None:
+ await checkpoint()
+ try:
+ self.send_nowait(item)
+ except WouldBlock:
+ # Wait until there's someone on the receiving end
+ send_event = Event()
+ self._state.waiting_senders[send_event] = item
+ try:
+ await send_event.wait()
+ except BaseException:
+ self._state.waiting_senders.pop(send_event, None) # type: ignore[arg-type]
+ raise
+
+            if send_event in self._state.waiting_senders:
+                # The item was never consumed, so the receiving end must have
+                # been closed while we waited. A membership test (rather than a
+                # truthiness test on pop()) also handles falsy items correctly.
+                del self._state.waiting_senders[send_event]  # type: ignore[arg-type]
+                raise BrokenResourceError
+
+ def clone(self) -> MemoryObjectSendStream[T_contra]:
+ """
+ Create a clone of this send stream.
+
+ Each clone can be closed separately. Only when all clones have been closed will the
+ sending end of the memory stream be considered closed by the receiving ends.
+
+ :return: the cloned stream
+
+ """
+ if self._closed:
+ raise ClosedResourceError
+
+ return MemoryObjectSendStream(_state=self._state)
+
+ def close(self) -> None:
+ """
+ Close the stream.
+
+ This works the exact same way as :meth:`aclose`, but is provided as a special case for the
+ benefit of synchronous callbacks.
+
+ """
+ if not self._closed:
+ self._closed = True
+ self._state.open_send_channels -= 1
+ if self._state.open_send_channels == 0:
+ receive_events = list(self._state.waiting_receivers.keys())
+ self._state.waiting_receivers.clear()
+ for event in receive_events:
+ event.set()
+
+ async def aclose(self) -> None:
+ self.close()
+
+ def statistics(self) -> MemoryObjectStreamStatistics:
+ """
+ Return statistics about the current state of this stream.
+
+ .. versionadded:: 3.0
+ """
+ return self._state.statistics()
+
+ def __enter__(self) -> MemoryObjectSendStream[T_contra]:
+ return self
+
+ def __exit__(
+ self,
+ exc_type: type[BaseException] | None,
+ exc_val: BaseException | None,
+ exc_tb: TracebackType | None,
+ ) -> None:
+ self.close()
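+
+
+# --- Editor's usage sketch (an assumption, not part of upstream anyio) ------
+# Exercises buffering, clone semantics and statistics() through the public
+# anyio.create_memory_object_stream() entry point.
+if __name__ == "__main__":
+    import anyio
+
+    async def _demo() -> None:
+        send, receive = anyio.create_memory_object_stream(max_buffer_size=2)
+        send.send_nowait("a")
+        send.send_nowait("b")
+        assert send.statistics().current_buffer_used == 2
+        clone = receive.clone()
+        receive.close()  # the clone keeps the receiving end open
+        assert await clone.receive() == "a"
+        clone.close()  # last receiver closed: the sending end is now broken
+        try:
+            await send.send("c")
+        except anyio.BrokenResourceError:
+            pass
+
+    anyio.run(_demo)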
diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/streams/stapled.py b/Backend/venv/lib/python3.12/site-packages/anyio/streams/stapled.py
new file mode 100644
index 00000000..1b2862e3
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/anyio/streams/stapled.py
@@ -0,0 +1,140 @@
+from __future__ import annotations
+
+from dataclasses import dataclass
+from typing import Any, Callable, Generic, Mapping, Sequence, TypeVar
+
+from ..abc import (
+ ByteReceiveStream,
+ ByteSendStream,
+ ByteStream,
+ Listener,
+ ObjectReceiveStream,
+ ObjectSendStream,
+ ObjectStream,
+ TaskGroup,
+)
+
+T_Item = TypeVar("T_Item")
+T_Stream = TypeVar("T_Stream")
+
+
+@dataclass(eq=False)
+class StapledByteStream(ByteStream):
+ """
+ Combines two byte streams into a single, bidirectional byte stream.
+
+ Extra attributes will be provided from both streams, with the receive stream providing the
+ values in case of a conflict.
+
+ :param ByteSendStream send_stream: the sending byte stream
+ :param ByteReceiveStream receive_stream: the receiving byte stream
+ """
+
+ send_stream: ByteSendStream
+ receive_stream: ByteReceiveStream
+
+ async def receive(self, max_bytes: int = 65536) -> bytes:
+ return await self.receive_stream.receive(max_bytes)
+
+ async def send(self, item: bytes) -> None:
+ await self.send_stream.send(item)
+
+ async def send_eof(self) -> None:
+ await self.send_stream.aclose()
+
+ async def aclose(self) -> None:
+ await self.send_stream.aclose()
+ await self.receive_stream.aclose()
+
+ @property
+ def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]:
+ return {
+ **self.send_stream.extra_attributes,
+ **self.receive_stream.extra_attributes,
+ }
+
+
+@dataclass(eq=False)
+class StapledObjectStream(Generic[T_Item], ObjectStream[T_Item]):
+ """
+ Combines two object streams into a single, bidirectional object stream.
+
+ Extra attributes will be provided from both streams, with the receive stream providing the
+ values in case of a conflict.
+
+ :param ObjectSendStream send_stream: the sending object stream
+ :param ObjectReceiveStream receive_stream: the receiving object stream
+ """
+
+ send_stream: ObjectSendStream[T_Item]
+ receive_stream: ObjectReceiveStream[T_Item]
+
+ async def receive(self) -> T_Item:
+ return await self.receive_stream.receive()
+
+ async def send(self, item: T_Item) -> None:
+ await self.send_stream.send(item)
+
+ async def send_eof(self) -> None:
+ await self.send_stream.aclose()
+
+ async def aclose(self) -> None:
+ await self.send_stream.aclose()
+ await self.receive_stream.aclose()
+
+ @property
+ def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]:
+ return {
+ **self.send_stream.extra_attributes,
+ **self.receive_stream.extra_attributes,
+ }
+
+
+@dataclass(eq=False)
+class MultiListener(Generic[T_Stream], Listener[T_Stream]):
+ """
+ Combines multiple listeners into one, serving connections from all of them at once.
+
+ Any MultiListeners in the given collection of listeners will have their listeners moved into
+ this one.
+
+ Extra attributes are provided from each listener, with each successive listener overriding any
+ conflicting attributes from the previous one.
+
+ :param listeners: listeners to serve
+ :type listeners: Sequence[Listener[T_Stream]]
+ """
+
+ listeners: Sequence[Listener[T_Stream]]
+
+ def __post_init__(self) -> None:
+ listeners: list[Listener[T_Stream]] = []
+ for listener in self.listeners:
+ if isinstance(listener, MultiListener):
+ listeners.extend(listener.listeners)
+ del listener.listeners[:] # type: ignore[attr-defined]
+ else:
+ listeners.append(listener)
+
+ self.listeners = listeners
+
+ async def serve(
+ self, handler: Callable[[T_Stream], Any], task_group: TaskGroup | None = None
+ ) -> None:
+ from .. import create_task_group
+
+ async with create_task_group() as tg:
+ for listener in self.listeners:
+ tg.start_soon(listener.serve, handler, task_group)
+
+ async def aclose(self) -> None:
+ for listener in self.listeners:
+ await listener.aclose()
+
+ @property
+ def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]:
+ attributes: dict = {}
+ for listener in self.listeners:
+ attributes.update(listener.extra_attributes)
+
+ return attributes
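+
+
+# --- Editor's usage sketch (an assumption, not part of upstream anyio) ------
+# Staples the two halves of a memory object stream into a single duplex
+# stream that echoes back whatever is sent into it.
+if __name__ == "__main__":
+    import anyio
+
+    async def _demo() -> None:
+        send, receive = anyio.create_memory_object_stream(10)
+        stream = StapledObjectStream(send, receive)
+        await stream.send("ping")
+        assert await stream.receive() == "ping"
+        await stream.aclose()
+
+    anyio.run(_demo)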
diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/streams/text.py b/Backend/venv/lib/python3.12/site-packages/anyio/streams/text.py
new file mode 100644
index 00000000..bba2d3f7
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/anyio/streams/text.py
@@ -0,0 +1,143 @@
+from __future__ import annotations
+
+import codecs
+from dataclasses import InitVar, dataclass, field
+from typing import Any, Callable, Mapping
+
+from ..abc import (
+ AnyByteReceiveStream,
+ AnyByteSendStream,
+ AnyByteStream,
+ ObjectReceiveStream,
+ ObjectSendStream,
+ ObjectStream,
+)
+
+
+@dataclass(eq=False)
+class TextReceiveStream(ObjectReceiveStream[str]):
+ """
+ Stream wrapper that decodes bytes to strings using the given encoding.
+
+ Decoding is done using :class:`~codecs.IncrementalDecoder` which returns any completely
+ received unicode characters as soon as they come in.
+
+ :param transport_stream: any bytes-based receive stream
+ :param encoding: character encoding to use for decoding bytes to strings (defaults to
+ ``utf-8``)
+ :param errors: handling scheme for decoding errors (defaults to ``strict``; see the
+ `codecs module documentation`_ for a comprehensive list of options)
+
+ .. _codecs module documentation: https://docs.python.org/3/library/codecs.html#codec-objects
+ """
+
+ transport_stream: AnyByteReceiveStream
+ encoding: InitVar[str] = "utf-8"
+ errors: InitVar[str] = "strict"
+ _decoder: codecs.IncrementalDecoder = field(init=False)
+
+ def __post_init__(self, encoding: str, errors: str) -> None:
+ decoder_class = codecs.getincrementaldecoder(encoding)
+ self._decoder = decoder_class(errors=errors)
+
+ async def receive(self) -> str:
+ while True:
+ chunk = await self.transport_stream.receive()
+ decoded = self._decoder.decode(chunk)
+ if decoded:
+ return decoded
+
+ async def aclose(self) -> None:
+ await self.transport_stream.aclose()
+ self._decoder.reset()
+
+ @property
+ def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]:
+ return self.transport_stream.extra_attributes
+
+
+@dataclass(eq=False)
+class TextSendStream(ObjectSendStream[str]):
+ """
+ Sends strings to the wrapped stream as bytes using the given encoding.
+
+ :param AnyByteSendStream transport_stream: any bytes-based send stream
+ :param str encoding: character encoding to use for encoding strings to bytes (defaults to
+ ``utf-8``)
+ :param str errors: handling scheme for encoding errors (defaults to ``strict``; see the
+ `codecs module documentation`_ for a comprehensive list of options)
+
+ .. _codecs module documentation: https://docs.python.org/3/library/codecs.html#codec-objects
+ """
+
+ transport_stream: AnyByteSendStream
+ encoding: InitVar[str] = "utf-8"
+ errors: str = "strict"
+ _encoder: Callable[..., tuple[bytes, int]] = field(init=False)
+
+ def __post_init__(self, encoding: str) -> None:
+ self._encoder = codecs.getencoder(encoding)
+
+ async def send(self, item: str) -> None:
+ encoded = self._encoder(item, self.errors)[0]
+ await self.transport_stream.send(encoded)
+
+ async def aclose(self) -> None:
+ await self.transport_stream.aclose()
+
+ @property
+ def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]:
+ return self.transport_stream.extra_attributes
+
+
+@dataclass(eq=False)
+class TextStream(ObjectStream[str]):
+ """
+ A bidirectional stream that decodes bytes to strings on receive and encodes strings to bytes on
+ send.
+
+ Extra attributes will be provided from both streams, with the receive stream providing the
+ values in case of a conflict.
+
+ :param AnyByteStream transport_stream: any bytes-based stream
+ :param str encoding: character encoding to use for encoding/decoding strings to/from bytes
+ (defaults to ``utf-8``)
+ :param str errors: handling scheme for encoding errors (defaults to ``strict``; see the
+ `codecs module documentation`_ for a comprehensive list of options)
+
+ .. _codecs module documentation: https://docs.python.org/3/library/codecs.html#codec-objects
+ """
+
+ transport_stream: AnyByteStream
+ encoding: InitVar[str] = "utf-8"
+ errors: InitVar[str] = "strict"
+ _receive_stream: TextReceiveStream = field(init=False)
+ _send_stream: TextSendStream = field(init=False)
+
+ def __post_init__(self, encoding: str, errors: str) -> None:
+ self._receive_stream = TextReceiveStream(
+ self.transport_stream, encoding=encoding, errors=errors
+ )
+ self._send_stream = TextSendStream(
+ self.transport_stream, encoding=encoding, errors=errors
+ )
+
+ async def receive(self) -> str:
+ return await self._receive_stream.receive()
+
+ async def send(self, item: str) -> None:
+ await self._send_stream.send(item)
+
+ async def send_eof(self) -> None:
+ await self.transport_stream.send_eof()
+
+ async def aclose(self) -> None:
+ await self._send_stream.aclose()
+ await self._receive_stream.aclose()
+
+ @property
+ def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]:
+ return {
+ **self._send_stream.extra_attributes,
+ **self._receive_stream.extra_attributes,
+ }
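+
+
+# --- Editor's usage sketch (an assumption, not part of upstream anyio) ------
+# A multi-byte UTF-8 character split across two chunks is only yielded once
+# it is complete, courtesy of the incremental decoder.
+if __name__ == "__main__":
+    import anyio
+
+    async def _demo() -> None:
+        send, receive = anyio.create_memory_object_stream(10)
+        text = TextReceiveStream(receive)
+        await send.send("é".encode()[:1])  # first byte of the 2-byte sequence
+        await send.send("é".encode()[1:])  # second byte completes it
+        assert await text.receive() == "é"
+
+    anyio.run(_demo)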
diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/streams/tls.py b/Backend/venv/lib/python3.12/site-packages/anyio/streams/tls.py
new file mode 100644
index 00000000..9f9e9fd8
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/anyio/streams/tls.py
@@ -0,0 +1,320 @@
+from __future__ import annotations
+
+import logging
+import re
+import ssl
+from dataclasses import dataclass
+from functools import wraps
+from typing import Any, Callable, Mapping, Tuple, TypeVar
+
+from .. import (
+ BrokenResourceError,
+ EndOfStream,
+ aclose_forcefully,
+ get_cancelled_exc_class,
+)
+from .._core._typedattr import TypedAttributeSet, typed_attribute
+from ..abc import AnyByteStream, ByteStream, Listener, TaskGroup
+
+T_Retval = TypeVar("T_Retval")
+_PCTRTT = Tuple[Tuple[str, str], ...]
+_PCTRTTT = Tuple[_PCTRTT, ...]
+
+
+class TLSAttribute(TypedAttributeSet):
+ """Contains Transport Layer Security related attributes."""
+
+ #: the selected ALPN protocol
+ alpn_protocol: str | None = typed_attribute()
+ #: the channel binding for type ``tls-unique``
+ channel_binding_tls_unique: bytes = typed_attribute()
+ #: the selected cipher
+ cipher: tuple[str, str, int] = typed_attribute()
+ #: the peer certificate in dictionary form (see :meth:`ssl.SSLSocket.getpeercert`
+ #: for more information)
+ peer_certificate: dict[str, str | _PCTRTTT | _PCTRTT] | None = typed_attribute()
+ #: the peer certificate in binary form
+ peer_certificate_binary: bytes | None = typed_attribute()
+ #: ``True`` if this is the server side of the connection
+ server_side: bool = typed_attribute()
+ #: ciphers shared by the client during the TLS handshake (``None`` if this is the
+ #: client side)
+ shared_ciphers: list[tuple[str, str, int]] | None = typed_attribute()
+ #: the :class:`~ssl.SSLObject` used for encryption
+ ssl_object: ssl.SSLObject = typed_attribute()
+ #: ``True`` if this stream does (and expects) a closing TLS handshake when the
+ #: stream is being closed
+ standard_compatible: bool = typed_attribute()
+ #: the TLS protocol version (e.g. ``TLSv1.2``)
+ tls_version: str = typed_attribute()
+
+
+@dataclass(eq=False)
+class TLSStream(ByteStream):
+ """
+ A stream wrapper that encrypts all sent data and decrypts received data.
+
+ This class has no public initializer; use :meth:`wrap` instead.
+ All extra attributes from :class:`~TLSAttribute` are supported.
+
+ :var AnyByteStream transport_stream: the wrapped stream
+
+ """
+
+ transport_stream: AnyByteStream
+ standard_compatible: bool
+ _ssl_object: ssl.SSLObject
+ _read_bio: ssl.MemoryBIO
+ _write_bio: ssl.MemoryBIO
+
+ @classmethod
+ async def wrap(
+ cls,
+ transport_stream: AnyByteStream,
+ *,
+ server_side: bool | None = None,
+ hostname: str | None = None,
+ ssl_context: ssl.SSLContext | None = None,
+ standard_compatible: bool = True,
+ ) -> TLSStream:
+ """
+ Wrap an existing stream with Transport Layer Security.
+
+ This performs a TLS handshake with the peer.
+
+ :param transport_stream: a bytes-transporting stream to wrap
+ :param server_side: ``True`` if this is the server side of the connection,
+        ``False`` if this is the client side (if omitted, will be set to ``False``
+        if ``hostname`` has been provided, ``True`` otherwise). Used both for the
+        handshake role and to select the appropriate default context when an
+        explicit context has not been provided.
+ :param hostname: host name of the peer (if host name checking is desired)
+ :param ssl_context: the SSLContext object to use (if not provided, a secure
+ default will be created)
+ :param standard_compatible: if ``False``, skip the closing handshake when closing the
+ connection, and don't raise an exception if the peer does the same
+ :raises ~ssl.SSLError: if the TLS handshake fails
+
+ """
+ if server_side is None:
+ server_side = not hostname
+
+ if not ssl_context:
+ purpose = (
+ ssl.Purpose.CLIENT_AUTH if server_side else ssl.Purpose.SERVER_AUTH
+ )
+ ssl_context = ssl.create_default_context(purpose)
+
+ # Re-enable detection of unexpected EOFs if it was disabled by Python
+ if hasattr(ssl, "OP_IGNORE_UNEXPECTED_EOF"):
+ ssl_context.options &= ~ssl.OP_IGNORE_UNEXPECTED_EOF
+
+ bio_in = ssl.MemoryBIO()
+ bio_out = ssl.MemoryBIO()
+ ssl_object = ssl_context.wrap_bio(
+ bio_in, bio_out, server_side=server_side, server_hostname=hostname
+ )
+ wrapper = cls(
+ transport_stream=transport_stream,
+ standard_compatible=standard_compatible,
+ _ssl_object=ssl_object,
+ _read_bio=bio_in,
+ _write_bio=bio_out,
+ )
+ await wrapper._call_sslobject_method(ssl_object.do_handshake)
+ return wrapper
+
+ async def _call_sslobject_method(
+ self, func: Callable[..., T_Retval], *args: object
+ ) -> T_Retval:
+ while True:
+ try:
+ result = func(*args)
+ except ssl.SSLWantReadError:
+ try:
+ # Flush any pending writes first
+ if self._write_bio.pending:
+ await self.transport_stream.send(self._write_bio.read())
+
+ data = await self.transport_stream.receive()
+ except EndOfStream:
+ self._read_bio.write_eof()
+ except OSError as exc:
+ self._read_bio.write_eof()
+ self._write_bio.write_eof()
+ raise BrokenResourceError from exc
+ else:
+ self._read_bio.write(data)
+ except ssl.SSLWantWriteError:
+ await self.transport_stream.send(self._write_bio.read())
+ except ssl.SSLSyscallError as exc:
+ self._read_bio.write_eof()
+ self._write_bio.write_eof()
+ raise BrokenResourceError from exc
+ except ssl.SSLError as exc:
+ self._read_bio.write_eof()
+ self._write_bio.write_eof()
+ if (
+ isinstance(exc, ssl.SSLEOFError)
+ or "UNEXPECTED_EOF_WHILE_READING" in exc.strerror
+ ):
+ if self.standard_compatible:
+ raise BrokenResourceError from exc
+ else:
+ raise EndOfStream from None
+
+ raise
+ else:
+ # Flush any pending writes first
+ if self._write_bio.pending:
+ await self.transport_stream.send(self._write_bio.read())
+
+ return result
+
+ async def unwrap(self) -> tuple[AnyByteStream, bytes]:
+ """
+ Does the TLS closing handshake.
+
+ :return: a tuple of (wrapped byte stream, bytes left in the read buffer)
+
+ """
+ await self._call_sslobject_method(self._ssl_object.unwrap)
+ self._read_bio.write_eof()
+ self._write_bio.write_eof()
+ return self.transport_stream, self._read_bio.read()
+
+ async def aclose(self) -> None:
+ if self.standard_compatible:
+ try:
+ await self.unwrap()
+ except BaseException:
+ await aclose_forcefully(self.transport_stream)
+ raise
+
+ await self.transport_stream.aclose()
+
+ async def receive(self, max_bytes: int = 65536) -> bytes:
+ data = await self._call_sslobject_method(self._ssl_object.read, max_bytes)
+ if not data:
+ raise EndOfStream
+
+ return data
+
+ async def send(self, item: bytes) -> None:
+ await self._call_sslobject_method(self._ssl_object.write, item)
+
+ async def send_eof(self) -> None:
+ tls_version = self.extra(TLSAttribute.tls_version)
+ match = re.match(r"TLSv(\d+)(?:\.(\d+))?", tls_version)
+ if match:
+ major, minor = int(match.group(1)), int(match.group(2) or 0)
+ if (major, minor) < (1, 3):
+ raise NotImplementedError(
+ f"send_eof() requires at least TLSv1.3; current "
+ f"session uses {tls_version}"
+ )
+
+ raise NotImplementedError(
+ "send_eof() has not yet been implemented for TLS streams"
+ )
+
+ @property
+ def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]:
+ return {
+ **self.transport_stream.extra_attributes,
+ TLSAttribute.alpn_protocol: self._ssl_object.selected_alpn_protocol,
+ TLSAttribute.channel_binding_tls_unique: self._ssl_object.get_channel_binding,
+ TLSAttribute.cipher: self._ssl_object.cipher,
+ TLSAttribute.peer_certificate: lambda: self._ssl_object.getpeercert(False),
+ TLSAttribute.peer_certificate_binary: lambda: self._ssl_object.getpeercert(
+ True
+ ),
+ TLSAttribute.server_side: lambda: self._ssl_object.server_side,
+ TLSAttribute.shared_ciphers: lambda: self._ssl_object.shared_ciphers()
+ if self._ssl_object.server_side
+ else None,
+ TLSAttribute.standard_compatible: lambda: self.standard_compatible,
+ TLSAttribute.ssl_object: lambda: self._ssl_object,
+ TLSAttribute.tls_version: self._ssl_object.version,
+ }
+
+
+@dataclass(eq=False)
+class TLSListener(Listener[TLSStream]):
+ """
+ A convenience listener that wraps another listener and auto-negotiates a TLS session on every
+ accepted connection.
+
+ If the TLS handshake times out or raises an exception, :meth:`handle_handshake_error` is
+ called to do whatever post-mortem processing is deemed necessary.
+
+ Supports only the :attr:`~TLSAttribute.standard_compatible` extra attribute.
+
+ :param Listener listener: the listener to wrap
+ :param ssl_context: the SSL context object
+ :param standard_compatible: a flag passed through to :meth:`TLSStream.wrap`
+ :param handshake_timeout: time limit for the TLS handshake
+ (passed to :func:`~anyio.fail_after`)
+ """
+
+ listener: Listener[Any]
+ ssl_context: ssl.SSLContext
+ standard_compatible: bool = True
+ handshake_timeout: float = 30
+
+ @staticmethod
+ async def handle_handshake_error(exc: BaseException, stream: AnyByteStream) -> None:
+ """
+ Handle an exception raised during the TLS handshake.
+
+ This method does 3 things:
+
+ #. Forcefully closes the original stream
+ #. Logs the exception (unless it was a cancellation exception) using the
+ ``anyio.streams.tls`` logger
+ #. Reraises the exception if it was a base exception or a cancellation exception
+
+ :param exc: the exception
+ :param stream: the original stream
+
+ """
+ await aclose_forcefully(stream)
+
+ # Log all except cancellation exceptions
+ if not isinstance(exc, get_cancelled_exc_class()):
+ logging.getLogger(__name__).exception("Error during TLS handshake")
+
+ # Only reraise base exceptions and cancellation exceptions
+ if not isinstance(exc, Exception) or isinstance(exc, get_cancelled_exc_class()):
+ raise
+
+ async def serve(
+ self,
+ handler: Callable[[TLSStream], Any],
+ task_group: TaskGroup | None = None,
+ ) -> None:
+ @wraps(handler)
+ async def handler_wrapper(stream: AnyByteStream) -> None:
+ from .. import fail_after
+
+ try:
+ with fail_after(self.handshake_timeout):
+ wrapped_stream = await TLSStream.wrap(
+ stream,
+ ssl_context=self.ssl_context,
+ standard_compatible=self.standard_compatible,
+ )
+ except BaseException as exc:
+ await self.handle_handshake_error(exc, stream)
+ else:
+ await handler(wrapped_stream)
+
+ await self.listener.serve(handler_wrapper, task_group)
+
+ async def aclose(self) -> None:
+ await self.listener.aclose()
+
+ @property
+ def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]:
+ return {
+ TLSAttribute.standard_compatible: lambda: self.standard_compatible,
+ }
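+
+
+# --- Editor's usage sketch (an assumption, not part of upstream anyio) ------
+# Client-side wrapping of a TCP stream; needs network access and assumes
+# example.com:443 is reachable. standard_compatible=False skips the closing
+# handshake so the demo cannot hang on an uncooperative peer.
+if __name__ == "__main__":
+    import anyio
+
+    async def _demo() -> None:
+        tcp = await anyio.connect_tcp("example.com", 443)
+        tls = await TLSStream.wrap(
+            tcp, hostname="example.com", standard_compatible=False
+        )
+        print("negotiated:", tls.extra(TLSAttribute.tls_version))
+        await tls.aclose()
+
+    anyio.run(_demo)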
diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/to_process.py b/Backend/venv/lib/python3.12/site-packages/anyio/to_process.py
new file mode 100644
index 00000000..7ba9d441
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/anyio/to_process.py
@@ -0,0 +1,249 @@
+from __future__ import annotations
+
+import os
+import pickle
+import subprocess
+import sys
+from collections import deque
+from importlib.util import module_from_spec, spec_from_file_location
+from typing import Callable, TypeVar, cast
+
+from ._core._eventloop import current_time, get_asynclib, get_cancelled_exc_class
+from ._core._exceptions import BrokenWorkerProcess
+from ._core._subprocesses import open_process
+from ._core._synchronization import CapacityLimiter
+from ._core._tasks import CancelScope, fail_after
+from .abc import ByteReceiveStream, ByteSendStream, Process
+from .lowlevel import RunVar, checkpoint_if_cancelled
+from .streams.buffered import BufferedByteReceiveStream
+
+WORKER_MAX_IDLE_TIME = 300 # 5 minutes
+
+T_Retval = TypeVar("T_Retval")
+_process_pool_workers: RunVar[set[Process]] = RunVar("_process_pool_workers")
+_process_pool_idle_workers: RunVar[deque[tuple[Process, float]]] = RunVar(
+ "_process_pool_idle_workers"
+)
+_default_process_limiter: RunVar[CapacityLimiter] = RunVar("_default_process_limiter")
+
+
+async def run_sync(
+ func: Callable[..., T_Retval],
+ *args: object,
+ cancellable: bool = False,
+ limiter: CapacityLimiter | None = None,
+) -> T_Retval:
+ """
+ Call the given function with the given arguments in a worker process.
+
+ If the ``cancellable`` option is enabled and the task waiting for its completion is cancelled,
+ the worker process running it will be abruptly terminated using SIGKILL (or
+    ``TerminateProcess()`` on Windows).
+
+ :param func: a callable
+ :param args: positional arguments for the callable
+ :param cancellable: ``True`` to allow cancellation of the operation while it's running
+    :param limiter: capacity limiter to use to limit the total number of processes running
+ (if omitted, the default limiter is used)
+ :return: an awaitable that yields the return value of the function.
+
+ """
+
+ async def send_raw_command(pickled_cmd: bytes) -> object:
+ try:
+ await stdin.send(pickled_cmd)
+ response = await buffered.receive_until(b"\n", 50)
+ status, length = response.split(b" ")
+ if status not in (b"RETURN", b"EXCEPTION"):
+ raise RuntimeError(
+ f"Worker process returned unexpected response: {response!r}"
+ )
+
+ pickled_response = await buffered.receive_exactly(int(length))
+ except BaseException as exc:
+ workers.discard(process)
+ try:
+ process.kill()
+ with CancelScope(shield=True):
+ await process.aclose()
+ except ProcessLookupError:
+ pass
+
+ if isinstance(exc, get_cancelled_exc_class()):
+ raise
+ else:
+ raise BrokenWorkerProcess from exc
+
+ retval = pickle.loads(pickled_response)
+ if status == b"EXCEPTION":
+ assert isinstance(retval, BaseException)
+ raise retval
+ else:
+ return retval
+
+ # First pickle the request before trying to reserve a worker process
+ await checkpoint_if_cancelled()
+ request = pickle.dumps(("run", func, args), protocol=pickle.HIGHEST_PROTOCOL)
+
+ # If this is the first run in this event loop thread, set up the necessary variables
+ try:
+ workers = _process_pool_workers.get()
+ idle_workers = _process_pool_idle_workers.get()
+ except LookupError:
+ workers = set()
+ idle_workers = deque()
+ _process_pool_workers.set(workers)
+ _process_pool_idle_workers.set(idle_workers)
+ get_asynclib().setup_process_pool_exit_at_shutdown(workers)
+
+ async with (limiter or current_default_process_limiter()):
+ # Pop processes from the pool (starting from the most recently used) until we find one that
+ # hasn't exited yet
+ process: Process
+ while idle_workers:
+ process, idle_since = idle_workers.pop()
+ if process.returncode is None:
+ stdin = cast(ByteSendStream, process.stdin)
+ buffered = BufferedByteReceiveStream(
+ cast(ByteReceiveStream, process.stdout)
+ )
+
+ # Prune any other workers that have been idle for WORKER_MAX_IDLE_TIME seconds or
+ # longer
+ now = current_time()
+ killed_processes: list[Process] = []
+ while idle_workers:
+ if now - idle_workers[0][1] < WORKER_MAX_IDLE_TIME:
+ break
+
+ process, idle_since = idle_workers.popleft()
+ process.kill()
+ workers.remove(process)
+ killed_processes.append(process)
+
+ with CancelScope(shield=True):
+ for process in killed_processes:
+ await process.aclose()
+
+ break
+
+ workers.remove(process)
+ else:
+ command = [sys.executable, "-u", "-m", __name__]
+ process = await open_process(
+ command, stdin=subprocess.PIPE, stdout=subprocess.PIPE
+ )
+ try:
+ stdin = cast(ByteSendStream, process.stdin)
+ buffered = BufferedByteReceiveStream(
+ cast(ByteReceiveStream, process.stdout)
+ )
+ with fail_after(20):
+ message = await buffered.receive(6)
+
+ if message != b"READY\n":
+ raise BrokenWorkerProcess(
+ f"Worker process returned unexpected response: {message!r}"
+ )
+
+ main_module_path = getattr(sys.modules["__main__"], "__file__", None)
+ pickled = pickle.dumps(
+ ("init", sys.path, main_module_path),
+ protocol=pickle.HIGHEST_PROTOCOL,
+ )
+ await send_raw_command(pickled)
+ except (BrokenWorkerProcess, get_cancelled_exc_class()):
+ raise
+ except BaseException as exc:
+ process.kill()
+ raise BrokenWorkerProcess(
+ "Error during worker process initialization"
+ ) from exc
+
+ workers.add(process)
+
+ with CancelScope(shield=not cancellable):
+ try:
+ return cast(T_Retval, await send_raw_command(request))
+ finally:
+ if process in workers:
+ idle_workers.append((process, current_time()))
+
+
+def current_default_process_limiter() -> CapacityLimiter:
+ """
+ Return the capacity limiter that is used by default to limit the number of worker processes.
+
+ :return: a capacity limiter object
+
+ """
+ try:
+ return _default_process_limiter.get()
+ except LookupError:
+ limiter = CapacityLimiter(os.cpu_count() or 2)
+ _default_process_limiter.set(limiter)
+ return limiter
+
+
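+# Editor's usage sketch (an assumption, not part of upstream anyio). Since
+# this module is re-executed with "-m" to become the worker itself (see the
+# __main__ block below), the example is kept in comment form. ``fib`` is an
+# illustrative, picklable module-level function:
+#
+#     import anyio
+#     from anyio import to_process
+#
+#     def fib(n: int) -> int:
+#         return n if n < 2 else fib(n - 1) + fib(n - 2)
+#
+#     async def main() -> None:
+#         print(await to_process.run_sync(fib, 30))
+#
+#     anyio.run(main)
+
+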
+def process_worker() -> None:
+ # Redirect standard streams to os.devnull so that user code won't interfere with the
+ # parent-worker communication
+ stdin = sys.stdin
+ stdout = sys.stdout
+ sys.stdin = open(os.devnull)
+ sys.stdout = open(os.devnull, "w")
+
+ stdout.buffer.write(b"READY\n")
+ while True:
+ retval = exception = None
+ try:
+ command, *args = pickle.load(stdin.buffer)
+ except EOFError:
+ return
+ except BaseException as exc:
+ exception = exc
+ else:
+ if command == "run":
+ func, args = args
+ try:
+ retval = func(*args)
+ except BaseException as exc:
+ exception = exc
+ elif command == "init":
+ main_module_path: str | None
+ sys.path, main_module_path = args
+ del sys.modules["__main__"]
+ if main_module_path:
+ # Load the parent's main module but as __mp_main__ instead of __main__
+ # (like multiprocessing does) to avoid infinite recursion
+ try:
+ spec = spec_from_file_location("__mp_main__", main_module_path)
+ if spec and spec.loader:
+ main = module_from_spec(spec)
+ spec.loader.exec_module(main)
+ sys.modules["__main__"] = main
+ except BaseException as exc:
+ exception = exc
+
+ try:
+ if exception is not None:
+ status = b"EXCEPTION"
+ pickled = pickle.dumps(exception, pickle.HIGHEST_PROTOCOL)
+ else:
+ status = b"RETURN"
+ pickled = pickle.dumps(retval, pickle.HIGHEST_PROTOCOL)
+ except BaseException as exc:
+ exception = exc
+ status = b"EXCEPTION"
+ pickled = pickle.dumps(exc, pickle.HIGHEST_PROTOCOL)
+
+ stdout.buffer.write(b"%s %d\n" % (status, len(pickled)))
+ stdout.buffer.write(pickled)
+
+ # Respect SIGTERM
+ if isinstance(exception, SystemExit):
+ raise exception
+
+
+if __name__ == "__main__":
+ process_worker()
diff --git a/Backend/venv/lib/python3.12/site-packages/anyio/to_thread.py b/Backend/venv/lib/python3.12/site-packages/anyio/to_thread.py
new file mode 100644
index 00000000..9315d1ec
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/anyio/to_thread.py
@@ -0,0 +1,67 @@
+from __future__ import annotations
+
+from typing import Callable, TypeVar
+from warnings import warn
+
+from ._core._eventloop import get_asynclib
+from .abc import CapacityLimiter
+
+T_Retval = TypeVar("T_Retval")
+
+
+async def run_sync(
+ func: Callable[..., T_Retval],
+ *args: object,
+ cancellable: bool = False,
+ limiter: CapacityLimiter | None = None,
+) -> T_Retval:
+ """
+ Call the given function with the given arguments in a worker thread.
+
+ If the ``cancellable`` option is enabled and the task waiting for its completion is cancelled,
+ the thread will still run its course but its return value (or any raised exception) will be
+ ignored.
+
+ :param func: a callable
+ :param args: positional arguments for the callable
+ :param cancellable: ``True`` to allow cancellation of the operation
+    :param limiter: capacity limiter to use to limit the total number of threads running
+ (if omitted, the default limiter is used)
+ :return: an awaitable that yields the return value of the function.
+
+ """
+ return await get_asynclib().run_sync_in_worker_thread(
+ func, *args, cancellable=cancellable, limiter=limiter
+ )
+
+
+async def run_sync_in_worker_thread(
+ func: Callable[..., T_Retval],
+ *args: object,
+ cancellable: bool = False,
+ limiter: CapacityLimiter | None = None,
+) -> T_Retval:
+ warn(
+ "run_sync_in_worker_thread() has been deprecated, use anyio.to_thread.run_sync() instead",
+ DeprecationWarning,
+ )
+ return await run_sync(func, *args, cancellable=cancellable, limiter=limiter)
+
+
+def current_default_thread_limiter() -> CapacityLimiter:
+ """
+ Return the capacity limiter that is used by default to limit the number of concurrent threads.
+
+ :return: a capacity limiter object
+
+ """
+ return get_asynclib().current_default_thread_limiter()
+
+
+def current_default_worker_thread_limiter() -> CapacityLimiter:
+ warn(
+ "current_default_worker_thread_limiter() has been deprecated, "
+ "use anyio.to_thread.current_default_thread_limiter() instead",
+ DeprecationWarning,
+ )
+ return current_default_thread_limiter()
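+
+
+# --- Editor's usage sketch (an assumption, not part of upstream anyio) ------
+# Offloads a blocking call so that the event loop stays responsive.
+if __name__ == "__main__":
+    import time
+    import anyio
+
+    async def _demo() -> None:
+        await run_sync(time.sleep, 0.1)  # blocks a worker thread, not the loop
+
+    anyio.run(_demo)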
diff --git a/Backend/venv/lib/python3.12/site-packages/bcrypt-4.1.2.dist-info/INSTALLER b/Backend/venv/lib/python3.12/site-packages/bcrypt-4.1.2.dist-info/INSTALLER
new file mode 100644
index 00000000..a1b589e3
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/bcrypt-4.1.2.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/Backend/venv/lib/python3.12/site-packages/bcrypt-4.1.2.dist-info/LICENSE b/Backend/venv/lib/python3.12/site-packages/bcrypt-4.1.2.dist-info/LICENSE
new file mode 100644
index 00000000..11069edd
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/bcrypt-4.1.2.dist-info/LICENSE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+END OF TERMS AND CONDITIONS
+
+APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+Copyright [yyyy] [name of copyright owner]
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
diff --git a/Backend/venv/lib/python3.12/site-packages/bcrypt-4.1.2.dist-info/METADATA b/Backend/venv/lib/python3.12/site-packages/bcrypt-4.1.2.dist-info/METADATA
new file mode 100644
index 00000000..77e35dd8
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/bcrypt-4.1.2.dist-info/METADATA
@@ -0,0 +1,308 @@
+Metadata-Version: 2.1
+Name: bcrypt
+Version: 4.1.2
+Summary: Modern password hashing for your software and your servers
+Author-email: The Python Cryptographic Authority developers
+License: Apache-2.0
+Project-URL: homepage, https://github.com/pyca/bcrypt/
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Requires-Python: >=3.7
+Description-Content-Type: text/x-rst
+License-File: LICENSE
+Provides-Extra: tests
+Requires-Dist: pytest !=3.3.0,>=3.2.1 ; extra == 'tests'
+Provides-Extra: typecheck
+Requires-Dist: mypy ; extra == 'typecheck'
+
+bcrypt
+======
+
+.. image:: https://img.shields.io/pypi/v/bcrypt.svg
+ :target: https://pypi.org/project/bcrypt/
+ :alt: Latest Version
+
+.. image:: https://github.com/pyca/bcrypt/workflows/CI/badge.svg?branch=main
+ :target: https://github.com/pyca/bcrypt/actions?query=workflow%3ACI+branch%3Amain
+
+Acceptable password hashing for your software and your servers (but you should
+really use argon2id or scrypt)
+
+
+Installation
+============
+
+To install bcrypt, simply:
+
+.. code:: bash
+
+ $ pip install bcrypt
+
+Note that bcrypt should build very easily on Linux provided you have a C
+compiler and a Rust compiler (the minimum supported Rust version is 1.56.0).
+
+For Debian and Ubuntu, the following command will ensure that the required dependencies are installed:
+
+.. code:: bash
+
+ $ sudo apt-get install build-essential cargo
+
+For Fedora and RHEL-derivatives, the following command will ensure that the required dependencies are installed:
+
+.. code:: bash
+
+ $ sudo yum install gcc cargo
+
+For Alpine, the following command will ensure that the required dependencies are installed:
+
+.. code:: bash
+
+ $ apk add --update musl-dev gcc cargo
+
+
+Alternatives
+============
+
+While bcrypt remains an acceptable choice for password storage, depending on your specific use case you may also want to consider using scrypt (either via `standard library`_ or `cryptography`_) or argon2id via `argon2_cffi`_.
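+
+For instance, a minimal sketch of the standard-library scrypt route (the
+parameters shown are illustrative defaults, not a tuning recommendation):
+
+.. code:: pycon
+
+    >>> import hashlib, os
+    >>> salt = os.urandom(16)
+    >>> key = hashlib.scrypt(b"password", salt=salt, n=2**14, r=8, p=1)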
+
+Changelog
+=========
+
+4.1.2
+-----
+
+* Publish both ``py37`` and ``py39`` wheels. This should resolve some errors
+ relating to initializing a module multiple times per process.
+
+4.1.1
+-----
+
+* Fixed the type signature on the ``kdf`` method.
+* Fixed packaging bug on Windows.
+* Fixed incompatibility with passlib package detection assumptions.
+
+4.1.0
+-----
+
+* Dropped support for Python 3.6.
+* Bumped MSRV to 1.64. (Note: Rust 1.63 can be used by setting the ``BCRYPT_ALLOW_RUST_163`` environment variable)
+
+4.0.1
+-----
+
+* We now build PyPy ``manylinux`` wheels.
+* Fixed a bug where passing an invalid ``salt`` to ``checkpw`` could result in
+ a ``pyo3_runtime.PanicException``. It now correctly raises a ``ValueError``.
+
+4.0.0
+-----
+
+* ``bcrypt`` is now implemented in Rust. Users building from source will need
+ to have a Rust compiler available. Nothing will change for users downloading
+ wheels.
+* We no longer ship ``manylinux2010`` wheels. Users should upgrade to the latest
+ ``pip`` to ensure this doesn’t cause issues downloading wheels on their
+ platform. We now ship ``manylinux_2_28`` wheels for users on new enough platforms.
+* ``NUL`` bytes are now allowed in inputs.
+
+
+3.2.2
+-----
+
+* Fixed packaging of ``py.typed`` files in wheels so that ``mypy`` works.
+
+3.2.1
+-----
+
+* Added support for compilation on z/OS
+* The next release of ``bcrypt`` will be 4.0 and it will require Rust at
+ compile time, for users building from source. There will be no additional
+ requirement for users who are installing from wheels. Users on most
+ platforms will be able to obtain a wheel by making sure they have an up to
+ date ``pip``. The minimum supported Rust version will be 1.56.0.
+* This will be the final release for which we ship ``manylinux2010`` wheels.
+ Going forward the minimum supported manylinux ABI for our wheels will be
+ ``manylinux2014``. The vast majority of users will continue to receive
+ ``manylinux`` wheels provided they have an up to date ``pip``.
+
+
+3.2.0
+-----
+
+* Added typehints for library functions.
+* Dropped support for Python versions less than 3.6 (2.7, 3.4, 3.5).
+* Shipped ``abi3`` Windows wheels (requires pip >= 20).
+
+3.1.7
+-----
+
+* Set a ``setuptools`` lower bound for PEP517 wheel building.
+* We no longer distribute 32-bit ``manylinux1`` wheels. Continuing to produce
+ them was a maintenance burden.
+
+3.1.6
+-----
+
+* Added support for compilation on Haiku.
+
+3.1.5
+-----
+
+* Added support for compilation on AIX.
+* Dropped Python 2.6 and 3.3 support.
+* Switched to using ``abi3`` wheels for Python 3. If you are not getting a
+ wheel on a compatible platform please upgrade your ``pip`` version.
+
+3.1.4
+-----
+
+* Fixed compilation with mingw and on illumos.
+
+3.1.3
+-----
+* Fixed a compilation issue on Solaris.
+* Added a warning when using too few rounds with ``kdf``.
+
+3.1.2
+-----
+* Fixed a compile issue affecting big endian platforms.
+* Fixed invalid escape sequence warnings on Python 3.6.
+* Fixed building in non-UTF8 environments on Python 2.
+
+3.1.1
+-----
+* Resolved a ``UserWarning`` when used with ``cffi`` 1.8.3.
+
+3.1.0
+-----
+* Added support for ``checkpw``, a convenience method for verifying a password.
+* Ensure that you get a ``$2y$`` hash when you input a ``$2y$`` salt.
+* Fixed a regression where ``$2a`` hashes were vulnerable to a wraparound bug.
+* Fixed compilation under Alpine Linux.
+
+3.0.0
+-----
+* Switched the C backend to code obtained from the OpenBSD project rather than
+ openwall.
+* Added support for ``bcrypt_pbkdf`` via the ``kdf`` function.
+
+2.0.0
+-----
+* Added support for an adjustable prefix when calling ``gensalt``.
+* Switched to CFFI 1.0+.
+
+Usage
+-----
+
+Password Hashing
+~~~~~~~~~~~~~~~~
+
+Hashing and then later checking that a password matches the previous hashed
+password is very simple:
+
+.. code:: pycon
+
+ >>> import bcrypt
+ >>> password = b"super secret password"
+ >>> # Hash a password for the first time, with a randomly-generated salt
+ >>> hashed = bcrypt.hashpw(password, bcrypt.gensalt())
+ >>> # Check that an unhashed password matches one that has previously been
+ >>> # hashed
+ >>> if bcrypt.checkpw(password, hashed):
+ ... print("It Matches!")
+ ... else:
+ ... print("It Does not Match :(")
+
+KDF
+~~~
+
+As of 3.0.0, ``bcrypt`` offers a ``kdf`` function which implements ``bcrypt_pbkdf``.
+This KDF is used in OpenSSH's newer encrypted private key format.
+
+.. code:: pycon
+
+ >>> import bcrypt
+ >>> key = bcrypt.kdf(
+ ... password=b'password',
+ ... salt=b'salt',
+ ... desired_key_bytes=32,
+ ... rounds=100)
+
+
+Adjustable Work Factor
+~~~~~~~~~~~~~~~~~~~~~~
+One of bcrypt's features is an adjustable logarithmic work factor. To adjust
+the work factor, merely pass the desired number of rounds to
+``bcrypt.gensalt(rounds=12)`` (which defaults to 12):
+
+.. code:: pycon
+
+ >>> import bcrypt
+ >>> password = b"super secret password"
+ >>> # Hash a password for the first time, with a certain number of rounds
+ >>> hashed = bcrypt.hashpw(password, bcrypt.gensalt(14))
+    >>> # Check that an unhashed password matches one that has previously been
+ >>> # hashed
+ >>> if bcrypt.checkpw(password, hashed):
+ ... print("It Matches!")
+ ... else:
+ ... print("It Does not Match :(")
+
+
+Adjustable Prefix
+~~~~~~~~~~~~~~~~~
+
+Another one of bcrypt's features is an adjustable prefix that lets you define
+which libraries you'll remain compatible with. To adjust this, pass either
+``2a`` or ``2b`` (the default) to ``bcrypt.gensalt(prefix=b"2b")`` as a bytes
+object.
+
+As of 3.0.0 the ``$2y$`` prefix is still supported in ``hashpw`` but deprecated.
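+
+A minimal sketch (the ``2a`` prefix here is chosen purely for illustration);
+the prefix of the resulting hash reflects the one passed to ``gensalt``:
+
+.. code:: pycon
+
+    >>> import bcrypt
+    >>> hashed = bcrypt.hashpw(b"password", bcrypt.gensalt(prefix=b"2a"))
+    >>> hashed.startswith(b"$2a$")
+    True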
+
+Maximum Password Length
+~~~~~~~~~~~~~~~~~~~~~~~
+
+The bcrypt algorithm only handles passwords up to 72 bytes; any bytes beyond
+that are ignored. To work around this, a common approach is to hash the
+password with a cryptographic hash (such as ``sha256``) and then base64
+encode it to prevent NULL byte problems before hashing the result with
+``bcrypt``:
+
+.. code:: pycon
+
+ >>> password = b"an incredibly long password" * 10
+ >>> hashed = bcrypt.hashpw(
+ ... base64.b64encode(hashlib.sha256(password).digest()),
+ ... bcrypt.gensalt()
+ ... )
+
+Compatibility
+-------------
+
+This library should be compatible with py-bcrypt and it will run on Python
+3.7+ and PyPy 3.
+
+C Code
+------
+
+This library uses code from OpenBSD.
+
+Security
+--------
+
+``bcrypt`` follows the `same security policy as cryptography`_; if you
+identify a vulnerability, we ask you to contact us privately.
+
+.. _`same security policy as cryptography`: https://cryptography.io/en/latest/security.html
+.. _`standard library`: https://docs.python.org/3/library/hashlib.html#hashlib.scrypt
+.. _`argon2_cffi`: https://argon2-cffi.readthedocs.io
+.. _`cryptography`: https://cryptography.io/en/latest/hazmat/primitives/key-derivation-functions/#cryptography.hazmat.primitives.kdf.scrypt.Scrypt
diff --git a/Backend/venv/lib/python3.12/site-packages/bcrypt-4.1.2.dist-info/RECORD b/Backend/venv/lib/python3.12/site-packages/bcrypt-4.1.2.dist-info/RECORD
new file mode 100644
index 00000000..9a13be39
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/bcrypt-4.1.2.dist-info/RECORD
@@ -0,0 +1,12 @@
+bcrypt-4.1.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+bcrypt-4.1.2.dist-info/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
+bcrypt-4.1.2.dist-info/METADATA,sha256=mPfD7FNscexT6eVKc8waYS46ZoVUt6B4zzc9KOjhSbQ,9475
+bcrypt-4.1.2.dist-info/RECORD,,
+bcrypt-4.1.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+bcrypt-4.1.2.dist-info/WHEEL,sha256=fEuQ8QlbrrJCrqZHpLEt4l5e2x7mPqr9tlylZpVIKhY,112
+bcrypt-4.1.2.dist-info/top_level.txt,sha256=BkR_qBzDbSuycMzHWE1vzXrfYecAzUVmQs6G2CukqNI,7
+bcrypt/__init__.py,sha256=zTtuqGGQxDgxcqm1f_0UbbPS6uCl-WxL98gSYDMSUbw,1000
+bcrypt/__init__.pyi,sha256=ITUCB9mPVU8sKUbJQMDUH5YfQXZb1O55F9qvKZR_o8I,333
+bcrypt/__pycache__/__init__.cpython-312.pyc,,
+bcrypt/_bcrypt.abi3.so,sha256=Rr8SORrFq9wGWPie6P2vn_R5T2VCUwtWZQWU_WonARI,2464800
+bcrypt/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
diff --git a/Backend/venv/lib/python3.12/site-packages/bcrypt-4.1.2.dist-info/REQUESTED b/Backend/venv/lib/python3.12/site-packages/bcrypt-4.1.2.dist-info/REQUESTED
new file mode 100644
index 00000000..e69de29b
diff --git a/Backend/venv/lib/python3.12/site-packages/bcrypt-4.1.2.dist-info/WHEEL b/Backend/venv/lib/python3.12/site-packages/bcrypt-4.1.2.dist-info/WHEEL
new file mode 100644
index 00000000..40d9d7a8
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/bcrypt-4.1.2.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.42.0)
+Root-Is-Purelib: false
+Tag: cp39-abi3-manylinux_2_28_x86_64
+
diff --git a/Backend/venv/lib/python3.12/site-packages/bcrypt-4.1.2.dist-info/top_level.txt b/Backend/venv/lib/python3.12/site-packages/bcrypt-4.1.2.dist-info/top_level.txt
new file mode 100644
index 00000000..7f0b6e75
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/bcrypt-4.1.2.dist-info/top_level.txt
@@ -0,0 +1 @@
+bcrypt
diff --git a/Backend/venv/lib/python3.12/site-packages/bcrypt/__init__.py b/Backend/venv/lib/python3.12/site-packages/bcrypt/__init__.py
new file mode 100644
index 00000000..c2019344
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/bcrypt/__init__.py
@@ -0,0 +1,43 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from ._bcrypt import (
+ __author__,
+ __copyright__,
+ __email__,
+ __license__,
+ __summary__,
+ __title__,
+ __uri__,
+ checkpw,
+ gensalt,
+ hashpw,
+ kdf,
+)
+from ._bcrypt import (
+ __version_ex__ as __version__,
+)
+
+__all__ = [
+ "gensalt",
+ "hashpw",
+ "checkpw",
+ "kdf",
+ "__title__",
+ "__summary__",
+ "__uri__",
+ "__version__",
+ "__author__",
+ "__email__",
+ "__license__",
+ "__copyright__",
+]
diff --git a/Backend/venv/lib/python3.12/site-packages/bcrypt/__init__.pyi b/Backend/venv/lib/python3.12/site-packages/bcrypt/__init__.pyi
new file mode 100644
index 00000000..12e4a2ef
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/bcrypt/__init__.pyi
@@ -0,0 +1,10 @@
+def gensalt(rounds: int = 12, prefix: bytes = b"2b") -> bytes: ...
+def hashpw(password: bytes, salt: bytes) -> bytes: ...
+def checkpw(password: bytes, hashed_password: bytes) -> bool: ...
+def kdf(
+ password: bytes,
+ salt: bytes,
+ desired_key_bytes: int,
+ rounds: int,
+ ignore_few_rounds: bool = False,
+) -> bytes: ...
diff --git a/Backend/venv/lib/python3.12/site-packages/bcrypt/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/bcrypt/__pycache__/__init__.cpython-312.pyc
new file mode 100644
index 00000000..6aac4101
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/bcrypt/__pycache__/__init__.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/bcrypt/_bcrypt.abi3.so b/Backend/venv/lib/python3.12/site-packages/bcrypt/_bcrypt.abi3.so
new file mode 100755
index 00000000..31585b7e
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/bcrypt/_bcrypt.abi3.so differ
diff --git a/Backend/venv/lib/python3.12/site-packages/bcrypt/py.typed b/Backend/venv/lib/python3.12/site-packages/bcrypt/py.typed
new file mode 100644
index 00000000..e69de29b
diff --git a/Backend/venv/lib/python3.12/site-packages/cffi-2.0.0.dist-info/INSTALLER b/Backend/venv/lib/python3.12/site-packages/cffi-2.0.0.dist-info/INSTALLER
new file mode 100644
index 00000000..a1b589e3
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cffi-2.0.0.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/Backend/venv/lib/python3.12/site-packages/cffi-2.0.0.dist-info/METADATA b/Backend/venv/lib/python3.12/site-packages/cffi-2.0.0.dist-info/METADATA
new file mode 100644
index 00000000..67508e56
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cffi-2.0.0.dist-info/METADATA
@@ -0,0 +1,68 @@
+Metadata-Version: 2.4
+Name: cffi
+Version: 2.0.0
+Summary: Foreign Function Interface for Python calling C code.
+Author: Armin Rigo, Maciej Fijalkowski
+Maintainer: Matt Davis, Matt Clay, Matti Picus
+License-Expression: MIT
+Project-URL: Documentation, https://cffi.readthedocs.io/
+Project-URL: Changelog, https://cffi.readthedocs.io/en/latest/whatsnew.html
+Project-URL: Downloads, https://github.com/python-cffi/cffi/releases
+Project-URL: Contact, https://groups.google.com/forum/#!forum/python-cffi
+Project-URL: Source Code, https://github.com/python-cffi/cffi
+Project-URL: Issue Tracker, https://github.com/python-cffi/cffi/issues
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
+Classifier: Programming Language :: Python :: 3.14
+Classifier: Programming Language :: Python :: Free Threading :: 2 - Beta
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Requires-Python: >=3.9
+Description-Content-Type: text/markdown
+License-File: LICENSE
+License-File: AUTHORS
+Requires-Dist: pycparser; implementation_name != "PyPy"
+Dynamic: license-file
+
+[CI status](https://github.com/python-cffi/cffi/actions/workflows/ci.yaml?query=branch%3Amain++)
+[Latest version on PyPI](https://pypi.org/project/cffi)
+[Documentation][Documentation]
+
+
+CFFI
+====
+
+Foreign Function Interface for Python calling C code.
+
+Please see the [Documentation], or the uncompiled sources in the `doc/` subdirectory.
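+
+As a minimal taste of the ABI-level interface (illustrative only;
+`ffi.dlopen(None)` loads the standard C library on POSIX; see the
+documentation for the recommended API-level workflow):
+
+    from cffi import FFI
+
+    ffi = FFI()
+    ffi.cdef("size_t strlen(const char *);")  # declare the C function we need
+    C = ffi.dlopen(None)                      # load the C standard library
+    assert C.strlen(b"hello") == 5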
+
+Download
+--------
+
+[Download page](https://github.com/python-cffi/cffi/releases)
+
+Source Code
+-----------
+
+Source code is publicly available on
+[GitHub](https://github.com/python-cffi/cffi).
+
+Contact
+-------
+
+[Mailing list](https://groups.google.com/forum/#!forum/python-cffi)
+
+Testing/development tips
+------------------------
+
+After `git clone` or `wget && tar`, we will get a directory called `cffi` or `cffi-x.x.x`. we call it `repo-directory`. To run tests under CPython, run the following in the `repo-directory`:
+
+ pip install pytest
+ pip install -e . # editable install of CFFI for local development
+ pytest src/c/ testing/
+
+[Documentation]: http://cffi.readthedocs.org/
diff --git a/Backend/venv/lib/python3.12/site-packages/cffi-2.0.0.dist-info/RECORD b/Backend/venv/lib/python3.12/site-packages/cffi-2.0.0.dist-info/RECORD
new file mode 100644
index 00000000..6f822989
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cffi-2.0.0.dist-info/RECORD
@@ -0,0 +1,49 @@
+_cffi_backend.cpython-312-x86_64-linux-gnu.so,sha256=AGLtw5fn9u4Cmwk3BbGlsXG7VZEvQekABMyEGuRZmcE,348808
+cffi-2.0.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+cffi-2.0.0.dist-info/METADATA,sha256=uYzn40F68Im8EtXHNBLZs7FoPM-OxzyYbDWsjJvhujk,2559
+cffi-2.0.0.dist-info/RECORD,,
+cffi-2.0.0.dist-info/WHEEL,sha256=aSgG0F4rGPZtV0iTEIfy6dtHq6g67Lze3uLfk0vWn88,151
+cffi-2.0.0.dist-info/entry_points.txt,sha256=y6jTxnyeuLnL-XJcDv8uML3n6wyYiGRg8MTp_QGJ9Ho,75
+cffi-2.0.0.dist-info/licenses/AUTHORS,sha256=KmemC7-zN1nWfWRf8TG45ta8TK_CMtdR_Kw-2k0xTMg,208
+cffi-2.0.0.dist-info/licenses/LICENSE,sha256=W6JN3FcGf5JJrdZEw6_EGl1tw34jQz73Wdld83Cwr2M,1123
+cffi-2.0.0.dist-info/top_level.txt,sha256=rE7WR3rZfNKxWI9-jn6hsHCAl7MDkB-FmuQbxWjFehQ,19
+cffi/__init__.py,sha256=-ksBQ7MfDzVvbBlV_ftYBWAmEqfA86ljIzMxzaZeAlI,511
+cffi/__pycache__/__init__.cpython-312.pyc,,
+cffi/__pycache__/_imp_emulation.cpython-312.pyc,,
+cffi/__pycache__/_shimmed_dist_utils.cpython-312.pyc,,
+cffi/__pycache__/api.cpython-312.pyc,,
+cffi/__pycache__/backend_ctypes.cpython-312.pyc,,
+cffi/__pycache__/cffi_opcode.cpython-312.pyc,,
+cffi/__pycache__/commontypes.cpython-312.pyc,,
+cffi/__pycache__/cparser.cpython-312.pyc,,
+cffi/__pycache__/error.cpython-312.pyc,,
+cffi/__pycache__/ffiplatform.cpython-312.pyc,,
+cffi/__pycache__/lock.cpython-312.pyc,,
+cffi/__pycache__/model.cpython-312.pyc,,
+cffi/__pycache__/pkgconfig.cpython-312.pyc,,
+cffi/__pycache__/recompiler.cpython-312.pyc,,
+cffi/__pycache__/setuptools_ext.cpython-312.pyc,,
+cffi/__pycache__/vengine_cpy.cpython-312.pyc,,
+cffi/__pycache__/vengine_gen.cpython-312.pyc,,
+cffi/__pycache__/verifier.cpython-312.pyc,,
+cffi/_cffi_errors.h,sha256=zQXt7uR_m8gUW-fI2hJg0KoSkJFwXv8RGUkEDZ177dQ,3908
+cffi/_cffi_include.h,sha256=Exhmgm9qzHWzWivjfTe0D7Xp4rPUkVxdNuwGhMTMzbw,15055
+cffi/_embedding.h,sha256=Ai33FHblE7XSpHOCp8kPcWwN5_9BV14OvN0JVa6ITpw,18786
+cffi/_imp_emulation.py,sha256=RxREG8zAbI2RPGBww90u_5fi8sWdahpdipOoPzkp7C0,2960
+cffi/_shimmed_dist_utils.py,sha256=Bjj2wm8yZbvFvWEx5AEfmqaqZyZFhYfoyLLQHkXZuao,2230
+cffi/api.py,sha256=alBv6hZQkjpmZplBphdaRn2lPO9-CORs_M7ixabvZWI,42169
+cffi/backend_ctypes.py,sha256=h5ZIzLc6BFVXnGyc9xPqZWUS7qGy7yFSDqXe68Sa8z4,42454
+cffi/cffi_opcode.py,sha256=JDV5l0R0_OadBX_uE7xPPTYtMdmpp8I9UYd6av7aiDU,5731
+cffi/commontypes.py,sha256=7N6zPtCFlvxXMWhHV08psUjdYIK2XgsN3yo5dgua_v4,2805
+cffi/cparser.py,sha256=QUTfmlL-aO-MYR8bFGlvAUHc36OQr7XYLe0WLkGFjRo,44790
+cffi/error.py,sha256=v6xTiS4U0kvDcy4h_BDRo5v39ZQuj-IMRYLv5ETddZs,877
+cffi/ffiplatform.py,sha256=avxFjdikYGJoEtmJO7ewVmwG_VEVl6EZ_WaNhZYCqv4,3584
+cffi/lock.py,sha256=l9TTdwMIMpi6jDkJGnQgE9cvTIR7CAntIJr8EGHt3pY,747
+cffi/model.py,sha256=W30UFQZE73jL5Mx5N81YT77us2W2iJjTm0XYfnwz1cg,21797
+cffi/parse_c_type.h,sha256=OdwQfwM9ktq6vlCB43exFQmxDBtj2MBNdK8LYl15tjw,5976
+cffi/pkgconfig.py,sha256=LP1w7vmWvmKwyqLaU1Z243FOWGNQMrgMUZrvgFuOlco,4374
+cffi/recompiler.py,sha256=78J6lMEEOygXNmjN9-fOFFO3j7eW-iFxSrxfvQb54bY,65509
+cffi/setuptools_ext.py,sha256=0rCwBJ1W7FHWtiMKfNXsSST88V8UXrui5oeXFlDNLG8,9411
+cffi/vengine_cpy.py,sha256=oyQKD23kpE0aChUKA8Jg0e723foPiYzLYEdb-J0MiNs,43881
+cffi/vengine_gen.py,sha256=DUlEIrDiVin1Pnhn1sfoamnS5NLqfJcOdhRoeSNeJRg,26939
+cffi/verifier.py,sha256=oX8jpaohg2Qm3aHcznidAdvrVm5N4sQYG0a3Eo5mIl4,11182
diff --git a/Backend/venv/lib/python3.12/site-packages/cffi-2.0.0.dist-info/WHEEL b/Backend/venv/lib/python3.12/site-packages/cffi-2.0.0.dist-info/WHEEL
new file mode 100644
index 00000000..e21e9f2f
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cffi-2.0.0.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: setuptools (80.9.0)
+Root-Is-Purelib: false
+Tag: cp312-cp312-manylinux_2_17_x86_64
+Tag: cp312-cp312-manylinux2014_x86_64
+
diff --git a/Backend/venv/lib/python3.12/site-packages/cffi-2.0.0.dist-info/entry_points.txt b/Backend/venv/lib/python3.12/site-packages/cffi-2.0.0.dist-info/entry_points.txt
new file mode 100644
index 00000000..4b0274f2
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cffi-2.0.0.dist-info/entry_points.txt
@@ -0,0 +1,2 @@
+[distutils.setup_keywords]
+cffi_modules = cffi.setuptools_ext:cffi_modules
diff --git a/Backend/venv/lib/python3.12/site-packages/cffi-2.0.0.dist-info/licenses/AUTHORS b/Backend/venv/lib/python3.12/site-packages/cffi-2.0.0.dist-info/licenses/AUTHORS
new file mode 100644
index 00000000..370a25d3
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cffi-2.0.0.dist-info/licenses/AUTHORS
@@ -0,0 +1,8 @@
+This package has been mostly done by Armin Rigo with help from
+Maciej Fijałkowski. The idea is heavily based (although not directly
+copied) from LuaJIT ffi by Mike Pall.
+
+
+Other contributors:
+
+ Google Inc.
diff --git a/Backend/venv/lib/python3.12/site-packages/cffi-2.0.0.dist-info/licenses/LICENSE b/Backend/venv/lib/python3.12/site-packages/cffi-2.0.0.dist-info/licenses/LICENSE
new file mode 100644
index 00000000..0a1dbfb0
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cffi-2.0.0.dist-info/licenses/LICENSE
@@ -0,0 +1,23 @@
+
+Except when otherwise stated (look for LICENSE files in directories or
+information at the beginning of each file) all software and
+documentation is licensed as follows:
+
+ MIT No Attribution
+
+ Permission is hereby granted, free of charge, to any person
+ obtaining a copy of this software and associated documentation
+ files (the "Software"), to deal in the Software without
+ restriction, including without limitation the rights to use,
+ copy, modify, merge, publish, distribute, sublicense, and/or
+ sell copies of the Software, and to permit persons to whom the
+ Software is furnished to do so.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+ OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+ THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+ DEALINGS IN THE SOFTWARE.
+
diff --git a/Backend/venv/lib/python3.12/site-packages/cffi-2.0.0.dist-info/top_level.txt b/Backend/venv/lib/python3.12/site-packages/cffi-2.0.0.dist-info/top_level.txt
new file mode 100644
index 00000000..f6457795
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cffi-2.0.0.dist-info/top_level.txt
@@ -0,0 +1,2 @@
+_cffi_backend
+cffi
diff --git a/Backend/venv/lib/python3.12/site-packages/cffi/__init__.py b/Backend/venv/lib/python3.12/site-packages/cffi/__init__.py
new file mode 100644
index 00000000..c99ec3d4
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cffi/__init__.py
@@ -0,0 +1,14 @@
+__all__ = ['FFI', 'VerificationError', 'VerificationMissing', 'CDefError',
+ 'FFIError']
+
+from .api import FFI
+from .error import CDefError, FFIError, VerificationError, VerificationMissing
+from .error import PkgConfigError
+
+__version__ = "2.0.0"
+__version_info__ = (2, 0, 0)
+
+# The verifier module file names are based on the CRC32 of a string that
+# contains the following version number. It may be older than __version__
+# if nothing is clearly incompatible.
+__version_verifier_modules__ = "0.8.6"
diff --git a/Backend/venv/lib/python3.12/site-packages/cffi/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cffi/__pycache__/__init__.cpython-312.pyc
new file mode 100644
index 00000000..31e47947
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cffi/__pycache__/__init__.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cffi/__pycache__/_imp_emulation.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cffi/__pycache__/_imp_emulation.cpython-312.pyc
new file mode 100644
index 00000000..778acda5
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cffi/__pycache__/_imp_emulation.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cffi/__pycache__/_shimmed_dist_utils.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cffi/__pycache__/_shimmed_dist_utils.cpython-312.pyc
new file mode 100644
index 00000000..a42e71a7
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cffi/__pycache__/_shimmed_dist_utils.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cffi/__pycache__/api.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cffi/__pycache__/api.cpython-312.pyc
new file mode 100644
index 00000000..b64aeb32
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cffi/__pycache__/api.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cffi/__pycache__/backend_ctypes.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cffi/__pycache__/backend_ctypes.cpython-312.pyc
new file mode 100644
index 00000000..5732943e
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cffi/__pycache__/backend_ctypes.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cffi/__pycache__/cffi_opcode.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cffi/__pycache__/cffi_opcode.cpython-312.pyc
new file mode 100644
index 00000000..9f1d6613
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cffi/__pycache__/cffi_opcode.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cffi/__pycache__/commontypes.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cffi/__pycache__/commontypes.cpython-312.pyc
new file mode 100644
index 00000000..70ec73bb
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cffi/__pycache__/commontypes.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cffi/__pycache__/cparser.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cffi/__pycache__/cparser.cpython-312.pyc
new file mode 100644
index 00000000..dc09fdb0
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cffi/__pycache__/cparser.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cffi/__pycache__/error.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cffi/__pycache__/error.cpython-312.pyc
new file mode 100644
index 00000000..d43bb443
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cffi/__pycache__/error.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cffi/__pycache__/ffiplatform.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cffi/__pycache__/ffiplatform.cpython-312.pyc
new file mode 100644
index 00000000..a87e3dae
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cffi/__pycache__/ffiplatform.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cffi/__pycache__/lock.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cffi/__pycache__/lock.cpython-312.pyc
new file mode 100644
index 00000000..5886038e
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cffi/__pycache__/lock.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cffi/__pycache__/model.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cffi/__pycache__/model.cpython-312.pyc
new file mode 100644
index 00000000..bf6bf35b
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cffi/__pycache__/model.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cffi/__pycache__/pkgconfig.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cffi/__pycache__/pkgconfig.cpython-312.pyc
new file mode 100644
index 00000000..4bd39a49
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cffi/__pycache__/pkgconfig.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cffi/__pycache__/recompiler.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cffi/__pycache__/recompiler.cpython-312.pyc
new file mode 100644
index 00000000..77385a78
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cffi/__pycache__/recompiler.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cffi/__pycache__/setuptools_ext.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cffi/__pycache__/setuptools_ext.cpython-312.pyc
new file mode 100644
index 00000000..a6567510
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cffi/__pycache__/setuptools_ext.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cffi/__pycache__/vengine_cpy.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cffi/__pycache__/vengine_cpy.cpython-312.pyc
new file mode 100644
index 00000000..47c249df
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cffi/__pycache__/vengine_cpy.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cffi/__pycache__/vengine_gen.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cffi/__pycache__/vengine_gen.cpython-312.pyc
new file mode 100644
index 00000000..373c1e22
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cffi/__pycache__/vengine_gen.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cffi/__pycache__/verifier.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cffi/__pycache__/verifier.cpython-312.pyc
new file mode 100644
index 00000000..4f5cd407
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cffi/__pycache__/verifier.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cffi/_cffi_errors.h b/Backend/venv/lib/python3.12/site-packages/cffi/_cffi_errors.h
new file mode 100644
index 00000000..158e0590
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cffi/_cffi_errors.h
@@ -0,0 +1,149 @@
+#ifndef CFFI_MESSAGEBOX
+# ifdef _MSC_VER
+# define CFFI_MESSAGEBOX 1
+# else
+# define CFFI_MESSAGEBOX 0
+# endif
+#endif
+
+
+#if CFFI_MESSAGEBOX
+/* Windows only: logic to take the Python-CFFI embedding logic
+ initialization errors and display them in a background thread
+ with MessageBox. The idea is that if the whole program closes
+ as a result of this problem, then likely it is already a console
+ program and you can read the stderr output in the console too.
+ If it is not a console program, then it will likely show its own
+ dialog to complain, or generally not abruptly close, and for this
+ case the background thread should stay alive.
+*/
+static void *volatile _cffi_bootstrap_text;
+
+static PyObject *_cffi_start_error_capture(void)
+{
+ PyObject *result = NULL;
+ PyObject *x, *m, *bi;
+
+ if (InterlockedCompareExchangePointer(&_cffi_bootstrap_text,
+ (void *)1, NULL) != NULL)
+ return (PyObject *)1;
+
+ m = PyImport_AddModule("_cffi_error_capture");
+ if (m == NULL)
+ goto error;
+
+ result = PyModule_GetDict(m);
+ if (result == NULL)
+ goto error;
+
+#if PY_MAJOR_VERSION >= 3
+ bi = PyImport_ImportModule("builtins");
+#else
+ bi = PyImport_ImportModule("__builtin__");
+#endif
+ if (bi == NULL)
+ goto error;
+ PyDict_SetItemString(result, "__builtins__", bi);
+ Py_DECREF(bi);
+
+ x = PyRun_String(
+ "import sys\n"
+ "class FileLike:\n"
+ " def write(self, x):\n"
+ " try:\n"
+ " of.write(x)\n"
+ " except: pass\n"
+ " self.buf += x\n"
+ " def flush(self):\n"
+ " pass\n"
+ "fl = FileLike()\n"
+ "fl.buf = ''\n"
+ "of = sys.stderr\n"
+ "sys.stderr = fl\n"
+ "def done():\n"
+ " sys.stderr = of\n"
+ " return fl.buf\n", /* make sure the returned value stays alive */
+ Py_file_input,
+ result, result);
+ Py_XDECREF(x);
+
+ error:
+ if (PyErr_Occurred())
+ {
+ PyErr_WriteUnraisable(Py_None);
+ PyErr_Clear();
+ }
+ return result;
+}
+
+#pragma comment(lib, "user32.lib")
+
+static DWORD WINAPI _cffi_bootstrap_dialog(LPVOID ignored)
+{
+ Sleep(666); /* may be interrupted if the whole process is closing */
+#if PY_MAJOR_VERSION >= 3
+ MessageBoxW(NULL, (wchar_t *)_cffi_bootstrap_text,
+ L"Python-CFFI error",
+ MB_OK | MB_ICONERROR);
+#else
+ MessageBoxA(NULL, (char *)_cffi_bootstrap_text,
+ "Python-CFFI error",
+ MB_OK | MB_ICONERROR);
+#endif
+ _cffi_bootstrap_text = NULL;
+ return 0;
+}
+
+static void _cffi_stop_error_capture(PyObject *ecap)
+{
+ PyObject *s;
+ void *text;
+
+ if (ecap == (PyObject *)1)
+ return;
+
+ if (ecap == NULL)
+ goto error;
+
+ s = PyRun_String("done()", Py_eval_input, ecap, ecap);
+ if (s == NULL)
+ goto error;
+
+ /* Show a dialog box, but in a background thread, and
+ never show multiple dialog boxes at once. */
+#if PY_MAJOR_VERSION >= 3
+ text = PyUnicode_AsWideCharString(s, NULL);
+#else
+ text = PyString_AsString(s);
+#endif
+
+ _cffi_bootstrap_text = text;
+
+ if (text != NULL)
+ {
+ HANDLE h;
+ h = CreateThread(NULL, 0, _cffi_bootstrap_dialog,
+ NULL, 0, NULL);
+ if (h != NULL)
+ CloseHandle(h);
+ }
+ /* decref the string, but it should stay alive as 'fl.buf'
+ in the small module above. It will really be freed only if
+ we later get another similar error. So it's a leak of at
+ most one copy of the small module. That's fine for this
+ situation which is usually a "fatal error" anyway. */
+ Py_DECREF(s);
+ PyErr_Clear();
+ return;
+
+ error:
+ _cffi_bootstrap_text = NULL;
+ PyErr_Clear();
+}
+
+#else
+
+static PyObject *_cffi_start_error_capture(void) { return NULL; }
+static void _cffi_stop_error_capture(PyObject *ecap) { }
+
+#endif
diff --git a/Backend/venv/lib/python3.12/site-packages/cffi/_cffi_include.h b/Backend/venv/lib/python3.12/site-packages/cffi/_cffi_include.h
new file mode 100644
index 00000000..908a1d73
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cffi/_cffi_include.h
@@ -0,0 +1,389 @@
+#define _CFFI_
+
+/* We try to define Py_LIMITED_API before including Python.h.
+
+ Mess: we can only define it if Py_DEBUG, Py_TRACE_REFS and
+ Py_REF_DEBUG are not defined. This is a best-effort approximation:
+ we can learn about Py_DEBUG from pyconfig.h, but it is unclear if
+ the same works for the other two macros. Py_DEBUG implies them,
+ but not the other way around.
+
+ The implementation is messy (issue #350): on Windows, with _MSC_VER,
+ we have to define Py_LIMITED_API even before including pyconfig.h.
+ In that case, we guess what pyconfig.h will do to the macros above,
+ and check our guess after the #include.
+
+ Note that on Windows, with CPython 3.x, you need >= 3.5 and virtualenv
+ version >= 16.0.0. With older versions of either, you don't get a
+ copy of PYTHON3.DLL in the virtualenv. We can't check the version of
+ CPython *before* we even include pyconfig.h. ffi.set_source() puts
+ a ``#define _CFFI_NO_LIMITED_API'' at the start of this file if it is
+ running on Windows < 3.5, as an attempt at fixing it, but that's
+ arguably wrong because it may not be the target version of Python.
+ Still better than nothing I guess. As another workaround, you can
+ remove the definition of Py_LIMITED_API here.
+
+ See also 'py_limited_api' in cffi/setuptools_ext.py.
+*/
+#if !defined(_CFFI_USE_EMBEDDING) && !defined(Py_LIMITED_API)
+# ifdef _MSC_VER
+# if !defined(_DEBUG) && !defined(Py_DEBUG) && !defined(Py_TRACE_REFS) && !defined(Py_REF_DEBUG) && !defined(_CFFI_NO_LIMITED_API)
+# define Py_LIMITED_API
+# endif
+# include <pyconfig.h>
+ /* sanity-check: Py_LIMITED_API will cause crashes if any of these
+ are also defined. Normally, the Python file PC/pyconfig.h does not
+ cause any of these to be defined, with the exception that _DEBUG
+ causes Py_DEBUG. Double-check that. */
+# ifdef Py_LIMITED_API
+# if defined(Py_DEBUG)
+# error "pyconfig.h unexpectedly defines Py_DEBUG, but Py_LIMITED_API is set"
+# endif
+# if defined(Py_TRACE_REFS)
+# error "pyconfig.h unexpectedly defines Py_TRACE_REFS, but Py_LIMITED_API is set"
+# endif
+# if defined(Py_REF_DEBUG)
+# error "pyconfig.h unexpectedly defines Py_REF_DEBUG, but Py_LIMITED_API is set"
+# endif
+# endif
+# else
+# include <pyconfig.h>
+# if !defined(Py_DEBUG) && !defined(Py_TRACE_REFS) && !defined(Py_REF_DEBUG) && !defined(_CFFI_NO_LIMITED_API)
+# define Py_LIMITED_API
+# endif
+# endif
+#endif
+
+#include <Python.h>
+#ifdef __cplusplus
+extern "C" {
+#endif
+#include <stddef.h>
+#include "parse_c_type.h"
+
+/* this block of #ifs should be kept exactly identical between
+ c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py
+ and cffi/_cffi_include.h */
+#if defined(_MSC_VER)
+# include <malloc.h> /* for alloca() */
+# if _MSC_VER < 1600 /* MSVC < 2010 */
+ typedef __int8 int8_t;
+ typedef __int16 int16_t;
+ typedef __int32 int32_t;
+ typedef __int64 int64_t;
+ typedef unsigned __int8 uint8_t;
+ typedef unsigned __int16 uint16_t;
+ typedef unsigned __int32 uint32_t;
+ typedef unsigned __int64 uint64_t;
+ typedef __int8 int_least8_t;
+ typedef __int16 int_least16_t;
+ typedef __int32 int_least32_t;
+ typedef __int64 int_least64_t;
+ typedef unsigned __int8 uint_least8_t;
+ typedef unsigned __int16 uint_least16_t;
+ typedef unsigned __int32 uint_least32_t;
+ typedef unsigned __int64 uint_least64_t;
+ typedef __int8 int_fast8_t;
+ typedef __int16 int_fast16_t;
+ typedef __int32 int_fast32_t;
+ typedef __int64 int_fast64_t;
+ typedef unsigned __int8 uint_fast8_t;
+ typedef unsigned __int16 uint_fast16_t;
+ typedef unsigned __int32 uint_fast32_t;
+ typedef unsigned __int64 uint_fast64_t;
+ typedef __int64 intmax_t;
+ typedef unsigned __int64 uintmax_t;
+# else
+# include <stdint.h>
+# endif
+# if _MSC_VER < 1800 /* MSVC < 2013 */
+# ifndef __cplusplus
+ typedef unsigned char _Bool;
+# endif
+# endif
+# define _cffi_float_complex_t _Fcomplex /* include <complex.h> for it */
+# define _cffi_double_complex_t _Dcomplex /* include <complex.h> for it */
+#else
+# include <stdint.h>
+# if (defined (__SVR4) && defined (__sun)) || defined(_AIX) || defined(__hpux)
+# include <alloca.h>
+# endif
+# define _cffi_float_complex_t float _Complex
+# define _cffi_double_complex_t double _Complex
+#endif
+
+#ifdef __GNUC__
+# define _CFFI_UNUSED_FN __attribute__((unused))
+#else
+# define _CFFI_UNUSED_FN /* nothing */
+#endif
+
+#ifdef __cplusplus
+# ifndef _Bool
+ typedef bool _Bool; /* semi-hackish: C++ has no _Bool; bool is builtin */
+# endif
+#endif
+
+/********** CPython-specific section **********/
+#ifndef PYPY_VERSION
+
+
+#if PY_MAJOR_VERSION >= 3
+# define PyInt_FromLong PyLong_FromLong
+#endif
+
+#define _cffi_from_c_double PyFloat_FromDouble
+#define _cffi_from_c_float PyFloat_FromDouble
+#define _cffi_from_c_long PyInt_FromLong
+#define _cffi_from_c_ulong PyLong_FromUnsignedLong
+#define _cffi_from_c_longlong PyLong_FromLongLong
+#define _cffi_from_c_ulonglong PyLong_FromUnsignedLongLong
+#define _cffi_from_c__Bool PyBool_FromLong
+
+#define _cffi_to_c_double PyFloat_AsDouble
+#define _cffi_to_c_float PyFloat_AsDouble
+
+#define _cffi_from_c_int(x, type) \
+ (((type)-1) > 0 ? /* unsigned */ \
+ (sizeof(type) < sizeof(long) ? \
+ PyInt_FromLong((long)x) : \
+ sizeof(type) == sizeof(long) ? \
+ PyLong_FromUnsignedLong((unsigned long)x) : \
+ PyLong_FromUnsignedLongLong((unsigned long long)x)) : \
+ (sizeof(type) <= sizeof(long) ? \
+ PyInt_FromLong((long)x) : \
+ PyLong_FromLongLong((long long)x)))
+
+#define _cffi_to_c_int(o, type) \
+ ((type)( \
+ sizeof(type) == 1 ? (((type)-1) > 0 ? (type)_cffi_to_c_u8(o) \
+ : (type)_cffi_to_c_i8(o)) : \
+ sizeof(type) == 2 ? (((type)-1) > 0 ? (type)_cffi_to_c_u16(o) \
+ : (type)_cffi_to_c_i16(o)) : \
+ sizeof(type) == 4 ? (((type)-1) > 0 ? (type)_cffi_to_c_u32(o) \
+ : (type)_cffi_to_c_i32(o)) : \
+ sizeof(type) == 8 ? (((type)-1) > 0 ? (type)_cffi_to_c_u64(o) \
+ : (type)_cffi_to_c_i64(o)) : \
+ (Py_FatalError("unsupported size for type " #type), (type)0)))
+
+#define _cffi_to_c_i8 \
+ ((int(*)(PyObject *))_cffi_exports[1])
+#define _cffi_to_c_u8 \
+ ((int(*)(PyObject *))_cffi_exports[2])
+#define _cffi_to_c_i16 \
+ ((int(*)(PyObject *))_cffi_exports[3])
+#define _cffi_to_c_u16 \
+ ((int(*)(PyObject *))_cffi_exports[4])
+#define _cffi_to_c_i32 \
+ ((int(*)(PyObject *))_cffi_exports[5])
+#define _cffi_to_c_u32 \
+ ((unsigned int(*)(PyObject *))_cffi_exports[6])
+#define _cffi_to_c_i64 \
+ ((long long(*)(PyObject *))_cffi_exports[7])
+#define _cffi_to_c_u64 \
+ ((unsigned long long(*)(PyObject *))_cffi_exports[8])
+#define _cffi_to_c_char \
+ ((int(*)(PyObject *))_cffi_exports[9])
+#define _cffi_from_c_pointer \
+ ((PyObject *(*)(char *, struct _cffi_ctypedescr *))_cffi_exports[10])
+#define _cffi_to_c_pointer \
+ ((char *(*)(PyObject *, struct _cffi_ctypedescr *))_cffi_exports[11])
+#define _cffi_get_struct_layout \
+ not used any more
+#define _cffi_restore_errno \
+ ((void(*)(void))_cffi_exports[13])
+#define _cffi_save_errno \
+ ((void(*)(void))_cffi_exports[14])
+#define _cffi_from_c_char \
+ ((PyObject *(*)(char))_cffi_exports[15])
+#define _cffi_from_c_deref \
+ ((PyObject *(*)(char *, struct _cffi_ctypedescr *))_cffi_exports[16])
+#define _cffi_to_c \
+ ((int(*)(char *, struct _cffi_ctypedescr *, PyObject *))_cffi_exports[17])
+#define _cffi_from_c_struct \
+ ((PyObject *(*)(char *, struct _cffi_ctypedescr *))_cffi_exports[18])
+#define _cffi_to_c_wchar_t \
+ ((_cffi_wchar_t(*)(PyObject *))_cffi_exports[19])
+#define _cffi_from_c_wchar_t \
+ ((PyObject *(*)(_cffi_wchar_t))_cffi_exports[20])
+#define _cffi_to_c_long_double \
+ ((long double(*)(PyObject *))_cffi_exports[21])
+#define _cffi_to_c__Bool \
+ ((_Bool(*)(PyObject *))_cffi_exports[22])
+#define _cffi_prepare_pointer_call_argument \
+ ((Py_ssize_t(*)(struct _cffi_ctypedescr *, \
+ PyObject *, char **))_cffi_exports[23])
+#define _cffi_convert_array_from_object \
+ ((int(*)(char *, struct _cffi_ctypedescr *, PyObject *))_cffi_exports[24])
+#define _CFFI_CPIDX 25
+#define _cffi_call_python \
+ ((void(*)(struct _cffi_externpy_s *, char *))_cffi_exports[_CFFI_CPIDX])
+#define _cffi_to_c_wchar3216_t \
+ ((int(*)(PyObject *))_cffi_exports[26])
+#define _cffi_from_c_wchar3216_t \
+ ((PyObject *(*)(int))_cffi_exports[27])
+#define _CFFI_NUM_EXPORTS 28
+
+struct _cffi_ctypedescr;
+
+static void *_cffi_exports[_CFFI_NUM_EXPORTS];
+
+#define _cffi_type(index) ( \
+ assert((((uintptr_t)_cffi_types[index]) & 1) == 0), \
+ (struct _cffi_ctypedescr *)_cffi_types[index])
+
+static PyObject *_cffi_init(const char *module_name, Py_ssize_t version,
+ const struct _cffi_type_context_s *ctx)
+{
+ PyObject *module, *o_arg, *new_module;
+ void *raw[] = {
+ (void *)module_name,
+ (void *)version,
+ (void *)_cffi_exports,
+ (void *)ctx,
+ };
+
+ module = PyImport_ImportModule("_cffi_backend");
+ if (module == NULL)
+ goto failure;
+
+ o_arg = PyLong_FromVoidPtr((void *)raw);
+ if (o_arg == NULL)
+ goto failure;
+
+ new_module = PyObject_CallMethod(
+ module, (char *)"_init_cffi_1_0_external_module", (char *)"O", o_arg);
+
+ Py_DECREF(o_arg);
+ Py_DECREF(module);
+ return new_module;
+
+ failure:
+ Py_XDECREF(module);
+ return NULL;
+}
+
+
+#ifdef HAVE_WCHAR_H
+typedef wchar_t _cffi_wchar_t;
+#else
+typedef uint16_t _cffi_wchar_t; /* same random pick as _cffi_backend.c */
+#endif
+
+_CFFI_UNUSED_FN static uint16_t _cffi_to_c_char16_t(PyObject *o)
+{
+ if (sizeof(_cffi_wchar_t) == 2)
+ return (uint16_t)_cffi_to_c_wchar_t(o);
+ else
+ return (uint16_t)_cffi_to_c_wchar3216_t(o);
+}
+
+_CFFI_UNUSED_FN static PyObject *_cffi_from_c_char16_t(uint16_t x)
+{
+ if (sizeof(_cffi_wchar_t) == 2)
+ return _cffi_from_c_wchar_t((_cffi_wchar_t)x);
+ else
+ return _cffi_from_c_wchar3216_t((int)x);
+}
+
+_CFFI_UNUSED_FN static int _cffi_to_c_char32_t(PyObject *o)
+{
+ if (sizeof(_cffi_wchar_t) == 4)
+ return (int)_cffi_to_c_wchar_t(o);
+ else
+ return (int)_cffi_to_c_wchar3216_t(o);
+}
+
+_CFFI_UNUSED_FN static PyObject *_cffi_from_c_char32_t(unsigned int x)
+{
+ if (sizeof(_cffi_wchar_t) == 4)
+ return _cffi_from_c_wchar_t((_cffi_wchar_t)x);
+ else
+ return _cffi_from_c_wchar3216_t((int)x);
+}
+
+union _cffi_union_alignment_u {
+ unsigned char m_char;
+ unsigned short m_short;
+ unsigned int m_int;
+ unsigned long m_long;
+ unsigned long long m_longlong;
+ float m_float;
+ double m_double;
+ long double m_longdouble;
+};
+
+struct _cffi_freeme_s {
+ struct _cffi_freeme_s *next;
+ union _cffi_union_alignment_u alignment;
+};
+
+_CFFI_UNUSED_FN static int
+_cffi_convert_array_argument(struct _cffi_ctypedescr *ctptr, PyObject *arg,
+ char **output_data, Py_ssize_t datasize,
+ struct _cffi_freeme_s **freeme)
+{
+ char *p;
+ if (datasize < 0)
+ return -1;
+
+ p = *output_data;
+ if (p == NULL) {
+ struct _cffi_freeme_s *fp = (struct _cffi_freeme_s *)PyObject_Malloc(
+ offsetof(struct _cffi_freeme_s, alignment) + (size_t)datasize);
+ if (fp == NULL)
+ return -1;
+ fp->next = *freeme;
+ *freeme = fp;
+ p = *output_data = (char *)&fp->alignment;
+ }
+ memset((void *)p, 0, (size_t)datasize);
+ return _cffi_convert_array_from_object(p, ctptr, arg);
+}
+
+_CFFI_UNUSED_FN static void
+_cffi_free_array_arguments(struct _cffi_freeme_s *freeme)
+{
+ do {
+ void *p = (void *)freeme;
+ freeme = freeme->next;
+ PyObject_Free(p);
+ } while (freeme != NULL);
+}
+
+/********** end CPython-specific section **********/
+#else
+_CFFI_UNUSED_FN
+static void (*_cffi_call_python_org)(struct _cffi_externpy_s *, char *);
+# define _cffi_call_python _cffi_call_python_org
+#endif
+
+
+#define _cffi_array_len(array) (sizeof(array) / sizeof((array)[0]))
+
+#define _cffi_prim_int(size, sign) \
+ ((size) == 1 ? ((sign) ? _CFFI_PRIM_INT8 : _CFFI_PRIM_UINT8) : \
+ (size) == 2 ? ((sign) ? _CFFI_PRIM_INT16 : _CFFI_PRIM_UINT16) : \
+ (size) == 4 ? ((sign) ? _CFFI_PRIM_INT32 : _CFFI_PRIM_UINT32) : \
+ (size) == 8 ? ((sign) ? _CFFI_PRIM_INT64 : _CFFI_PRIM_UINT64) : \
+ _CFFI__UNKNOWN_PRIM)
+
+#define _cffi_prim_float(size) \
+ ((size) == sizeof(float) ? _CFFI_PRIM_FLOAT : \
+ (size) == sizeof(double) ? _CFFI_PRIM_DOUBLE : \
+ (size) == sizeof(long double) ? _CFFI__UNKNOWN_LONG_DOUBLE : \
+ _CFFI__UNKNOWN_FLOAT_PRIM)
+
+#define _cffi_check_int(got, got_nonpos, expected) \
+ ((got_nonpos) == (expected <= 0) && \
+ (got) == (unsigned long long)expected)
+
+#ifdef MS_WIN32
+# define _cffi_stdcall __stdcall
+#else
+# define _cffi_stdcall /* nothing */
+#endif
+
+#ifdef __cplusplus
+}
+#endif
diff --git a/Backend/venv/lib/python3.12/site-packages/cffi/_embedding.h b/Backend/venv/lib/python3.12/site-packages/cffi/_embedding.h
new file mode 100644
index 00000000..64c04f67
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cffi/_embedding.h
@@ -0,0 +1,550 @@
+
+/***** Support code for embedding *****/
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+#if defined(_WIN32)
+# define CFFI_DLLEXPORT __declspec(dllexport)
+#elif defined(__GNUC__)
+# define CFFI_DLLEXPORT __attribute__((visibility("default")))
+#else
+# define CFFI_DLLEXPORT /* nothing */
+#endif
+
+
+/* There are two global variables of type _cffi_call_python_fnptr:
+
+ * _cffi_call_python, which we declare just below, is the one called
+ by ``extern "Python"`` implementations.
+
+ * _cffi_call_python_org, which on CPython is actually part of the
+ _cffi_exports[] array, is the function pointer copied from
+ _cffi_backend. If _cffi_start_python() fails, then this is set
+ to NULL; otherwise, it should never be NULL.
+
+ After initialization is complete, both are equal. However, the
+ first one remains equal to &_cffi_start_and_call_python until the
+ very end of initialization, when we are (or should be) sure that
+ concurrent threads also see a completely initialized world, and
+ only then is it changed.
+*/
+#undef _cffi_call_python
+typedef void (*_cffi_call_python_fnptr)(struct _cffi_externpy_s *, char *);
+static void _cffi_start_and_call_python(struct _cffi_externpy_s *, char *);
+static _cffi_call_python_fnptr _cffi_call_python = &_cffi_start_and_call_python;
+
+
+#ifndef _MSC_VER
+ /* --- Assuming a GCC not infinitely old --- */
+# define cffi_compare_and_swap(l,o,n) __sync_bool_compare_and_swap(l,o,n)
+# define cffi_write_barrier() __sync_synchronize()
+# if !defined(__amd64__) && !defined(__x86_64__) && \
+ !defined(__i386__) && !defined(__i386)
+# define cffi_read_barrier() __sync_synchronize()
+# else
+# define cffi_read_barrier() (void)0
+# endif
+#else
+ /* --- Windows threads version --- */
+# include <Windows.h>
+# define cffi_compare_and_swap(l,o,n) \
+ (InterlockedCompareExchangePointer(l,n,o) == (o))
+# define cffi_write_barrier() InterlockedCompareExchange(&_cffi_dummy,0,0)
+# define cffi_read_barrier() (void)0
+static volatile LONG _cffi_dummy;
+#endif
+
+#ifdef WITH_THREAD
+# ifndef _MSC_VER
+# include <pthread.h>
+ static pthread_mutex_t _cffi_embed_startup_lock;
+# else
+ static CRITICAL_SECTION _cffi_embed_startup_lock;
+# endif
+ static char _cffi_embed_startup_lock_ready = 0;
+#endif
+
+static void _cffi_acquire_reentrant_mutex(void)
+{
+ static void *volatile lock = NULL;
+
+ while (!cffi_compare_and_swap(&lock, NULL, (void *)1)) {
+ /* should ideally do a spin loop instruction here, but
+ hard to do it portably and doesn't really matter I
+ think: pthread_mutex_init() should be very fast, and
+ this is only run at start-up anyway. */
+ }
+
+#ifdef WITH_THREAD
+ if (!_cffi_embed_startup_lock_ready) {
+# ifndef _MSC_VER
+ pthread_mutexattr_t attr;
+ pthread_mutexattr_init(&attr);
+ pthread_mutexattr_settype(&attr, PTHREAD_MUTEX_RECURSIVE);
+ pthread_mutex_init(&_cffi_embed_startup_lock, &attr);
+# else
+ InitializeCriticalSection(&_cffi_embed_startup_lock);
+# endif
+ _cffi_embed_startup_lock_ready = 1;
+ }
+#endif
+
+ while (!cffi_compare_and_swap(&lock, (void *)1, NULL))
+ ;
+
+#ifndef _MSC_VER
+ pthread_mutex_lock(&_cffi_embed_startup_lock);
+#else
+ EnterCriticalSection(&_cffi_embed_startup_lock);
+#endif
+}
+
+static void _cffi_release_reentrant_mutex(void)
+{
+#ifndef _MSC_VER
+ pthread_mutex_unlock(&_cffi_embed_startup_lock);
+#else
+ LeaveCriticalSection(&_cffi_embed_startup_lock);
+#endif
+}
+
+
+/********** CPython-specific section **********/
+#ifndef PYPY_VERSION
+
+#include "_cffi_errors.h"
+
+
+#define _cffi_call_python_org _cffi_exports[_CFFI_CPIDX]
+
+PyMODINIT_FUNC _CFFI_PYTHON_STARTUP_FUNC(void); /* forward */
+
+static void _cffi_py_initialize(void)
+{
+ /* XXX use initsigs=0, which "skips initialization registration of
+ signal handlers, which might be useful when Python is
+ embedded" according to the Python docs. But review and think
+ if it should be a user-controllable setting.
+
+ XXX we should also give a way to write errors to a buffer
+ instead of to stderr.
+
+ XXX if importing 'site' fails, CPython (any version) calls
+ exit(). Should we try to work around this behavior here?
+ */
+ Py_InitializeEx(0);
+}
+
+static int _cffi_initialize_python(void)
+{
+ /* This initializes Python, imports _cffi_backend, and then the
+ present .dll/.so is set up as a CPython C extension module.
+ */
+ int result;
+ PyGILState_STATE state;
+ PyObject *pycode=NULL, *global_dict=NULL, *x;
+ PyObject *builtins;
+
+ state = PyGILState_Ensure();
+
+ /* Call the initxxx() function from the present module. It will
+ create and initialize us as a CPython extension module, instead
+ of letting the startup Python code do it---it might reimport
+ the same .dll/.so and get maybe confused on some platforms.
+ It might also have troubles locating the .dll/.so again for all
+ I know.
+ */
+ (void)_CFFI_PYTHON_STARTUP_FUNC();
+ if (PyErr_Occurred())
+ goto error;
+
+ /* Now run the Python code provided to ffi.embedding_init_code().
+ */
+    pycode = Py_CompileString(_CFFI_PYTHON_STARTUP_CODE,
+                              "<init code for '" _CFFI_MODULE_NAME "'>",
+                              Py_file_input);
+ if (pycode == NULL)
+ goto error;
+ global_dict = PyDict_New();
+ if (global_dict == NULL)
+ goto error;
+ builtins = PyEval_GetBuiltins();
+ if (builtins == NULL)
+ goto error;
+ if (PyDict_SetItemString(global_dict, "__builtins__", builtins) < 0)
+ goto error;
+ x = PyEval_EvalCode(
+#if PY_MAJOR_VERSION < 3
+ (PyCodeObject *)
+#endif
+ pycode, global_dict, global_dict);
+ if (x == NULL)
+ goto error;
+ Py_DECREF(x);
+
+ /* Done! Now if we've been called from
+ _cffi_start_and_call_python() in an ``extern "Python"``, we can
+ only hope that the Python code did correctly set up the
+ corresponding @ffi.def_extern() function. Otherwise, the
+ general logic of ``extern "Python"`` functions (inside the
+ _cffi_backend module) will find that the reference is still
+ missing and print an error.
+ */
+ result = 0;
+ done:
+ Py_XDECREF(pycode);
+ Py_XDECREF(global_dict);
+ PyGILState_Release(state);
+ return result;
+
+ error:;
+ {
+ /* Print as much information as potentially useful.
+ Debugging load-time failures with embedding is not fun
+ */
+ PyObject *ecap;
+ PyObject *exception, *v, *tb, *f, *modules, *mod;
+ PyErr_Fetch(&exception, &v, &tb);
+ ecap = _cffi_start_error_capture();
+ f = PySys_GetObject((char *)"stderr");
+ if (f != NULL && f != Py_None) {
+ PyFile_WriteString(
+ "Failed to initialize the Python-CFFI embedding logic:\n\n", f);
+ }
+
+ if (exception != NULL) {
+ PyErr_NormalizeException(&exception, &v, &tb);
+ PyErr_Display(exception, v, tb);
+ }
+ Py_XDECREF(exception);
+ Py_XDECREF(v);
+ Py_XDECREF(tb);
+
+ if (f != NULL && f != Py_None) {
+ PyFile_WriteString("\nFrom: " _CFFI_MODULE_NAME
+ "\ncompiled with cffi version: 2.0.0"
+ "\n_cffi_backend module: ", f);
+ modules = PyImport_GetModuleDict();
+ mod = PyDict_GetItemString(modules, "_cffi_backend");
+ if (mod == NULL) {
+ PyFile_WriteString("not loaded", f);
+ }
+ else {
+ v = PyObject_GetAttrString(mod, "__file__");
+ PyFile_WriteObject(v, f, 0);
+ Py_XDECREF(v);
+ }
+ PyFile_WriteString("\nsys.path: ", f);
+ PyFile_WriteObject(PySys_GetObject((char *)"path"), f, 0);
+ PyFile_WriteString("\n\n", f);
+ }
+ _cffi_stop_error_capture(ecap);
+ }
+ result = -1;
+ goto done;
+}
+
+#if PY_VERSION_HEX < 0x03080000
+PyAPI_DATA(char *) _PyParser_TokenNames[]; /* from CPython */
+#endif
+
+static int _cffi_carefully_make_gil(void)
+{
+ /* This does the basic initialization of Python. It can be called
+ completely concurrently from unrelated threads. It assumes
+ that we don't hold the GIL before (if it exists), and we don't
+ hold it afterwards.
+
+ (What it really does used to be completely different in Python 2
+ and Python 3, with the Python 2 solution avoiding the spin-lock
+ around the Py_InitializeEx() call. However, after recent changes
+ to CPython 2.7 (issue #358) it no longer works. So we use the
+ Python 3 solution everywhere.)
+
+ This initializes Python by calling Py_InitializeEx().
+ Important: this must not be called concurrently at all.
+ So we use a global variable as a simple spin lock. This global
+ variable must be from 'libpythonX.Y.so', not from this
+ cffi-based extension module, because it must be shared from
+ different cffi-based extension modules.
+
+ In Python < 3.8, we choose
+ _PyParser_TokenNames[0] as a completely arbitrary pointer value
+ that is never written to. The default is to point to the
+ string "ENDMARKER". We change it temporarily to point to the
+ next character in that string. (Yes, I know it's REALLY
+ obscure.)
+
+ In Python >= 3.8, this string array is no longer writable, so
+ instead we pick PyCapsuleType.tp_version_tag. We can't change
+ Python < 3.8 because someone might use a mixture of cffi
+ embedded modules, some of which were compiled before this file
+ changed.
+
+ In Python >= 3.12, this stopped working because that particular
+ tp_version_tag gets modified during interpreter startup. It's
+ arguably a bad idea before 3.12 too, but again we can't change
+ that because someone might use a mixture of cffi embedded
+ modules, and no-one reported a bug so far. In Python >= 3.12
+ we go instead for PyCapsuleType.tp_as_buffer, which is supposed
+ to always be NULL. We write to it temporarily a pointer to
+ a struct full of NULLs, which is semantically the same.
+ */
+
+#ifdef WITH_THREAD
+# if PY_VERSION_HEX < 0x03080000
+ char *volatile *lock = (char *volatile *)_PyParser_TokenNames;
+ char *old_value, *locked_value;
+
+ while (1) { /* spin loop */
+ old_value = *lock;
+ locked_value = old_value + 1;
+ if (old_value[0] == 'E') {
+ assert(old_value[1] == 'N');
+ if (cffi_compare_and_swap(lock, old_value, locked_value))
+ break;
+ }
+ else {
+ assert(old_value[0] == 'N');
+ /* should ideally do a spin loop instruction here, but
+ hard to do it portably and doesn't really matter I
+ think: PyEval_InitThreads() should be very fast, and
+ this is only run at start-up anyway. */
+ }
+ }
+# else
+# if PY_VERSION_HEX < 0x030C0000
+ int volatile *lock = (int volatile *)&PyCapsule_Type.tp_version_tag;
+ int old_value, locked_value = -42;
+ assert(!(PyCapsule_Type.tp_flags & Py_TPFLAGS_HAVE_VERSION_TAG));
+# else
+ static struct ebp_s { PyBufferProcs buf; int mark; } empty_buffer_procs;
+ empty_buffer_procs.mark = -42;
+ PyBufferProcs *volatile *lock = (PyBufferProcs *volatile *)
+ &PyCapsule_Type.tp_as_buffer;
+ PyBufferProcs *old_value, *locked_value = &empty_buffer_procs.buf;
+# endif
+
+ while (1) { /* spin loop */
+ old_value = *lock;
+ if (old_value == 0) {
+ if (cffi_compare_and_swap(lock, old_value, locked_value))
+ break;
+ }
+ else {
+# if PY_VERSION_HEX < 0x030C0000
+ assert(old_value == locked_value);
+# else
+ /* The pointer should point to a possibly different
+ empty_buffer_procs from another C extension module */
+ assert(((struct ebp_s *)old_value)->mark == -42);
+# endif
+ /* should ideally do a spin loop instruction here, but
+ hard to do it portably and doesn't really matter I
+ think: PyEval_InitThreads() should be very fast, and
+ this is only run at start-up anyway. */
+ }
+ }
+# endif
+#endif
+
+ /* call Py_InitializeEx() */
+ if (!Py_IsInitialized()) {
+ _cffi_py_initialize();
+#if PY_VERSION_HEX < 0x03070000
+ PyEval_InitThreads();
+#endif
+ PyEval_SaveThread(); /* release the GIL */
+ /* the returned tstate must be the one that has been stored into the
+ autoTLSkey by _PyGILState_Init() called from Py_Initialize(). */
+ }
+ else {
+#if PY_VERSION_HEX < 0x03070000
+ /* PyEval_InitThreads() is always a no-op from CPython 3.7 */
+ PyGILState_STATE state = PyGILState_Ensure();
+ PyEval_InitThreads();
+ PyGILState_Release(state);
+#endif
+ }
+
+#ifdef WITH_THREAD
+ /* release the lock */
+ while (!cffi_compare_and_swap(lock, locked_value, old_value))
+ ;
+#endif
+
+ return 0;
+}
+
+/********** end CPython-specific section **********/
+
+
+#else
+
+
+/********** PyPy-specific section **********/
+
+PyMODINIT_FUNC _CFFI_PYTHON_STARTUP_FUNC(const void *[]); /* forward */
+
+static struct _cffi_pypy_init_s {
+ const char *name;
+ void *func; /* function pointer */
+ const char *code;
+} _cffi_pypy_init = {
+ _CFFI_MODULE_NAME,
+ _CFFI_PYTHON_STARTUP_FUNC,
+ _CFFI_PYTHON_STARTUP_CODE,
+};
+
+extern int pypy_carefully_make_gil(const char *);
+extern int pypy_init_embedded_cffi_module(int, struct _cffi_pypy_init_s *);
+
+static int _cffi_carefully_make_gil(void)
+{
+ return pypy_carefully_make_gil(_CFFI_MODULE_NAME);
+}
+
+static int _cffi_initialize_python(void)
+{
+ return pypy_init_embedded_cffi_module(0xB011, &_cffi_pypy_init);
+}
+
+/********** end PyPy-specific section **********/
+
+
+#endif
+
+
+#ifdef __GNUC__
+__attribute__((noinline))
+#endif
+static _cffi_call_python_fnptr _cffi_start_python(void)
+{
+ /* Delicate logic to initialize Python. This function can be
+ called multiple times concurrently, e.g. when the process calls
+ its first ``extern "Python"`` functions in multiple threads at
+ once. It can also be called recursively, in which case we must
+ ignore it. We also have to consider what occurs if several
+ different cffi-based extensions reach this code in parallel
+ threads---it is a different copy of the code, then, and we
+ can't have any shared global variable unless it comes from
+ 'libpythonX.Y.so'.
+
+ Idea:
+
+ * _cffi_carefully_make_gil(): "carefully" call
+ PyEval_InitThreads() (possibly with Py_InitializeEx() first).
+
+ * then we use a (local) custom lock to make sure that a call to this
+ cffi-based extension will wait if another call to the *same*
+ extension is running the initialization in another thread.
+ It is reentrant, so that a recursive call will not block, but
+ only one from a different thread.
+
+ * then we grab the GIL and (Python 2) we call Py_InitializeEx().
+ At this point, concurrent calls to Py_InitializeEx() are not
+ possible: we have the GIL.
+
+ * do the rest of the specific initialization, which may
+ temporarily release the GIL but not the custom lock.
+ Only release the custom lock when we are done.
+ */
+ static char called = 0;
+
+ if (_cffi_carefully_make_gil() != 0)
+ return NULL;
+
+ _cffi_acquire_reentrant_mutex();
+
+ /* Here the GIL exists, but we don't have it. We're only protected
+ from concurrency by the reentrant mutex. */
+
+ /* This file only initializes the embedded module once, the first
+ time this is called, even if there are subinterpreters. */
+ if (!called) {
+ called = 1; /* invoke _cffi_initialize_python() only once,
+ but don't set '_cffi_call_python' right now,
+ otherwise concurrent threads won't call
+ this function at all (we need them to wait) */
+ if (_cffi_initialize_python() == 0) {
+ /* now initialization is finished. Switch to the fast-path. */
+
+ /* We would like nobody to see the new value of
+ '_cffi_call_python' without also seeing the rest of the
+ data initialized. However, this is not possible. But
+ the new value of '_cffi_call_python' is the function
+ 'cffi_call_python()' from _cffi_backend. So: */
+ cffi_write_barrier();
+ /* ^^^ we put a write barrier here, and a corresponding
+ read barrier at the start of cffi_call_python(). This
+ ensures that after that read barrier, we see everything
+ done here before the write barrier.
+ */
+
+ assert(_cffi_call_python_org != NULL);
+ _cffi_call_python = (_cffi_call_python_fnptr)_cffi_call_python_org;
+ }
+ else {
+ /* initialization failed. Reset this to NULL, even if it was
+ already set to some other value. Future calls to
+ _cffi_start_python() are still forced to occur, and will
+ always return NULL from now on. */
+ _cffi_call_python_org = NULL;
+ }
+ }
+
+ _cffi_release_reentrant_mutex();
+
+ return (_cffi_call_python_fnptr)_cffi_call_python_org;
+}
+
+static
+void _cffi_start_and_call_python(struct _cffi_externpy_s *externpy, char *args)
+{
+ _cffi_call_python_fnptr fnptr;
+ int current_err = errno;
+#ifdef _MSC_VER
+ int current_lasterr = GetLastError();
+#endif
+ fnptr = _cffi_start_python();
+ if (fnptr == NULL) {
+ fprintf(stderr, "function %s() called, but initialization code "
+ "failed. Returning 0.\n", externpy->name);
+ memset(args, 0, externpy->size_of_result);
+ }
+#ifdef _MSC_VER
+ SetLastError(current_lasterr);
+#endif
+ errno = current_err;
+
+ if (fnptr != NULL)
+ fnptr(externpy, args);
+}
+
+
+/* The cffi_start_python() function makes sure Python is initialized
+ and our cffi module is set up. It can be called manually from the
+ user C code. The same effect is obtained automatically from any
+ dll-exported ``extern "Python"`` function. This function returns
+ -1 if initialization failed, 0 if all is OK. */
+_CFFI_UNUSED_FN
+static int cffi_start_python(void)
+{
+ if (_cffi_call_python == &_cffi_start_and_call_python) {
+ if (_cffi_start_python() == NULL)
+ return -1;
+ }
+ cffi_read_barrier();
+ return 0;
+}
+
+#undef cffi_compare_and_swap
+#undef cffi_write_barrier
+#undef cffi_read_barrier
+
+#ifdef __cplusplus
+}
+#endif
diff --git a/Backend/venv/lib/python3.12/site-packages/cffi/_imp_emulation.py b/Backend/venv/lib/python3.12/site-packages/cffi/_imp_emulation.py
new file mode 100644
index 00000000..136abddd
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cffi/_imp_emulation.py
@@ -0,0 +1,83 @@
+
+try:
+ # this works on Python < 3.12
+ from imp import *
+
+except ImportError:
+ # this is a limited emulation for Python >= 3.12.
+ # Note that this is used only for tests or for the old ffi.verify().
+ # This is copied from the source code of Python 3.11.
+
+ from _imp import (acquire_lock, release_lock,
+ is_builtin, is_frozen)
+
+ from importlib._bootstrap import _load
+
+ from importlib import machinery
+ import os
+ import sys
+ import tokenize
+
+ SEARCH_ERROR = 0
+ PY_SOURCE = 1
+ PY_COMPILED = 2
+ C_EXTENSION = 3
+ PY_RESOURCE = 4
+ PKG_DIRECTORY = 5
+ C_BUILTIN = 6
+ PY_FROZEN = 7
+ PY_CODERESOURCE = 8
+ IMP_HOOK = 9
+
+ def get_suffixes():
+ extensions = [(s, 'rb', C_EXTENSION)
+ for s in machinery.EXTENSION_SUFFIXES]
+ source = [(s, 'r', PY_SOURCE) for s in machinery.SOURCE_SUFFIXES]
+ bytecode = [(s, 'rb', PY_COMPILED) for s in machinery.BYTECODE_SUFFIXES]
+ return extensions + source + bytecode
+
+ def find_module(name, path=None):
+ if not isinstance(name, str):
+ raise TypeError("'name' must be a str, not {}".format(type(name)))
+ elif not isinstance(path, (type(None), list)):
+ # Backwards-compatibility
+ raise RuntimeError("'path' must be None or a list, "
+ "not {}".format(type(path)))
+
+ if path is None:
+ if is_builtin(name):
+ return None, None, ('', '', C_BUILTIN)
+ elif is_frozen(name):
+ return None, None, ('', '', PY_FROZEN)
+ else:
+ path = sys.path
+
+ for entry in path:
+ package_directory = os.path.join(entry, name)
+ for suffix in ['.py', machinery.BYTECODE_SUFFIXES[0]]:
+ package_file_name = '__init__' + suffix
+ file_path = os.path.join(package_directory, package_file_name)
+ if os.path.isfile(file_path):
+ return None, package_directory, ('', '', PKG_DIRECTORY)
+ for suffix, mode, type_ in get_suffixes():
+ file_name = name + suffix
+ file_path = os.path.join(entry, file_name)
+ if os.path.isfile(file_path):
+ break
+ else:
+ continue
+ break # Break out of outer loop when breaking out of inner loop.
+ else:
+ raise ImportError(name, name=name)
+
+ encoding = None
+ if 'b' not in mode:
+ with open(file_path, 'rb') as file:
+ encoding = tokenize.detect_encoding(file.readline)[0]
+ file = open(file_path, mode, encoding=encoding)
+ return file, file_path, (suffix, mode, type_)
+
+ def load_dynamic(name, path, file=None):
+ loader = machinery.ExtensionFileLoader(name, path)
+ spec = machinery.ModuleSpec(name=name, loader=loader, origin=path)
+ return _load(spec)
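+
+# Illustrative sketch only, not part of upstream cffi: how the emulated API
+# above is typically driven. "json" and "tokenize" are just stdlib examples
+# of a package and a plain source module found on sys.path:
+#
+# file, pathname, description = find_module("json")
+# assert file is None and description == ('', '', PKG_DIRECTORY)
+# file, pathname, (suffix, mode, type_) = find_module("tokenize")
+# assert type_ == PY_SOURCE
+# file.close() # find_module() returns an open file for PY_SOURCE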
diff --git a/Backend/venv/lib/python3.12/site-packages/cffi/_shimmed_dist_utils.py b/Backend/venv/lib/python3.12/site-packages/cffi/_shimmed_dist_utils.py
new file mode 100644
index 00000000..c3d23128
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cffi/_shimmed_dist_utils.py
@@ -0,0 +1,45 @@
+"""
+Temporary shim module to indirect the bits of distutils we need from setuptools/distutils while providing useful
+error messages beyond `No module named 'distutils'` on Python >= 3.12, or when setuptools' vendored distutils is broken.
+
+This is a compromise to avoid a hard-dep on setuptools for Python >= 3.12, since many users don't need runtime compilation support from CFFI.
+"""
+import sys
+
+try:
+ # import setuptools first; this is the most robust way to ensure its embedded distutils is available
+ # (the .pth shim should usually work, but this is even more robust)
+ import setuptools
+except Exception as ex:
+ if sys.version_info >= (3, 12):
+ # Python 3.12 has no built-in distutils to fall back on, so any import problem is fatal
+ raise Exception("This CFFI feature requires setuptools on Python >= 3.12. The setuptools module is missing or non-functional.") from ex
+
+ # silently ignore on older Pythons (support fallback to stdlib distutils where available)
+else:
+ del setuptools
+
+try:
+ # bring in just the bits of distutils we need, whether they really came from setuptools or stdlib-embedded distutils
+ from distutils import log, sysconfig
+ from distutils.ccompiler import CCompiler
+ from distutils.command.build_ext import build_ext
+ from distutils.core import Distribution, Extension
+ from distutils.dir_util import mkpath
+ from distutils.errors import DistutilsSetupError, CompileError, LinkError
+ from distutils.log import set_threshold, set_verbosity
+
+ if sys.platform == 'win32':
+ try:
+ # FUTURE: msvc9compiler module was removed in setuptools 74; consider removing, as it's only used by an ancient patch in `recompiler`
+ from distutils.msvc9compiler import MSVCCompiler
+ except ImportError:
+ MSVCCompiler = None
+except Exception as ex:
+ if sys.version_info >= (3, 12):
+ raise Exception("This CFFI feature requires setuptools on Python >= 3.12. Please install the setuptools package.") from ex
+
+ # anything older, just let the underlying distutils import error fly
+ raise Exception("This CFFI feature requires distutils. Please install the distutils or setuptools package.") from ex
+
+del sys
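+
+# Illustrative note, not part of upstream cffi: the rest of the package is
+# expected to import its distutils pieces through this shim rather than from
+# distutils directly, e.g.
+#
+# from cffi._shimmed_dist_utils import Distribution, Extension, mkpath
+#
+# so that the "install setuptools" errors above surface from one central place.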
diff --git a/Backend/venv/lib/python3.12/site-packages/cffi/api.py b/Backend/venv/lib/python3.12/site-packages/cffi/api.py
new file mode 100644
index 00000000..5a474f3d
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cffi/api.py
@@ -0,0 +1,967 @@
+import sys, types
+from .lock import allocate_lock
+from .error import CDefError
+from . import model
+
+try:
+ callable
+except NameError:
+ # Python 3.1
+ from collections import Callable
+ callable = lambda x: isinstance(x, Callable)
+
+try:
+ basestring
+except NameError:
+ # Python 3.x
+ basestring = str
+
+_unspecified = object()
+
+
+
+class FFI(object):
+ r'''
+ The main top-level class that you instantiate once, or once per module.
+
+ Example usage:
+
+ ffi = FFI()
+ ffi.cdef("""
+ int printf(const char *, ...);
+ """)
+
+ C = ffi.dlopen(None) # standard library
+ -or-
+ C = ffi.verify() # use a C compiler: verify the decl above is right
+
+ C.printf("hello, %s!\n", ffi.new("char[]", "world"))
+ '''
+
+ def __init__(self, backend=None):
+ """Create an FFI instance. The 'backend' argument is used to
+ select a non-default backend, mostly for tests.
+ """
+ if backend is None:
+ # You need PyPy (>= 2.0 beta), or a CPython (>= 2.6) with
+ # _cffi_backend.so compiled.
+ import _cffi_backend as backend
+ from . import __version__
+ if backend.__version__ != __version__:
+ # bad version! Try to be as explicit as possible.
+ if hasattr(backend, '__file__'):
+ # CPython
+ raise Exception("Version mismatch: this is the 'cffi' package version %s, located in %r. When we import the top-level '_cffi_backend' extension module, we get version %s, located in %r. The two versions should be equal; check your installation." % (
+ __version__, __file__,
+ backend.__version__, backend.__file__))
+ else:
+ # PyPy
+ raise Exception("Version mismatch: this is the 'cffi' package version %s, located in %r. This interpreter comes with a built-in '_cffi_backend' module, which is version %s. The two versions should be equal; check your installation." % (
+ __version__, __file__, backend.__version__))
+ # (If you insist you can also try to pass the option
+ # 'backend=backend_ctypes.CTypesBackend()', but don't
+ # rely on it! It's probably not going to work well.)
+
+ from . import cparser
+ self._backend = backend
+ self._lock = allocate_lock()
+ self._parser = cparser.Parser()
+ self._cached_btypes = {}
+ self._parsed_types = types.ModuleType('parsed_types').__dict__
+ self._new_types = types.ModuleType('new_types').__dict__
+ self._function_caches = []
+ self._libraries = []
+ self._cdefsources = []
+ self._included_ffis = []
+ self._windows_unicode = None
+ self._init_once_cache = {}
+ self._cdef_version = None
+ self._embedding = None
+ self._typecache = model.get_typecache(backend)
+ if hasattr(backend, 'set_ffi'):
+ backend.set_ffi(self)
+ for name in list(backend.__dict__):
+ if name.startswith('RTLD_'):
+ setattr(self, name, getattr(backend, name))
+ #
+ with self._lock:
+ self.BVoidP = self._get_cached_btype(model.voidp_type)
+ self.BCharA = self._get_cached_btype(model.char_array_type)
+ if isinstance(backend, types.ModuleType):
+ # _cffi_backend: attach these constants to the class
+ if not hasattr(FFI, 'NULL'):
+ FFI.NULL = self.cast(self.BVoidP, 0)
+ FFI.CData, FFI.CType = backend._get_types()
+ else:
+ # ctypes backend: attach these constants to the instance
+ self.NULL = self.cast(self.BVoidP, 0)
+ self.CData, self.CType = backend._get_types()
+ self.buffer = backend.buffer
+
+ def cdef(self, csource, override=False, packed=False, pack=None):
+ """Parse the given C source. This registers all declared functions,
+ types, and global variables. The functions and global variables can
+ then be accessed via either 'ffi.dlopen()' or 'ffi.verify()'.
+ The types can be used in 'ffi.new()' and other functions.
+ If 'packed' is specified as True, all structs declared inside this
+ cdef are packed, i.e. laid out without any field alignment at all.
+ Alternatively, 'pack' can be a small integer, and requests for
+ alignment greater than that are ignored (pack=1 is equivalent to
+ packed=True).
+ """
+ self._cdef(csource, override=override, packed=packed, pack=pack)
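+
+ # Illustrative sketch only, not part of upstream cffi, assuming a typical
+ # ABI where 'int' is 4-byte aligned:
+ #
+ # ffi = FFI()
+ # ffi.cdef("struct s { char a; int b; };", packed=True)
+ # assert ffi.sizeof("struct s") == 5 # 8 without packed=True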
+
+ def embedding_api(self, csource, packed=False, pack=None):
+ self._cdef(csource, packed=packed, pack=pack, dllexport=True)
+ if self._embedding is None:
+ self._embedding = ''
+
+ def _cdef(self, csource, override=False, **options):
+ if not isinstance(csource, str): # unicode, on Python 2
+ if not isinstance(csource, basestring):
+ raise TypeError("cdef() argument must be a string")
+ csource = csource.encode('ascii')
+ with self._lock:
+ self._cdef_version = object()
+ self._parser.parse(csource, override=override, **options)
+ self._cdefsources.append(csource)
+ if override:
+ for cache in self._function_caches:
+ cache.clear()
+ finishlist = self._parser._recomplete
+ if finishlist:
+ self._parser._recomplete = []
+ for tp in finishlist:
+ tp.finish_backend_type(self, finishlist)
+
+ def dlopen(self, name, flags=0):
+ """Load and return a dynamic library identified by 'name'.
+ The standard C library can be loaded by passing None.
+ Note that functions and types declared by 'ffi.cdef()' are not
+ linked to a particular library, just like C headers; in the
+ library we only look for the actual (untyped) symbols.
+ """
+ if not (isinstance(name, basestring) or
+ name is None or
+ isinstance(name, self.CData)):
+ raise TypeError("dlopen(name): name must be a file name, None, "
+ "or an already-opened 'void *' handle")
+ with self._lock:
+ lib, function_cache = _make_ffi_library(self, name, flags)
+ self._function_caches.append(function_cache)
+ self._libraries.append(lib)
+ return lib
+
+ def dlclose(self, lib):
+ """Close a library obtained with ffi.dlopen(). After this call,
+ access to functions or variables from the library will fail
+ (possibly with a segmentation fault).
+ """
+ type(lib).__cffi_close__(lib)
+
+ def _typeof_locked(self, cdecl):
+ # call me with the lock!
+ key = cdecl
+ if key in self._parsed_types:
+ return self._parsed_types[key]
+ #
+ if not isinstance(cdecl, str): # unicode, on Python 2
+ cdecl = cdecl.encode('ascii')
+ #
+ type = self._parser.parse_type(cdecl)
+ really_a_function_type = type.is_raw_function
+ if really_a_function_type:
+ type = type.as_function_pointer()
+ btype = self._get_cached_btype(type)
+ result = btype, really_a_function_type
+ self._parsed_types[key] = result
+ return result
+
+ def _typeof(self, cdecl, consider_function_as_funcptr=False):
+ # string -> ctype object
+ try:
+ result = self._parsed_types[cdecl]
+ except KeyError:
+ with self._lock:
+ result = self._typeof_locked(cdecl)
+ #
+ btype, really_a_function_type = result
+ if really_a_function_type and not consider_function_as_funcptr:
+ raise CDefError("the type %r is a function type, not a "
+ "pointer-to-function type" % (cdecl,))
+ return btype
+
+ def typeof(self, cdecl):
+ """Parse the C type given as a string and return the
+ corresponding object.
+ It can also be used on 'cdata' instance to get its C type.
+ """
+ if isinstance(cdecl, basestring):
+ return self._typeof(cdecl)
+ if isinstance(cdecl, self.CData):
+ return self._backend.typeof(cdecl)
+ if isinstance(cdecl, types.BuiltinFunctionType):
+ res = _builtin_function_type(cdecl)
+ if res is not None:
+ return res
+ if (isinstance(cdecl, types.FunctionType)
+ and hasattr(cdecl, '_cffi_base_type')):
+ with self._lock:
+ return self._get_cached_btype(cdecl._cffi_base_type)
+ raise TypeError(type(cdecl))
+
+ def sizeof(self, cdecl):
+ """Return the size in bytes of the argument. It can be a
+ string naming a C type, or a 'cdata' instance.
+ """
+ if isinstance(cdecl, basestring):
+ BType = self._typeof(cdecl)
+ return self._backend.sizeof(BType)
+ else:
+ return self._backend.sizeof(cdecl)
+
+ def alignof(self, cdecl):
+ """Return the natural alignment size in bytes of the C type
+ given as a string.
+ """
+ if isinstance(cdecl, basestring):
+ cdecl = self._typeof(cdecl)
+ return self._backend.alignof(cdecl)
+
+ def offsetof(self, cdecl, *fields_or_indexes):
+ """Return the offset of the named field inside the given
+ structure or array, which must be given as a C type name.
+ You can give several field names in case of nested structures.
+ You can also give numeric values which correspond to array
+ items, in case of an array type.
+ """
+ if isinstance(cdecl, basestring):
+ cdecl = self._typeof(cdecl)
+ return self._typeoffsetof(cdecl, *fields_or_indexes)[1]
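+
+ # Illustrative sketch only, not part of upstream cffi; offsets assume a
+ # typical ABI with 4-byte ints:
+ #
+ # ffi.cdef("struct point { int x, y; };")
+ # assert ffi.offsetof("struct point", "y") == 4
+ # assert ffi.offsetof("int[10]", 3) == 12 # 3 * sizeof(int)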
+
+ def new(self, cdecl, init=None):
+ """Allocate an instance according to the specified C type and
+ return a pointer to it. The specified C type must be either a
+ pointer or an array: ``new('X *')`` allocates an X and returns
+ a pointer to it, whereas ``new('X[n]')`` allocates an array of
+ n X'es and returns an array referencing it (which works
+ mostly like a pointer, like in C). You can also use
+ ``new('X[]', n)`` to allocate an array of a non-constant
+ length n.
+
+ The memory is initialized following the rules of declaring a
+ global variable in C: by default it is zero-initialized, but
+ an explicit initializer can be given which can be used to
+ fill all or part of the memory.
+
+ When the returned object goes out of scope, the memory
+ is freed. In other words the returned object has
+ ownership of the value of type 'cdecl' that it points to. This
+ means that the raw data can be used as long as this object is
+ kept alive, but must not be used for a longer time. Be careful
+ about that when copying the pointer to the memory somewhere
+ else, e.g. into another structure.
+ """
+ if isinstance(cdecl, basestring):
+ cdecl = self._typeof(cdecl)
+ return self._backend.newp(cdecl, init)
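+
+ # Illustrative sketch only, not part of upstream cffi:
+ #
+ # p = ffi.new("int *") # one zero-initialized int
+ # a = ffi.new("int[4]", [1, 2]) # -> [1, 2, 0, 0]
+ # b = ffi.new("int[]", 3) # length 3, taken from the argument
+ # (the memory is freed automatically when p, a and b go out of scope)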
+
+ def new_allocator(self, alloc=None, free=None,
+ should_clear_after_alloc=True):
+ """Return a new allocator, i.e. a function that behaves like ffi.new()
+ but uses the provided low-level 'alloc' and 'free' functions.
+
+ 'alloc' is called with the size as argument. If it returns NULL, a
+ MemoryError is raised. 'free' is called with the result of 'alloc'
+ as argument. Both can be either Python function or directly C
+ functions. If 'free' is None, then no free function is called.
+ If both 'alloc' and 'free' are None, the default is used.
+
+ If 'should_clear_after_alloc' is set to False, then the memory
+ returned by 'alloc' is assumed to be already cleared (or you are
+ fine with garbage); otherwise CFFI will clear it.
+ """
+ compiled_ffi = self._backend.FFI()
+ allocator = compiled_ffi.new_allocator(alloc, free,
+ should_clear_after_alloc)
+ def allocate(cdecl, init=None):
+ if isinstance(cdecl, basestring):
+ cdecl = self._typeof(cdecl)
+ return allocator(cdecl, init)
+ return allocate
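+
+ # Illustrative sketch only, not part of upstream cffi; assumes 'lib' is a
+ # library object exposing malloc() and free(), e.g. from ffi.dlopen(None)
+ # after cdef'ing their prototypes:
+ #
+ # alloc = ffi.new_allocator(lib.malloc, lib.free)
+ # p = alloc("int[10]") # like ffi.new("int[10]"), but via lib.malloc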
+
+ def cast(self, cdecl, source):
+ """Similar to a C cast: returns an instance of the named C
+ type initialized with the given 'source'. The source is
+ cast between integers or pointers of any type.
+ """
+ if isinstance(cdecl, basestring):
+ cdecl = self._typeof(cdecl)
+ return self._backend.cast(cdecl, source)
+
+ def string(self, cdata, maxlen=-1):
+ """Return a Python string (or unicode string) from the 'cdata'.
+ If 'cdata' is a pointer or array of characters or bytes, returns
+ the null-terminated string. The returned string extends until
+ the first null character, or at most 'maxlen' characters. If
+ 'cdata' is an array then 'maxlen' defaults to its length.
+
+ If 'cdata' is a pointer or array of wchar_t, returns a unicode
+ string following the same rules.
+
+ If 'cdata' is a single character or byte or a wchar_t, returns
+ it as a string or unicode string.
+
+ If 'cdata' is an enum, returns the value of the enumerator as a
+ string, or 'NUMBER' if the value is out of range.
+ """
+ return self._backend.string(cdata, maxlen)
+
+ def unpack(self, cdata, length):
+ """Unpack an array of C data of the given length,
+ returning a Python string/unicode/list.
+
+ If 'cdata' is a pointer to 'char', returns a byte string.
+ It does not stop at the first null. This is equivalent to:
+ ffi.buffer(cdata, length)[:]
+
+ If 'cdata' is a pointer to 'wchar_t', returns a unicode string.
+ 'length' is measured in wchar_t's; it is not the size in bytes.
+
+ If 'cdata' is a pointer to anything else, returns a list of
+ 'length' items. This is a faster equivalent to:
+ [cdata[i] for i in range(length)]
+ """
+ return self._backend.unpack(cdata, length)
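+
+ # Illustrative sketch only, not part of upstream cffi:
+ #
+ # p = ffi.new("char[]", b"hell\x00o")
+ # assert ffi.string(p) == b"hell" # stops at the first null
+ # assert ffi.unpack(p, 6) == b"hell\x00o" # exactly 6 bytes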
+
+ #def buffer(self, cdata, size=-1):
+ # """Return a read-write buffer object that references the raw C data
+ # pointed to by the given 'cdata'. The 'cdata' must be a pointer or
+ # an array. Can be passed to functions expecting a buffer, or directly
+ # manipulated with:
+ #
+ # buf[:] get a copy of it in a regular string, or
+ # buf[idx] as a single character
+ # buf[:] = ...
+ # buf[idx] = ... change the content
+ # """
+ # note that 'buffer' is a type, set on this instance by __init__
+
+ def from_buffer(self, cdecl, python_buffer=_unspecified,
+ require_writable=False):
+ """Return a cdata of the given type pointing to the data of the
+ given Python object, which must support the buffer interface.
+ Note that this is not meant to be used on the built-in types
+ str or unicode (you can build 'char[]' arrays explicitly)
+ but only on objects containing large quantities of raw data
+ in some other format, like 'array.array' or numpy arrays.
+
+ The first argument is optional and defaults to 'char[]'.
+ """
+ if python_buffer is _unspecified:
+ cdecl, python_buffer = self.BCharA, cdecl
+ elif isinstance(cdecl, basestring):
+ cdecl = self._typeof(cdecl)
+ return self._backend.from_buffer(cdecl, python_buffer,
+ require_writable)
+
+ def memmove(self, dest, src, n):
+ """ffi.memmove(dest, src, n) copies n bytes of memory from src to dest.
+
+ Like the C function memmove(), the memory areas may overlap;
+ apart from that it behaves like the C function memcpy().
+
+ 'src' can be any cdata ptr or array, or any Python buffer object.
+ 'dest' can be any cdata ptr or array, or a writable Python buffer
+ object. The size to copy, 'n', is always measured in bytes.
+
+ Unlike other methods, this one supports all Python buffer objects,
+ including byte strings and bytearrays---but it still does not support
+ non-contiguous buffers.
+ """
+ return self._backend.memmove(dest, src, n)
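+
+ # Illustrative sketch only, not part of upstream cffi: an overlapping
+ # copy, like C memmove():
+ #
+ # a = ffi.new("int[5]", [1, 2, 3, 4, 5])
+ # ffi.memmove(a + 1, a, 3 * ffi.sizeof("int"))
+ # assert list(a) == [1, 1, 2, 3, 5]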
+
+ def callback(self, cdecl, python_callable=None, error=None, onerror=None):
+ """Return a callback object or a decorator making such a
+ callback object. 'cdecl' must name a C function pointer type.
+ The callback invokes the specified 'python_callable' (which may
+ be provided either directly or via a decorator). Important: the
+ callback object must be manually kept alive for as long as the
+ callback may be invoked from the C level.
+ """
+ def callback_decorator_wrap(python_callable):
+ if not callable(python_callable):
+ raise TypeError("the 'python_callable' argument "
+ "is not callable")
+ return self._backend.callback(cdecl, python_callable,
+ error, onerror)
+ if isinstance(cdecl, basestring):
+ cdecl = self._typeof(cdecl, consider_function_as_funcptr=True)
+ if python_callable is None:
+ return callback_decorator_wrap # decorator mode
+ else:
+ return callback_decorator_wrap(python_callable) # direct mode
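+
+ # Illustrative sketch only, not part of upstream cffi: decorator mode.
+ # The 'compare' object must be kept alive for as long as C code may
+ # still invoke it.
+ #
+ # @ffi.callback("int(int, int)")
+ # def compare(a, b):
+ #     return (a > b) - (a < b)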
+
+ def getctype(self, cdecl, replace_with=''):
+ """Return a string giving the C type 'cdecl', which may be itself
+ a string or a object. If 'replace_with' is given, it gives
+ extra text to append (or insert for more complicated C types), like
+ a variable name, or '*' to get actually the C type 'pointer-to-cdecl'.
+ """
+ if isinstance(cdecl, basestring):
+ cdecl = self._typeof(cdecl)
+ replace_with = replace_with.strip()
+ if (replace_with.startswith('*')
+ and '&[' in self._backend.getcname(cdecl, '&')):
+ replace_with = '(%s)' % replace_with
+ elif replace_with and not replace_with[0] in '[(':
+ replace_with = ' ' + replace_with
+ return self._backend.getcname(cdecl, replace_with)
+
+ def gc(self, cdata, destructor, size=0):
+ """Return a new cdata object that points to the same
+ data. Later, when this new cdata object is garbage-collected,
+ 'destructor(old_cdata_object)' will be called.
+
+ The optional 'size' gives an estimate of the size, used to
+ trigger the garbage collection more eagerly. So far only used
+ on PyPy. It tells the GC that the returned object keeps alive
+ roughly 'size' bytes of external memory.
+ """
+ return self._backend.gcp(cdata, destructor, size)
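+
+ # Illustrative sketch only, not part of upstream cffi; assumes 'lib'
+ # exposes malloc() and free():
+ #
+ # raw = lib.malloc(100)
+ # p = ffi.gc(raw, lib.free, size=100) # lib.free(raw) runs when p dies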
+
+ def _get_cached_btype(self, type):
+ assert self._lock.acquire(False) is False
+ # call me with the lock!
+ try:
+ BType = self._cached_btypes[type]
+ except KeyError:
+ finishlist = []
+ BType = type.get_cached_btype(self, finishlist)
+ for type in finishlist:
+ type.finish_backend_type(self, finishlist)
+ return BType
+
+ def verify(self, source='', tmpdir=None, **kwargs):
+ """Verify that the current ffi signatures compile on this
+ machine, and return a dynamic library object. The dynamic
+ library can be used to call functions and access global
+ variables declared in this 'ffi'. The library is compiled
+ by the C compiler: it gives you C-level API compatibility
+ (including calling macros). This is unlike 'ffi.dlopen()',
+ which requires binary compatibility in the signatures.
+ """
+ from .verifier import Verifier, _caller_dir_pycache
+ #
+ # If set_unicode(True) was called, insert the UNICODE and
+ # _UNICODE macro declarations
+ if self._windows_unicode:
+ self._apply_windows_unicode(kwargs)
+ #
+ # Set the tmpdir here, and not in Verifier.__init__: it picks
+ # up the caller's directory, which we want to be the caller of
+ # ffi.verify(), as opposed to the caller of Verifier().
+ tmpdir = tmpdir or _caller_dir_pycache()
+ #
+ # Make a Verifier() and use it to load the library.
+ self.verifier = Verifier(self, source, tmpdir, **kwargs)
+ lib = self.verifier.load_library()
+ #
+ # Save the loaded library for keep-alive purposes, even
+ # if the caller doesn't keep it alive itself (it should).
+ self._libraries.append(lib)
+ return lib
+
+ def _get_errno(self):
+ return self._backend.get_errno()
+ def _set_errno(self, errno):
+ self._backend.set_errno(errno)
+ errno = property(_get_errno, _set_errno, None,
+ "the value of 'errno' from/to the C calls")
+
+ def getwinerror(self, code=-1):
+ return self._backend.getwinerror(code)
+
+ def _pointer_to(self, ctype):
+ with self._lock:
+ return model.pointer_cache(self, ctype)
+
+ def addressof(self, cdata, *fields_or_indexes):
+ """Return the address of a .
+ If 'fields_or_indexes' are given, returns the address of that
+ field or array item in the structure or array, recursively in
+ case of nested structures.
+ """
+ try:
+ ctype = self._backend.typeof(cdata)
+ except TypeError:
+ if '__addressof__' in type(cdata).__dict__:
+ return type(cdata).__addressof__(cdata, *fields_or_indexes)
+ raise
+ if fields_or_indexes:
+ ctype, offset = self._typeoffsetof(ctype, *fields_or_indexes)
+ else:
+ if ctype.kind == "pointer":
+ raise TypeError("addressof(pointer)")
+ offset = 0
+ ctypeptr = self._pointer_to(ctype)
+ return self._backend.rawaddressof(ctypeptr, cdata, offset)
+
+ def _typeoffsetof(self, ctype, field_or_index, *fields_or_indexes):
+ ctype, offset = self._backend.typeoffsetof(ctype, field_or_index)
+ for field1 in fields_or_indexes:
+ ctype, offset1 = self._backend.typeoffsetof(ctype, field1, 1)
+ offset += offset1
+ return ctype, offset
+
+ def include(self, ffi_to_include):
+ """Includes the typedefs, structs, unions and enums defined
+ in another FFI instance. Usage is similar to a #include in C,
+ where a part of the program might include types defined in
+ another part for its own usage. Note that the include()
+ method has no effect on functions, constants and global
+ variables, which must anyway be accessed directly from the
+ lib object returned by the original FFI instance.
+ """
+ if not isinstance(ffi_to_include, FFI):
+ raise TypeError("ffi.include() expects an argument that is also of"
+ " type cffi.FFI, not %r" % (
+ type(ffi_to_include).__name__,))
+ if ffi_to_include is self:
+ raise ValueError("self.include(self)")
+ with ffi_to_include._lock:
+ with self._lock:
+ self._parser.include(ffi_to_include._parser)
+ self._cdefsources.append('[')
+ self._cdefsources.extend(ffi_to_include._cdefsources)
+ self._cdefsources.append(']')
+ self._included_ffis.append(ffi_to_include)
+
+ def new_handle(self, x):
+ return self._backend.newp_handle(self.BVoidP, x)
+
+ def from_handle(self, x):
+ return self._backend.from_handle(x)
+
+ def release(self, x):
+ self._backend.release(x)
+
+ def set_unicode(self, enabled_flag):
+ """Windows: if 'enabled_flag' is True, enable the UNICODE and
+ _UNICODE defines in C, and declare the types like TCHAR and LPTCSTR
+ to be (pointers to) wchar_t. If 'enabled_flag' is False,
+ declare these types to be (pointers to) plain 8-bit characters.
+ This is mostly for backward compatibility; you usually want True.
+ """
+ if self._windows_unicode is not None:
+ raise ValueError("set_unicode() can only be called once")
+ enabled_flag = bool(enabled_flag)
+ if enabled_flag:
+ self.cdef("typedef wchar_t TBYTE;"
+ "typedef wchar_t TCHAR;"
+ "typedef const wchar_t *LPCTSTR;"
+ "typedef const wchar_t *PCTSTR;"
+ "typedef wchar_t *LPTSTR;"
+ "typedef wchar_t *PTSTR;"
+ "typedef TBYTE *PTBYTE;"
+ "typedef TCHAR *PTCHAR;")
+ else:
+ self.cdef("typedef char TBYTE;"
+ "typedef char TCHAR;"
+ "typedef const char *LPCTSTR;"
+ "typedef const char *PCTSTR;"
+ "typedef char *LPTSTR;"
+ "typedef char *PTSTR;"
+ "typedef TBYTE *PTBYTE;"
+ "typedef TCHAR *PTCHAR;")
+ self._windows_unicode = enabled_flag
+
+ def _apply_windows_unicode(self, kwds):
+ defmacros = kwds.get('define_macros', ())
+ if not isinstance(defmacros, (list, tuple)):
+ raise TypeError("'define_macros' must be a list or tuple")
+ defmacros = list(defmacros) + [('UNICODE', '1'),
+ ('_UNICODE', '1')]
+ kwds['define_macros'] = defmacros
+
+ def _apply_embedding_fix(self, kwds):
+ # must include an argument like "-lpython2.7" for the compiler
+ def ensure(key, value):
+ lst = kwds.setdefault(key, [])
+ if value not in lst:
+ lst.append(value)
+ #
+ if '__pypy__' in sys.builtin_module_names:
+ import os
+ if sys.platform == "win32":
+ # we need 'libpypy-c.lib'. Current distributions of
+ # pypy (>= 4.1) contain it as 'libs/python27.lib'.
+ pythonlib = "python{0[0]}{0[1]}".format(sys.version_info)
+ if hasattr(sys, 'prefix'):
+ ensure('library_dirs', os.path.join(sys.prefix, 'libs'))
+ else:
+ # we need 'libpypy-c.{so,dylib}', which should be by
+ # default located in 'sys.prefix/bin' for installed
+ # systems.
+ if sys.version_info < (3,):
+ pythonlib = "pypy-c"
+ else:
+ pythonlib = "pypy3-c"
+ if hasattr(sys, 'prefix'):
+ ensure('library_dirs', os.path.join(sys.prefix, 'bin'))
+ # On uninstalled pypy's, the libpypy-c is typically found in
+ # .../pypy/goal/.
+ if hasattr(sys, 'prefix'):
+ ensure('library_dirs', os.path.join(sys.prefix, 'pypy', 'goal'))
+ else:
+ if sys.platform == "win32":
+ template = "python%d%d"
+ if hasattr(sys, 'gettotalrefcount'):
+ template += '_d'
+ else:
+ try:
+ import sysconfig
+ except ImportError: # 2.6
+ from cffi._shimmed_dist_utils import sysconfig
+ template = "python%d.%d"
+ if sysconfig.get_config_var('DEBUG_EXT'):
+ template += sysconfig.get_config_var('DEBUG_EXT')
+ pythonlib = (template %
+ (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff))
+ if hasattr(sys, 'abiflags'):
+ pythonlib += sys.abiflags
+ ensure('libraries', pythonlib)
+ if sys.platform == "win32":
+ ensure('extra_link_args', '/MANIFEST')
+
+ def set_source(self, module_name, source, source_extension='.c', **kwds):
+ import os
+ if hasattr(self, '_assigned_source'):
+ raise ValueError("set_source() cannot be called several times "
+ "per ffi object")
+ if not isinstance(module_name, basestring):
+ raise TypeError("'module_name' must be a string")
+ if os.sep in module_name or (os.altsep and os.altsep in module_name):
+ raise ValueError("'module_name' must not contain '/': use a dotted "
+ "name to make a 'package.module' location")
+ self._assigned_source = (str(module_name), source,
+ source_extension, kwds)
+
+ def set_source_pkgconfig(self, module_name, pkgconfig_libs, source,
+ source_extension='.c', **kwds):
+ from . import pkgconfig
+ if not isinstance(pkgconfig_libs, list):
+ raise TypeError("the pkgconfig_libs argument must be a list "
+ "of package names")
+ kwds2 = pkgconfig.flags_from_pkgconfig(pkgconfig_libs)
+ pkgconfig.merge_flags(kwds, kwds2)
+ self.set_source(module_name, source, source_extension, **kwds)
+
+ def distutils_extension(self, tmpdir='build', verbose=True):
+ from cffi._shimmed_dist_utils import mkpath
+ from .recompiler import recompile
+ #
+ if not hasattr(self, '_assigned_source'):
+ if hasattr(self, 'verifier'): # fallback, 'tmpdir' ignored
+ return self.verifier.get_extension()
+ raise ValueError("set_source() must be called before"
+ " distutils_extension()")
+ module_name, source, source_extension, kwds = self._assigned_source
+ if source is None:
+ raise TypeError("distutils_extension() is only for C extension "
+ "modules, not for dlopen()-style pure Python "
+ "modules")
+ mkpath(tmpdir)
+ ext, updated = recompile(self, module_name,
+ source, tmpdir=tmpdir, extradir=tmpdir,
+ source_extension=source_extension,
+ call_c_compiler=False, **kwds)
+ if verbose:
+ if updated:
+ sys.stderr.write("regenerated: %r\n" % (ext.sources[0],))
+ else:
+ sys.stderr.write("not modified: %r\n" % (ext.sources[0],))
+ return ext
+
+ def emit_c_code(self, filename):
+ from .recompiler import recompile
+ #
+ if not hasattr(self, '_assigned_source'):
+ raise ValueError("set_source() must be called before emit_c_code()")
+ module_name, source, source_extension, kwds = self._assigned_source
+ if source is None:
+ raise TypeError("emit_c_code() is only for C extension modules, "
+ "not for dlopen()-style pure Python modules")
+ recompile(self, module_name, source,
+ c_file=filename, call_c_compiler=False,
+ uses_ffiplatform=False, **kwds)
+
+ def emit_python_code(self, filename):
+ from .recompiler import recompile
+ #
+ if not hasattr(self, '_assigned_source'):
+ raise ValueError("set_source() must be called before emit_c_code()")
+ module_name, source, source_extension, kwds = self._assigned_source
+ if source is not None:
+ raise TypeError("emit_python_code() is only for dlopen()-style "
+ "pure Python modules, not for C extension modules")
+ recompile(self, module_name, source,
+ c_file=filename, call_c_compiler=False,
+ uses_ffiplatform=False, **kwds)
+
+ def compile(self, tmpdir='.', verbose=0, target=None, debug=None):
+ """The 'target' argument gives the final file name of the
+ compiled DLL. Use '*' to force distutils' choice, suitable for
+ regular CPython C API modules. Use a file name ending in '.*'
+ to ask for the system's default extension for dynamic libraries
+ (.so/.dll/.dylib).
+
+ The default is '*' when building a non-embedded C API extension,
+ and (module_name + '.*') when building an embedded library.
+ """
+ from .recompiler import recompile
+ #
+ if not hasattr(self, '_assigned_source'):
+ raise ValueError("set_source() must be called before compile()")
+ module_name, source, source_extension, kwds = self._assigned_source
+ return recompile(self, module_name, source, tmpdir=tmpdir,
+ target=target, source_extension=source_extension,
+ compiler_verbose=verbose, debug=debug, **kwds)
+
+ def init_once(self, func, tag):
+ # Read _init_once_cache[tag], which is either (False, lock) if
+ # we're calling the function now in some thread, or (True, result).
+ # Don't call setdefault() in most cases, to avoid allocating and
+ # immediately freeing a lock; but still use setdefault() to avoid
+ # races.
+ try:
+ x = self._init_once_cache[tag]
+ except KeyError:
+ x = self._init_once_cache.setdefault(tag, (False, allocate_lock()))
+ # Common case: we got (True, result), so we return the result.
+ if x[0]:
+ return x[1]
+ # Else, it's a lock. Acquire it to serialize the following tests.
+ with x[1]:
+ # Read again from _init_once_cache the current status.
+ x = self._init_once_cache[tag]
+ if x[0]:
+ return x[1]
+ # Call the function and store the result back.
+ result = func()
+ self._init_once_cache[tag] = (True, result)
+ return result
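+
+ # Illustrative sketch only, not part of upstream cffi; 'expensive_setup'
+ # is a hypothetical zero-argument function. It runs at most once per FFI
+ # instance, even if several threads reach this line concurrently:
+ #
+ # result = ffi.init_once(expensive_setup, "mylib-setup")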
+
+ def embedding_init_code(self, pysource):
+ if self._embedding:
+ raise ValueError("embedding_init_code() can only be called once")
+ # fix 'pysource' before it gets dumped into the C file:
+ # - remove empty lines at the beginning, so it starts at "line 1"
+ # - dedent, if all non-empty lines are indented
+ # - check for SyntaxErrors
+ import re
+ match = re.match(r'\s*\n', pysource)
+ if match:
+ pysource = pysource[match.end():]
+ lines = pysource.splitlines() or ['']
+ prefix = re.match(r'\s*', lines[0]).group()
+ for i in range(1, len(lines)):
+ line = lines[i]
+ if line.rstrip():
+ while not line.startswith(prefix):
+ prefix = prefix[:-1]
+ i = len(prefix)
+ lines = [line[i:]+'\n' for line in lines]
+ pysource = ''.join(lines)
+ #
+ compile(pysource, "cffi_init", "exec")
+ #
+ self._embedding = pysource
+
+ def def_extern(self, *args, **kwds):
+ raise ValueError("ffi.def_extern() is only available on API-mode FFI "
+ "objects")
+
+ def list_types(self):
+ """Returns the user type names known to this FFI instance.
+ This returns a tuple containing three lists of names:
+ (typedef_names, names_of_structs, names_of_unions)
+ """
+ typedefs = []
+ structs = []
+ unions = []
+ for key in self._parser._declarations:
+ if key.startswith('typedef '):
+ typedefs.append(key[8:])
+ elif key.startswith('struct '):
+ structs.append(key[7:])
+ elif key.startswith('union '):
+ unions.append(key[6:])
+ typedefs.sort()
+ structs.sort()
+ unions.sort()
+ return (typedefs, structs, unions)
+
+
+def _load_backend_lib(backend, name, flags):
+ import os
+ if not isinstance(name, basestring):
+ if sys.platform != "win32" or name is not None:
+ return backend.load_library(name, flags)
+ name = "c" # Windows: load_library(None) fails, but this works
+ # on Python 2 (backward compatibility hack only)
+ first_error = None
+ if '.' in name or '/' in name or os.sep in name:
+ try:
+ return backend.load_library(name, flags)
+ except OSError as e:
+ first_error = e
+ import ctypes.util
+ path = ctypes.util.find_library(name)
+ if path is None:
+ if name == "c" and sys.platform == "win32" and sys.version_info >= (3,):
+ raise OSError("dlopen(None) cannot work on Windows for Python 3 "
+ "(see http://bugs.python.org/issue23606)")
+ msg = ("ctypes.util.find_library() did not manage "
+ "to locate a library called %r" % (name,))
+ if first_error is not None:
+ msg = "%s. Additionally, %s" % (first_error, msg)
+ raise OSError(msg)
+ return backend.load_library(path, flags)
+
+def _make_ffi_library(ffi, libname, flags):
+ backend = ffi._backend
+ backendlib = _load_backend_lib(backend, libname, flags)
+ #
+ def accessor_function(name):
+ key = 'function ' + name
+ tp, _ = ffi._parser._declarations[key]
+ BType = ffi._get_cached_btype(tp)
+ value = backendlib.load_function(BType, name)
+ library.__dict__[name] = value
+ #
+ def accessor_variable(name):
+ key = 'variable ' + name
+ tp, _ = ffi._parser._declarations[key]
+ BType = ffi._get_cached_btype(tp)
+ read_variable = backendlib.read_variable
+ write_variable = backendlib.write_variable
+ setattr(FFILibrary, name, property(
+ lambda self: read_variable(BType, name),
+ lambda self, value: write_variable(BType, name, value)))
+ #
+ def addressof_var(name):
+ try:
+ return addr_variables[name]
+ except KeyError:
+ with ffi._lock:
+ if name not in addr_variables:
+ key = 'variable ' + name
+ tp, _ = ffi._parser._declarations[key]
+ BType = ffi._get_cached_btype(tp)
+ if BType.kind != 'array':
+ BType = model.pointer_cache(ffi, BType)
+ p = backendlib.load_function(BType, name)
+ addr_variables[name] = p
+ return addr_variables[name]
+ #
+ def accessor_constant(name):
+ raise NotImplementedError("non-integer constant '%s' cannot be "
+ "accessed from a dlopen() library" % (name,))
+ #
+ def accessor_int_constant(name):
+ library.__dict__[name] = ffi._parser._int_constants[name]
+ #
+ accessors = {}
+ accessors_version = [False]
+ addr_variables = {}
+ #
+ def update_accessors():
+ if accessors_version[0] is ffi._cdef_version:
+ return
+ #
+ for key, (tp, _) in ffi._parser._declarations.items():
+ if not isinstance(tp, model.EnumType):
+ tag, name = key.split(' ', 1)
+ if tag == 'function':
+ accessors[name] = accessor_function
+ elif tag == 'variable':
+ accessors[name] = accessor_variable
+ elif tag == 'constant':
+ accessors[name] = accessor_constant
+ else:
+ for i, enumname in enumerate(tp.enumerators):
+ def accessor_enum(name, tp=tp, i=i):
+ tp.check_not_partial()
+ library.__dict__[name] = tp.enumvalues[i]
+ accessors[enumname] = accessor_enum
+ for name in ffi._parser._int_constants:
+ accessors.setdefault(name, accessor_int_constant)
+ accessors_version[0] = ffi._cdef_version
+ #
+ def make_accessor(name):
+ with ffi._lock:
+ if name in library.__dict__ or name in FFILibrary.__dict__:
+ return # added by another thread while waiting for the lock
+ if name not in accessors:
+ update_accessors()
+ if name not in accessors:
+ raise AttributeError(name)
+ accessors[name](name)
+ #
+ class FFILibrary(object):
+ def __getattr__(self, name):
+ make_accessor(name)
+ return getattr(self, name)
+ def __setattr__(self, name, value):
+ try:
+ property = getattr(self.__class__, name)
+ except AttributeError:
+ make_accessor(name)
+ setattr(self, name, value)
+ else:
+ property.__set__(self, value)
+ def __dir__(self):
+ with ffi._lock:
+ update_accessors()
+ return accessors.keys()
+ def __addressof__(self, name):
+ if name in library.__dict__:
+ return library.__dict__[name]
+ if name in FFILibrary.__dict__:
+ return addressof_var(name)
+ make_accessor(name)
+ if name in library.__dict__:
+ return library.__dict__[name]
+ if name in FFILibrary.__dict__:
+ return addressof_var(name)
+ raise AttributeError("cffi library has no function or "
+ "global variable named '%s'" % (name,))
+ def __cffi_close__(self):
+ backendlib.close_lib()
+ self.__dict__.clear()
+ #
+ if isinstance(libname, basestring):
+ try:
+ if not isinstance(libname, str): # unicode, on Python 2
+ libname = libname.encode('utf-8')
+ FFILibrary.__name__ = 'FFILibrary_%s' % libname
+ except UnicodeError:
+ pass
+ library = FFILibrary()
+ return library, library.__dict__
+
+def _builtin_function_type(func):
+ # a hack to make at least ffi.typeof(builtin_function) work,
+ # if the builtin function was obtained by 'vengine_cpy'.
+ import sys
+ try:
+ module = sys.modules[func.__module__]
+ ffi = module._cffi_original_ffi
+ types_of_builtin_funcs = module._cffi_types_of_builtin_funcs
+ tp = types_of_builtin_funcs[func]
+ except (KeyError, AttributeError, TypeError):
+ return None
+ else:
+ with ffi._lock:
+ return ffi._get_cached_btype(tp)
diff --git a/Backend/venv/lib/python3.12/site-packages/cffi/backend_ctypes.py b/Backend/venv/lib/python3.12/site-packages/cffi/backend_ctypes.py
new file mode 100644
index 00000000..e7956a79
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cffi/backend_ctypes.py
@@ -0,0 +1,1121 @@
+import ctypes, ctypes.util, operator, sys
+from . import model
+
+if sys.version_info < (3,):
+ bytechr = chr
+else:
+ unicode = str
+ long = int
+ xrange = range
+ bytechr = lambda num: bytes([num])
+
+class CTypesType(type):
+ pass
+
+class CTypesData(object):
+ __metaclass__ = CTypesType
+ __slots__ = ['__weakref__']
+ __name__ = ''
+
+ def __init__(self, *args):
+ raise TypeError("cannot instantiate %r" % (self.__class__,))
+
+ @classmethod
+ def _newp(cls, init):
+ raise TypeError("expected a pointer or array ctype, got '%s'"
+ % (cls._get_c_name(),))
+
+ @staticmethod
+ def _to_ctypes(value):
+ raise TypeError
+
+ @classmethod
+ def _arg_to_ctypes(cls, *value):
+ try:
+ ctype = cls._ctype
+ except AttributeError:
+ raise TypeError("cannot create an instance of %r" % (cls,))
+ if value:
+ res = cls._to_ctypes(*value)
+ if not isinstance(res, ctype):
+ res = cls._ctype(res)
+ else:
+ res = cls._ctype()
+ return res
+
+ @classmethod
+ def _create_ctype_obj(cls, init):
+ if init is None:
+ return cls._arg_to_ctypes()
+ else:
+ return cls._arg_to_ctypes(init)
+
+ @staticmethod
+ def _from_ctypes(ctypes_value):
+ raise TypeError
+
+ @classmethod
+ def _get_c_name(cls, replace_with=''):
+ return cls._reftypename.replace(' &', replace_with)
+
+ @classmethod
+ def _fix_class(cls):
+ cls.__name__ = 'CData<%s>' % (cls._get_c_name(),)
+ cls.__qualname__ = 'CData<%s>' % (cls._get_c_name(),)
+ cls.__module__ = 'ffi'
+
+ def _get_own_repr(self):
+ raise NotImplementedError
+
+ def _addr_repr(self, address):
+ if address == 0:
+ return 'NULL'
+ else:
+ if address < 0:
+ address += 1 << (8*ctypes.sizeof(ctypes.c_void_p))
+ return '0x%x' % address
+
+ def __repr__(self, c_name=None):
+ own = self._get_own_repr()
+ return '<cdata %r %s>' % (c_name or self._get_c_name(), own)
+
+ def _convert_to_address(self, BClass):
+ if BClass is None:
+ raise TypeError("cannot convert %r to an address" % (
+ self._get_c_name(),))
+ else:
+ raise TypeError("cannot convert %r to %r" % (
+ self._get_c_name(), BClass._get_c_name()))
+
+ @classmethod
+ def _get_size(cls):
+ return ctypes.sizeof(cls._ctype)
+
+ def _get_size_of_instance(self):
+ return ctypes.sizeof(self._ctype)
+
+ @classmethod
+ def _cast_from(cls, source):
+ raise TypeError("cannot cast to %r" % (cls._get_c_name(),))
+
+ def _cast_to_integer(self):
+ return self._convert_to_address(None)
+
+ @classmethod
+ def _alignment(cls):
+ return ctypes.alignment(cls._ctype)
+
+ def __iter__(self):
+ raise TypeError("cdata %r does not support iteration" % (
+ self._get_c_name()),)
+
+ def _make_cmp(name):
+ cmpfunc = getattr(operator, name)
+ def cmp(self, other):
+ v_is_ptr = not isinstance(self, CTypesGenericPrimitive)
+ w_is_ptr = (isinstance(other, CTypesData) and
+ not isinstance(other, CTypesGenericPrimitive))
+ if v_is_ptr and w_is_ptr:
+ return cmpfunc(self._convert_to_address(None),
+ other._convert_to_address(None))
+ elif v_is_ptr or w_is_ptr:
+ return NotImplemented
+ else:
+ if isinstance(self, CTypesGenericPrimitive):
+ self = self._value
+ if isinstance(other, CTypesGenericPrimitive):
+ other = other._value
+ return cmpfunc(self, other)
+ cmp.func_name = name
+ return cmp
+
+ __eq__ = _make_cmp('__eq__')
+ __ne__ = _make_cmp('__ne__')
+ __lt__ = _make_cmp('__lt__')
+ __le__ = _make_cmp('__le__')
+ __gt__ = _make_cmp('__gt__')
+ __ge__ = _make_cmp('__ge__')
+
+ def __hash__(self):
+ return hash(self._convert_to_address(None))
+
+ def _to_string(self, maxlen):
+ raise TypeError("string(): %r" % (self,))
+
+
+class CTypesGenericPrimitive(CTypesData):
+ __slots__ = []
+
+ def __hash__(self):
+ return hash(self._value)
+
+ def _get_own_repr(self):
+ return repr(self._from_ctypes(self._value))
+
+
+class CTypesGenericArray(CTypesData):
+ __slots__ = []
+
+ @classmethod
+ def _newp(cls, init):
+ return cls(init)
+
+ def __iter__(self):
+ for i in xrange(len(self)):
+ yield self[i]
+
+ def _get_own_repr(self):
+ return self._addr_repr(ctypes.addressof(self._blob))
+
+
+class CTypesGenericPtr(CTypesData):
+ __slots__ = ['_address', '_as_ctype_ptr']
+ _automatic_casts = False
+ kind = "pointer"
+
+ @classmethod
+ def _newp(cls, init):
+ return cls(init)
+
+ @classmethod
+ def _cast_from(cls, source):
+ if source is None:
+ address = 0
+ elif isinstance(source, CTypesData):
+ address = source._cast_to_integer()
+ elif isinstance(source, (int, long)):
+ address = source
+ else:
+ raise TypeError("bad type for cast to %r: %r" %
+ (cls, type(source).__name__))
+ return cls._new_pointer_at(address)
+
+ @classmethod
+ def _new_pointer_at(cls, address):
+ self = cls.__new__(cls)
+ self._address = address
+ self._as_ctype_ptr = ctypes.cast(address, cls._ctype)
+ return self
+
+ def _get_own_repr(self):
+ try:
+ return self._addr_repr(self._address)
+ except AttributeError:
+ return '???'
+
+ def _cast_to_integer(self):
+ return self._address
+
+ def __nonzero__(self):
+ return bool(self._address)
+ __bool__ = __nonzero__
+
+ @classmethod
+ def _to_ctypes(cls, value):
+ if not isinstance(value, CTypesData):
+ raise TypeError("unexpected %s object" % type(value).__name__)
+ address = value._convert_to_address(cls)
+ return ctypes.cast(address, cls._ctype)
+
+ @classmethod
+ def _from_ctypes(cls, ctypes_ptr):
+ address = ctypes.cast(ctypes_ptr, ctypes.c_void_p).value or 0
+ return cls._new_pointer_at(address)
+
+ @classmethod
+ def _initialize(cls, ctypes_ptr, value):
+ if value:
+ ctypes_ptr.contents = cls._to_ctypes(value).contents
+
+ def _convert_to_address(self, BClass):
+ if (BClass in (self.__class__, None) or BClass._automatic_casts
+ or self._automatic_casts):
+ return self._address
+ else:
+ return CTypesData._convert_to_address(self, BClass)
+
+
+class CTypesBaseStructOrUnion(CTypesData):
+ __slots__ = ['_blob']
+
+ @classmethod
+ def _create_ctype_obj(cls, init):
+ # may be overridden
+ raise TypeError("cannot instantiate opaque type %s" % (cls,))
+
+ def _get_own_repr(self):
+ return self._addr_repr(ctypes.addressof(self._blob))
+
+ @classmethod
+ def _offsetof(cls, fieldname):
+ return getattr(cls._ctype, fieldname).offset
+
+ def _convert_to_address(self, BClass):
+ if getattr(BClass, '_BItem', None) is self.__class__:
+ return ctypes.addressof(self._blob)
+ else:
+ return CTypesData._convert_to_address(self, BClass)
+
+ @classmethod
+ def _from_ctypes(cls, ctypes_struct_or_union):
+ self = cls.__new__(cls)
+ self._blob = ctypes_struct_or_union
+ return self
+
+ @classmethod
+ def _to_ctypes(cls, value):
+ return value._blob
+
+ def __repr__(self, c_name=None):
+ return CTypesData.__repr__(self, c_name or self._get_c_name(' &'))
+
+
+class CTypesBackend(object):
+
+ PRIMITIVE_TYPES = {
+ 'char': ctypes.c_char,
+ 'short': ctypes.c_short,
+ 'int': ctypes.c_int,
+ 'long': ctypes.c_long,
+ 'long long': ctypes.c_longlong,
+ 'signed char': ctypes.c_byte,
+ 'unsigned char': ctypes.c_ubyte,
+ 'unsigned short': ctypes.c_ushort,
+ 'unsigned int': ctypes.c_uint,
+ 'unsigned long': ctypes.c_ulong,
+ 'unsigned long long': ctypes.c_ulonglong,
+ 'float': ctypes.c_float,
+ 'double': ctypes.c_double,
+ '_Bool': ctypes.c_bool,
+ }
+
+ for _name in ['unsigned long long', 'unsigned long',
+ 'unsigned int', 'unsigned short', 'unsigned char']:
+ _size = ctypes.sizeof(PRIMITIVE_TYPES[_name])
+ PRIMITIVE_TYPES['uint%d_t' % (8*_size)] = PRIMITIVE_TYPES[_name]
+ if _size == ctypes.sizeof(ctypes.c_void_p):
+ PRIMITIVE_TYPES['uintptr_t'] = PRIMITIVE_TYPES[_name]
+ if _size == ctypes.sizeof(ctypes.c_size_t):
+ PRIMITIVE_TYPES['size_t'] = PRIMITIVE_TYPES[_name]
+
+ for _name in ['long long', 'long', 'int', 'short', 'signed char']:
+ _size = ctypes.sizeof(PRIMITIVE_TYPES[_name])
+ PRIMITIVE_TYPES['int%d_t' % (8*_size)] = PRIMITIVE_TYPES[_name]
+ if _size == ctypes.sizeof(ctypes.c_void_p):
+ PRIMITIVE_TYPES['intptr_t'] = PRIMITIVE_TYPES[_name]
+ PRIMITIVE_TYPES['ptrdiff_t'] = PRIMITIVE_TYPES[_name]
+ if _size == ctypes.sizeof(ctypes.c_size_t):
+ PRIMITIVE_TYPES['ssize_t'] = PRIMITIVE_TYPES[_name]
+
+
+ def __init__(self):
+ self.RTLD_LAZY = 0 # not supported anyway by ctypes
+ self.RTLD_NOW = 0
+ self.RTLD_GLOBAL = ctypes.RTLD_GLOBAL
+ self.RTLD_LOCAL = ctypes.RTLD_LOCAL
+
+ def set_ffi(self, ffi):
+ self.ffi = ffi
+
+ def _get_types(self):
+ return CTypesData, CTypesType
+
+ def load_library(self, path, flags=0):
+ cdll = ctypes.CDLL(path, flags)
+ return CTypesLibrary(self, cdll)
+
+ def new_void_type(self):
+ class CTypesVoid(CTypesData):
+ __slots__ = []
+ _reftypename = 'void &'
+ @staticmethod
+ def _from_ctypes(novalue):
+ return None
+ @staticmethod
+ def _to_ctypes(novalue):
+ if novalue is not None:
+ raise TypeError("None expected, got %s object" %
+ (type(novalue).__name__,))
+ return None
+ CTypesVoid._fix_class()
+ return CTypesVoid
+
+ def new_primitive_type(self, name):
+ if name == 'wchar_t':
+ raise NotImplementedError(name)
+ ctype = self.PRIMITIVE_TYPES[name]
+ if name == 'char':
+ kind = 'char'
+ elif name in ('float', 'double'):
+ kind = 'float'
+ else:
+ if name in ('signed char', 'unsigned char'):
+ kind = 'byte'
+ elif name == '_Bool':
+ kind = 'bool'
+ else:
+ kind = 'int'
+ is_signed = (ctype(-1).value == -1)
+ #
+ def _cast_source_to_int(source):
+ if isinstance(source, (int, long, float)):
+ source = int(source)
+ elif isinstance(source, CTypesData):
+ source = source._cast_to_integer()
+ elif isinstance(source, bytes):
+ source = ord(source)
+ elif source is None:
+ source = 0
+ else:
+ raise TypeError("bad type for cast to %r: %r" %
+ (CTypesPrimitive, type(source).__name__))
+ return source
+ #
+ kind1 = kind
+ class CTypesPrimitive(CTypesGenericPrimitive):
+ __slots__ = ['_value']
+ _ctype = ctype
+ _reftypename = '%s &' % name
+ kind = kind1
+
+ def __init__(self, value):
+ self._value = value
+
+ @staticmethod
+ def _create_ctype_obj(init):
+ if init is None:
+ return ctype()
+ return ctype(CTypesPrimitive._to_ctypes(init))
+
+ if kind == 'int' or kind == 'byte':
+ @classmethod
+ def _cast_from(cls, source):
+ source = _cast_source_to_int(source)
+ source = ctype(source).value # cast within range
+ return cls(source)
+ def __int__(self):
+ return self._value
+
+ if kind == 'bool':
+ @classmethod
+ def _cast_from(cls, source):
+ if not isinstance(source, (int, long, float)):
+ source = _cast_source_to_int(source)
+ return cls(bool(source))
+ def __int__(self):
+ return int(self._value)
+
+ if kind == 'char':
+ @classmethod
+ def _cast_from(cls, source):
+ source = _cast_source_to_int(source)
+ source = bytechr(source & 0xFF)
+ return cls(source)
+ def __int__(self):
+ return ord(self._value)
+
+ if kind == 'float':
+ @classmethod
+ def _cast_from(cls, source):
+ if isinstance(source, float):
+ pass
+ elif isinstance(source, CTypesGenericPrimitive):
+ if hasattr(source, '__float__'):
+ source = float(source)
+ else:
+ source = int(source)
+ else:
+ source = _cast_source_to_int(source)
+ source = ctype(source).value # fix precision
+ return cls(source)
+ def __int__(self):
+ return int(self._value)
+ def __float__(self):
+ return self._value
+
+ _cast_to_integer = __int__
+
+ if kind == 'int' or kind == 'byte' or kind == 'bool':
+ @staticmethod
+ def _to_ctypes(x):
+ if not isinstance(x, (int, long)):
+ if isinstance(x, CTypesData):
+ x = int(x)
+ else:
+ raise TypeError("integer expected, got %s" %
+ type(x).__name__)
+ if ctype(x).value != x:
+ if not is_signed and x < 0:
+ raise OverflowError("%s: negative integer" % name)
+ else:
+ raise OverflowError("%s: integer out of bounds"
+ % name)
+ return x
+
+ if kind == 'char':
+ @staticmethod
+ def _to_ctypes(x):
+ if isinstance(x, bytes) and len(x) == 1:
+ return x
+                    if isinstance(x, CTypesPrimitive):    # <CData <char>>
+ return x._value
+ raise TypeError("character expected, got %s" %
+ type(x).__name__)
+ def __nonzero__(self):
+ return ord(self._value) != 0
+ else:
+ def __nonzero__(self):
+ return self._value != 0
+ __bool__ = __nonzero__
+
+ if kind == 'float':
+ @staticmethod
+ def _to_ctypes(x):
+ if not isinstance(x, (int, long, float, CTypesData)):
+ raise TypeError("float expected, got %s" %
+ type(x).__name__)
+ return ctype(x).value
+
+ @staticmethod
+ def _from_ctypes(value):
+ return getattr(value, 'value', value)
+
+ @staticmethod
+ def _initialize(blob, init):
+ blob.value = CTypesPrimitive._to_ctypes(init)
+
+ if kind == 'char':
+ def _to_string(self, maxlen):
+ return self._value
+ if kind == 'byte':
+ def _to_string(self, maxlen):
+ return chr(self._value & 0xff)
+ #
+ CTypesPrimitive._fix_class()
+ return CTypesPrimitive
+
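+    # Editor's sketch, not upstream code: the 'int'/'byte' kinds above cast
+    # through ctype(source).value, so out-of-range values wrap silently
+    # (ctypes does no overflow checking on assignment) instead of raising:
+    #
+    #     BSChar = CTypesBackend().new_primitive_type('signed char')
+    #     BSChar._cast_from(300)._value    # == 44, i.e. 300 truncated to 8 bits
+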
+ def new_pointer_type(self, BItem):
+ getbtype = self.ffi._get_cached_btype
+ if BItem is getbtype(model.PrimitiveType('char')):
+ kind = 'charp'
+ elif BItem in (getbtype(model.PrimitiveType('signed char')),
+ getbtype(model.PrimitiveType('unsigned char'))):
+ kind = 'bytep'
+ elif BItem is getbtype(model.void_type):
+ kind = 'voidp'
+ else:
+ kind = 'generic'
+ #
+ class CTypesPtr(CTypesGenericPtr):
+ __slots__ = ['_own']
+ if kind == 'charp':
+ __slots__ += ['__as_strbuf']
+ _BItem = BItem
+ if hasattr(BItem, '_ctype'):
+ _ctype = ctypes.POINTER(BItem._ctype)
+ _bitem_size = ctypes.sizeof(BItem._ctype)
+ else:
+ _ctype = ctypes.c_void_p
+ if issubclass(BItem, CTypesGenericArray):
+ _reftypename = BItem._get_c_name('(* &)')
+ else:
+ _reftypename = BItem._get_c_name(' * &')
+
+ def __init__(self, init):
+ ctypeobj = BItem._create_ctype_obj(init)
+ if kind == 'charp':
+ self.__as_strbuf = ctypes.create_string_buffer(
+ ctypeobj.value + b'\x00')
+ self._as_ctype_ptr = ctypes.cast(
+ self.__as_strbuf, self._ctype)
+ else:
+ self._as_ctype_ptr = ctypes.pointer(ctypeobj)
+ self._address = ctypes.cast(self._as_ctype_ptr,
+ ctypes.c_void_p).value
+ self._own = True
+
+ def __add__(self, other):
+ if isinstance(other, (int, long)):
+ return self._new_pointer_at(self._address +
+ other * self._bitem_size)
+ else:
+ return NotImplemented
+
+ def __sub__(self, other):
+ if isinstance(other, (int, long)):
+ return self._new_pointer_at(self._address -
+ other * self._bitem_size)
+ elif type(self) is type(other):
+ return (self._address - other._address) // self._bitem_size
+ else:
+ return NotImplemented
+
+ def __getitem__(self, index):
+ if getattr(self, '_own', False) and index != 0:
+ raise IndexError
+ return BItem._from_ctypes(self._as_ctype_ptr[index])
+
+ def __setitem__(self, index, value):
+ self._as_ctype_ptr[index] = BItem._to_ctypes(value)
+
+ if kind == 'charp' or kind == 'voidp':
+ @classmethod
+ def _arg_to_ctypes(cls, *value):
+ if value and isinstance(value[0], bytes):
+ return ctypes.c_char_p(value[0])
+ else:
+ return super(CTypesPtr, cls)._arg_to_ctypes(*value)
+
+ if kind == 'charp' or kind == 'bytep':
+ def _to_string(self, maxlen):
+ if maxlen < 0:
+ maxlen = sys.maxsize
+ p = ctypes.cast(self._as_ctype_ptr,
+ ctypes.POINTER(ctypes.c_char))
+ n = 0
+ while n < maxlen and p[n] != b'\x00':
+ n += 1
+ return b''.join([p[i] for i in range(n)])
+
+ def _get_own_repr(self):
+ if getattr(self, '_own', False):
+ return 'owning %d bytes' % (
+ ctypes.sizeof(self._as_ctype_ptr.contents),)
+ return super(CTypesPtr, self)._get_own_repr()
+ #
+ if (BItem is self.ffi._get_cached_btype(model.void_type) or
+ BItem is self.ffi._get_cached_btype(model.PrimitiveType('char'))):
+ CTypesPtr._automatic_casts = True
+ #
+ CTypesPtr._fix_class()
+ return CTypesPtr
+
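+    # Editor's sketch, not upstream code: pointer arithmetic above is plain
+    # address math scaled by _bitem_size, mirroring C.  For an 'int *' p
+    # (assuming a 4-byte int):
+    #
+    #     q = p + 3     # q._address == p._address + 3 * 4
+    #     q - p         # == 3, defined only between pointers of the same type
+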
+ def new_array_type(self, CTypesPtr, length):
+ if length is None:
+ brackets = ' &[]'
+ else:
+ brackets = ' &[%d]' % length
+ BItem = CTypesPtr._BItem
+ getbtype = self.ffi._get_cached_btype
+ if BItem is getbtype(model.PrimitiveType('char')):
+ kind = 'char'
+ elif BItem in (getbtype(model.PrimitiveType('signed char')),
+ getbtype(model.PrimitiveType('unsigned char'))):
+ kind = 'byte'
+ else:
+ kind = 'generic'
+ #
+ class CTypesArray(CTypesGenericArray):
+ __slots__ = ['_blob', '_own']
+ if length is not None:
+ _ctype = BItem._ctype * length
+ else:
+ __slots__.append('_ctype')
+ _reftypename = BItem._get_c_name(brackets)
+ _declared_length = length
+ _CTPtr = CTypesPtr
+
+ def __init__(self, init):
+ if length is None:
+ if isinstance(init, (int, long)):
+ len1 = init
+ init = None
+ elif kind == 'char' and isinstance(init, bytes):
+ len1 = len(init) + 1 # extra null
+ else:
+ init = tuple(init)
+ len1 = len(init)
+ self._ctype = BItem._ctype * len1
+ self._blob = self._ctype()
+ self._own = True
+ if init is not None:
+ self._initialize(self._blob, init)
+
+ @staticmethod
+ def _initialize(blob, init):
+ if isinstance(init, bytes):
+ init = [init[i:i+1] for i in range(len(init))]
+ else:
+ if isinstance(init, CTypesGenericArray):
+ if (len(init) != len(blob) or
+ not isinstance(init, CTypesArray)):
+ raise TypeError("length/type mismatch: %s" % (init,))
+ init = tuple(init)
+ if len(init) > len(blob):
+ raise IndexError("too many initializers")
+ addr = ctypes.cast(blob, ctypes.c_void_p).value
+ PTR = ctypes.POINTER(BItem._ctype)
+ itemsize = ctypes.sizeof(BItem._ctype)
+ for i, value in enumerate(init):
+ p = ctypes.cast(addr + i * itemsize, PTR)
+ BItem._initialize(p.contents, value)
+
+ def __len__(self):
+ return len(self._blob)
+
+ def __getitem__(self, index):
+ if not (0 <= index < len(self._blob)):
+ raise IndexError
+ return BItem._from_ctypes(self._blob[index])
+
+ def __setitem__(self, index, value):
+ if not (0 <= index < len(self._blob)):
+ raise IndexError
+ self._blob[index] = BItem._to_ctypes(value)
+
+ if kind == 'char' or kind == 'byte':
+ def _to_string(self, maxlen):
+ if maxlen < 0:
+ maxlen = len(self._blob)
+ p = ctypes.cast(self._blob,
+ ctypes.POINTER(ctypes.c_char))
+ n = 0
+ while n < maxlen and p[n] != b'\x00':
+ n += 1
+ return b''.join([p[i] for i in range(n)])
+
+ def _get_own_repr(self):
+ if getattr(self, '_own', False):
+ return 'owning %d bytes' % (ctypes.sizeof(self._blob),)
+ return super(CTypesArray, self)._get_own_repr()
+
+ def _convert_to_address(self, BClass):
+ if BClass in (CTypesPtr, None) or BClass._automatic_casts:
+ return ctypes.addressof(self._blob)
+ else:
+ return CTypesData._convert_to_address(self, BClass)
+
+ @staticmethod
+ def _from_ctypes(ctypes_array):
+ self = CTypesArray.__new__(CTypesArray)
+ self._blob = ctypes_array
+ return self
+
+ @staticmethod
+ def _arg_to_ctypes(value):
+ return CTypesPtr._arg_to_ctypes(value)
+
+ def __add__(self, other):
+ if isinstance(other, (int, long)):
+ return CTypesPtr._new_pointer_at(
+ ctypes.addressof(self._blob) +
+ other * ctypes.sizeof(BItem._ctype))
+ else:
+ return NotImplemented
+
+ @classmethod
+ def _cast_from(cls, source):
+ raise NotImplementedError("casting to %r" % (
+ cls._get_c_name(),))
+ #
+ CTypesArray._fix_class()
+ return CTypesArray
+
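+    # Editor's sketch, not upstream code: how CTypesArray.__init__ above
+    # picks the runtime length len1 of an open array (declared length None),
+    # e.g. when reached through ffi.new() with this backend:
+    #
+    #     ffi.new("int[]", 10)        # 10 items, zero-initialized by ctypes
+    #     ffi.new("char[]", b"hi")    # 3 items: len(init) + 1 for the '\0'
+    #     ffi.new("int[]", [1, 2])    # 2 items, copied from the sequence
+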
+ def _new_struct_or_union(self, kind, name, base_ctypes_class):
+ #
+ class struct_or_union(base_ctypes_class):
+ pass
+ struct_or_union.__name__ = '%s_%s' % (kind, name)
+ kind1 = kind
+ #
+ class CTypesStructOrUnion(CTypesBaseStructOrUnion):
+ __slots__ = ['_blob']
+ _ctype = struct_or_union
+ _reftypename = '%s &' % (name,)
+ _kind = kind = kind1
+ #
+ CTypesStructOrUnion._fix_class()
+ return CTypesStructOrUnion
+
+ def new_struct_type(self, name):
+ return self._new_struct_or_union('struct', name, ctypes.Structure)
+
+ def new_union_type(self, name):
+ return self._new_struct_or_union('union', name, ctypes.Union)
+
+ def complete_struct_or_union(self, CTypesStructOrUnion, fields, tp,
+ totalsize=-1, totalalignment=-1, sflags=0,
+ pack=0):
+ if totalsize >= 0 or totalalignment >= 0:
+ raise NotImplementedError("the ctypes backend of CFFI does not support "
+ "structures completed by verify(); please "
+ "compile and install the _cffi_backend module.")
+ struct_or_union = CTypesStructOrUnion._ctype
+ fnames = [fname for (fname, BField, bitsize) in fields]
+ btypes = [BField for (fname, BField, bitsize) in fields]
+ bitfields = [bitsize for (fname, BField, bitsize) in fields]
+ #
+ bfield_types = {}
+ cfields = []
+ for (fname, BField, bitsize) in fields:
+ if bitsize < 0:
+ cfields.append((fname, BField._ctype))
+ bfield_types[fname] = BField
+ else:
+ cfields.append((fname, BField._ctype, bitsize))
+ bfield_types[fname] = Ellipsis
+ if sflags & 8:
+ struct_or_union._pack_ = 1
+ elif pack:
+ struct_or_union._pack_ = pack
+ struct_or_union._fields_ = cfields
+ CTypesStructOrUnion._bfield_types = bfield_types
+ #
+ @staticmethod
+ def _create_ctype_obj(init):
+ result = struct_or_union()
+ if init is not None:
+ initialize(result, init)
+ return result
+ CTypesStructOrUnion._create_ctype_obj = _create_ctype_obj
+ #
+ def initialize(blob, init):
+ if is_union:
+ if len(init) > 1:
+ raise ValueError("union initializer: %d items given, but "
+ "only one supported (use a dict if needed)"
+ % (len(init),))
+ if not isinstance(init, dict):
+ if isinstance(init, (bytes, unicode)):
+ raise TypeError("union initializer: got a str")
+ init = tuple(init)
+ if len(init) > len(fnames):
+ raise ValueError("too many values for %s initializer" %
+ CTypesStructOrUnion._get_c_name())
+ init = dict(zip(fnames, init))
+ addr = ctypes.addressof(blob)
+ for fname, value in init.items():
+ BField, bitsize = name2fieldtype[fname]
+ assert bitsize < 0, \
+ "not implemented: initializer with bit fields"
+ offset = CTypesStructOrUnion._offsetof(fname)
+ PTR = ctypes.POINTER(BField._ctype)
+ p = ctypes.cast(addr + offset, PTR)
+ BField._initialize(p.contents, value)
+ is_union = CTypesStructOrUnion._kind == 'union'
+ name2fieldtype = dict(zip(fnames, zip(btypes, bitfields)))
+ #
+ for fname, BField, bitsize in fields:
+ if fname == '':
+ raise NotImplementedError("nested anonymous structs/unions")
+ if hasattr(CTypesStructOrUnion, fname):
+ raise ValueError("the field name %r conflicts in "
+ "the ctypes backend" % fname)
+ if bitsize < 0:
+ def getter(self, fname=fname, BField=BField,
+ offset=CTypesStructOrUnion._offsetof(fname),
+ PTR=ctypes.POINTER(BField._ctype)):
+ addr = ctypes.addressof(self._blob)
+ p = ctypes.cast(addr + offset, PTR)
+ return BField._from_ctypes(p.contents)
+ def setter(self, value, fname=fname, BField=BField):
+ setattr(self._blob, fname, BField._to_ctypes(value))
+ #
+ if issubclass(BField, CTypesGenericArray):
+ setter = None
+ if BField._declared_length == 0:
+ def getter(self, fname=fname, BFieldPtr=BField._CTPtr,
+ offset=CTypesStructOrUnion._offsetof(fname),
+ PTR=ctypes.POINTER(BField._ctype)):
+ addr = ctypes.addressof(self._blob)
+ p = ctypes.cast(addr + offset, PTR)
+ return BFieldPtr._from_ctypes(p)
+ #
+ else:
+ def getter(self, fname=fname, BField=BField):
+ return BField._from_ctypes(getattr(self._blob, fname))
+ def setter(self, value, fname=fname, BField=BField):
+ # xxx obscure workaround
+ value = BField._to_ctypes(value)
+ oldvalue = getattr(self._blob, fname)
+ setattr(self._blob, fname, value)
+ if value != getattr(self._blob, fname):
+ setattr(self._blob, fname, oldvalue)
+ raise OverflowError("value too large for bitfield")
+ setattr(CTypesStructOrUnion, fname, property(getter, setter))
+ #
+ CTypesPtr = self.ffi._get_cached_btype(model.PointerType(tp))
+ for fname in fnames:
+ if hasattr(CTypesPtr, fname):
+ raise ValueError("the field name %r conflicts in "
+ "the ctypes backend" % fname)
+ def getter(self, fname=fname):
+ return getattr(self[0], fname)
+ def setter(self, value, fname=fname):
+ setattr(self[0], fname, value)
+ setattr(CTypesPtr, fname, property(getter, setter))
+
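+    # Editor's sketch, not upstream code ('struct point' is hypothetical):
+    # initialize() above accepts a sequence or a dict and reduces both to
+    # per-field stores via dict(zip(fnames, init)):
+    #
+    #     ffi.new("struct point *", [1, 2])
+    #     ffi.new("struct point *", {'x': 1, 'y': 2})
+    #
+    # A union accepts at most one item, since its fields share storage.
+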
+ def new_function_type(self, BArgs, BResult, has_varargs):
+ nameargs = [BArg._get_c_name() for BArg in BArgs]
+ if has_varargs:
+ nameargs.append('...')
+ nameargs = ', '.join(nameargs)
+ #
+ class CTypesFunctionPtr(CTypesGenericPtr):
+ __slots__ = ['_own_callback', '_name']
+ _ctype = ctypes.CFUNCTYPE(getattr(BResult, '_ctype', None),
+ *[BArg._ctype for BArg in BArgs],
+ use_errno=True)
+ _reftypename = BResult._get_c_name('(* &)(%s)' % (nameargs,))
+
+ def __init__(self, init, error=None):
+ # create a callback to the Python callable init()
+ import traceback
+ assert not has_varargs, "varargs not supported for callbacks"
+ if getattr(BResult, '_ctype', None) is not None:
+ error = BResult._from_ctypes(
+ BResult._create_ctype_obj(error))
+ else:
+ error = None
+ def callback(*args):
+ args2 = []
+ for arg, BArg in zip(args, BArgs):
+ args2.append(BArg._from_ctypes(arg))
+ try:
+ res2 = init(*args2)
+ res2 = BResult._to_ctypes(res2)
+ except:
+ traceback.print_exc()
+ res2 = error
+ if issubclass(BResult, CTypesGenericPtr):
+ if res2:
+ res2 = ctypes.cast(res2, ctypes.c_void_p).value
+ # .value: http://bugs.python.org/issue1574593
+ else:
+ res2 = None
+ #print repr(res2)
+ return res2
+ if issubclass(BResult, CTypesGenericPtr):
+ # The only pointers callbacks can return are void*s:
+ # http://bugs.python.org/issue5710
+ callback_ctype = ctypes.CFUNCTYPE(
+ ctypes.c_void_p,
+ *[BArg._ctype for BArg in BArgs],
+ use_errno=True)
+ else:
+ callback_ctype = CTypesFunctionPtr._ctype
+ self._as_ctype_ptr = callback_ctype(callback)
+ self._address = ctypes.cast(self._as_ctype_ptr,
+ ctypes.c_void_p).value
+ self._own_callback = init
+
+ @staticmethod
+ def _initialize(ctypes_ptr, value):
+ if value:
+ raise NotImplementedError("ctypes backend: not supported: "
+ "initializers for function pointers")
+
+ def __repr__(self):
+ c_name = getattr(self, '_name', None)
+ if c_name:
+ i = self._reftypename.index('(* &)')
+ if self._reftypename[i-1] not in ' )*':
+ c_name = ' ' + c_name
+ c_name = self._reftypename.replace('(* &)', c_name)
+ return CTypesData.__repr__(self, c_name)
+
+ def _get_own_repr(self):
+ if getattr(self, '_own_callback', None) is not None:
+ return 'calling %r' % (self._own_callback,)
+ return super(CTypesFunctionPtr, self)._get_own_repr()
+
+ def __call__(self, *args):
+ if has_varargs:
+ assert len(args) >= len(BArgs)
+ extraargs = args[len(BArgs):]
+ args = args[:len(BArgs)]
+ else:
+ assert len(args) == len(BArgs)
+ ctypes_args = []
+ for arg, BArg in zip(args, BArgs):
+ ctypes_args.append(BArg._arg_to_ctypes(arg))
+ if has_varargs:
+ for i, arg in enumerate(extraargs):
+ if arg is None:
+ ctypes_args.append(ctypes.c_void_p(0)) # NULL
+ continue
+ if not isinstance(arg, CTypesData):
+ raise TypeError(
+ "argument %d passed in the variadic part "
+ "needs to be a cdata object (got %s)" %
+ (1 + len(BArgs) + i, type(arg).__name__))
+ ctypes_args.append(arg._arg_to_ctypes(arg))
+ result = self._as_ctype_ptr(*ctypes_args)
+ return BResult._from_ctypes(result)
+ #
+ CTypesFunctionPtr._fix_class()
+ return CTypesFunctionPtr
+
+ def new_enum_type(self, name, enumerators, enumvalues, CTypesInt):
+ assert isinstance(name, str)
+ reverse_mapping = dict(zip(reversed(enumvalues),
+ reversed(enumerators)))
+ #
+ class CTypesEnum(CTypesInt):
+ __slots__ = []
+ _reftypename = '%s &' % name
+
+ def _get_own_repr(self):
+ value = self._value
+ try:
+ return '%d: %s' % (value, reverse_mapping[value])
+ except KeyError:
+ return str(value)
+
+ def _to_string(self, maxlen):
+ value = self._value
+ try:
+ return reverse_mapping[value]
+ except KeyError:
+ return str(value)
+ #
+ CTypesEnum._fix_class()
+ return CTypesEnum
+
+ def get_errno(self):
+ return ctypes.get_errno()
+
+ def set_errno(self, value):
+ ctypes.set_errno(value)
+
+ def string(self, b, maxlen=-1):
+ return b._to_string(maxlen)
+
+ def buffer(self, bptr, size=-1):
+ raise NotImplementedError("buffer() with ctypes backend")
+
+ def sizeof(self, cdata_or_BType):
+ if isinstance(cdata_or_BType, CTypesData):
+ return cdata_or_BType._get_size_of_instance()
+ else:
+ assert issubclass(cdata_or_BType, CTypesData)
+ return cdata_or_BType._get_size()
+
+ def alignof(self, BType):
+ assert issubclass(BType, CTypesData)
+ return BType._alignment()
+
+ def newp(self, BType, source):
+ if not issubclass(BType, CTypesData):
+ raise TypeError
+ return BType._newp(source)
+
+ def cast(self, BType, source):
+ return BType._cast_from(source)
+
+ def callback(self, BType, source, error, onerror):
+ assert onerror is None # XXX not implemented
+ return BType(source, error)
+
+ _weakref_cache_ref = None
+
+ def gcp(self, cdata, destructor, size=0):
+ if self._weakref_cache_ref is None:
+ import weakref
+ class MyRef(weakref.ref):
+ def __eq__(self, other):
+ myref = self()
+ return self is other or (
+ myref is not None and myref is other())
+ def __ne__(self, other):
+ return not (self == other)
+ def __hash__(self):
+ try:
+ return self._hash
+ except AttributeError:
+ self._hash = hash(self())
+ return self._hash
+ self._weakref_cache_ref = {}, MyRef
+ weak_cache, MyRef = self._weakref_cache_ref
+
+ if destructor is None:
+ try:
+ del weak_cache[MyRef(cdata)]
+ except KeyError:
+                raise TypeError("Can remove destructor only on an object "
+ "previously returned by ffi.gc()")
+ return None
+
+ def remove(k):
+ cdata, destructor = weak_cache.pop(k, (None, None))
+ if destructor is not None:
+ destructor(cdata)
+
+ new_cdata = self.cast(self.typeof(cdata), cdata)
+ assert new_cdata is not cdata
+ weak_cache[MyRef(new_cdata, remove)] = (cdata, destructor)
+ return new_cdata
+
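+    # Editor's note, not upstream code: gcp() above keeps only a weak
+    # reference to the cdata it hands back, so the destructor runs exactly
+    # when that object is garbage-collected (names below are hypothetical):
+    #
+    #     p = backend.gcp(raw, destructor=free_func)
+    #     del p    # MyRef's 'remove' callback fires -> free_func(raw)
+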
+ typeof = type
+
+ def getcname(self, BType, replace_with):
+ return BType._get_c_name(replace_with)
+
+ def typeoffsetof(self, BType, fieldname, num=0):
+ if isinstance(fieldname, str):
+ if num == 0 and issubclass(BType, CTypesGenericPtr):
+ BType = BType._BItem
+ if not issubclass(BType, CTypesBaseStructOrUnion):
+ raise TypeError("expected a struct or union ctype")
+ BField = BType._bfield_types[fieldname]
+ if BField is Ellipsis:
+ raise TypeError("not supported for bitfields")
+ return (BField, BType._offsetof(fieldname))
+ elif isinstance(fieldname, (int, long)):
+ if issubclass(BType, CTypesGenericArray):
+ BType = BType._CTPtr
+ if not issubclass(BType, CTypesGenericPtr):
+ raise TypeError("expected an array or ptr ctype")
+ BItem = BType._BItem
+ offset = BItem._get_size() * fieldname
+ if offset > sys.maxsize:
+ raise OverflowError
+ return (BItem, offset)
+ else:
+ raise TypeError(type(fieldname))
+
+ def rawaddressof(self, BTypePtr, cdata, offset=None):
+ if isinstance(cdata, CTypesBaseStructOrUnion):
+ ptr = ctypes.pointer(type(cdata)._to_ctypes(cdata))
+ elif isinstance(cdata, CTypesGenericPtr):
+ if offset is None or not issubclass(type(cdata)._BItem,
+ CTypesBaseStructOrUnion):
+ raise TypeError("unexpected cdata type")
+ ptr = type(cdata)._to_ctypes(cdata)
+ elif isinstance(cdata, CTypesGenericArray):
+ ptr = type(cdata)._to_ctypes(cdata)
+ else:
+            raise TypeError("expected a <cdata 'struct-or-union'>")
+ if offset:
+ ptr = ctypes.cast(
+ ctypes.c_void_p(
+ ctypes.cast(ptr, ctypes.c_void_p).value + offset),
+ type(ptr))
+ return BTypePtr._from_ctypes(ptr)
+
+
+class CTypesLibrary(object):
+
+ def __init__(self, backend, cdll):
+ self.backend = backend
+ self.cdll = cdll
+
+ def load_function(self, BType, name):
+ c_func = getattr(self.cdll, name)
+ funcobj = BType._from_ctypes(c_func)
+ funcobj._name = name
+ return funcobj
+
+ def read_variable(self, BType, name):
+ try:
+ ctypes_obj = BType._ctype.in_dll(self.cdll, name)
+ except AttributeError as e:
+ raise NotImplementedError(e)
+ return BType._from_ctypes(ctypes_obj)
+
+ def write_variable(self, BType, name, value):
+ new_ctypes_obj = BType._to_ctypes(value)
+ ctypes_obj = BType._ctype.in_dll(self.cdll, name)
+ ctypes.memmove(ctypes.addressof(ctypes_obj),
+ ctypes.addressof(new_ctypes_obj),
+ ctypes.sizeof(BType._ctype))
diff --git a/Backend/venv/lib/python3.12/site-packages/cffi/cffi_opcode.py b/Backend/venv/lib/python3.12/site-packages/cffi/cffi_opcode.py
new file mode 100644
index 00000000..6421df62
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cffi/cffi_opcode.py
@@ -0,0 +1,187 @@
+from .error import VerificationError
+
+class CffiOp(object):
+ def __init__(self, op, arg):
+ self.op = op
+ self.arg = arg
+
+ def as_c_expr(self):
+ if self.op is None:
+ assert isinstance(self.arg, str)
+ return '(_cffi_opcode_t)(%s)' % (self.arg,)
+ classname = CLASS_NAME[self.op]
+ return '_CFFI_OP(_CFFI_OP_%s, %s)' % (classname, self.arg)
+
+ def as_python_bytes(self):
+ if self.op is None and self.arg.isdigit():
+ value = int(self.arg) # non-negative: '-' not in self.arg
+ if value >= 2**31:
+ raise OverflowError("cannot emit %r: limited to 2**31-1"
+ % (self.arg,))
+ return format_four_bytes(value)
+ if isinstance(self.arg, str):
+ raise VerificationError("cannot emit to Python: %r" % (self.arg,))
+ return format_four_bytes((self.arg << 8) | self.op)
+
+ def __str__(self):
+ classname = CLASS_NAME.get(self.op, self.op)
+ return '(%s %s)' % (classname, self.arg)
+
+def format_four_bytes(num):
+ return '\\x%02X\\x%02X\\x%02X\\x%02X' % (
+ (num >> 24) & 0xFF,
+ (num >> 16) & 0xFF,
+ (num >> 8) & 0xFF,
+ (num ) & 0xFF)
+
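+# Editor's note -- example added in review, not upstream code: the returned
+# string is meant to be embedded in generated source text, so every byte
+# becomes a literal backslash-x escape:
+#
+#     format_four_bytes(0x2A03)    # == r'\x00\x00\x2A\x03' (16 characters)
+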
+OP_PRIMITIVE = 1
+OP_POINTER = 3
+OP_ARRAY = 5
+OP_OPEN_ARRAY = 7
+OP_STRUCT_UNION = 9
+OP_ENUM = 11
+OP_FUNCTION = 13
+OP_FUNCTION_END = 15
+OP_NOOP = 17
+OP_BITFIELD = 19
+OP_TYPENAME = 21
+OP_CPYTHON_BLTN_V = 23 # varargs
+OP_CPYTHON_BLTN_N = 25 # noargs
+OP_CPYTHON_BLTN_O = 27 # O (i.e. a single arg)
+OP_CONSTANT = 29
+OP_CONSTANT_INT = 31
+OP_GLOBAL_VAR = 33
+OP_DLOPEN_FUNC = 35
+OP_DLOPEN_CONST = 37
+OP_GLOBAL_VAR_F = 39
+OP_EXTERN_PYTHON = 41
+
+PRIM_VOID = 0
+PRIM_BOOL = 1
+PRIM_CHAR = 2
+PRIM_SCHAR = 3
+PRIM_UCHAR = 4
+PRIM_SHORT = 5
+PRIM_USHORT = 6
+PRIM_INT = 7
+PRIM_UINT = 8
+PRIM_LONG = 9
+PRIM_ULONG = 10
+PRIM_LONGLONG = 11
+PRIM_ULONGLONG = 12
+PRIM_FLOAT = 13
+PRIM_DOUBLE = 14
+PRIM_LONGDOUBLE = 15
+
+PRIM_WCHAR = 16
+PRIM_INT8 = 17
+PRIM_UINT8 = 18
+PRIM_INT16 = 19
+PRIM_UINT16 = 20
+PRIM_INT32 = 21
+PRIM_UINT32 = 22
+PRIM_INT64 = 23
+PRIM_UINT64 = 24
+PRIM_INTPTR = 25
+PRIM_UINTPTR = 26
+PRIM_PTRDIFF = 27
+PRIM_SIZE = 28
+PRIM_SSIZE = 29
+PRIM_INT_LEAST8 = 30
+PRIM_UINT_LEAST8 = 31
+PRIM_INT_LEAST16 = 32
+PRIM_UINT_LEAST16 = 33
+PRIM_INT_LEAST32 = 34
+PRIM_UINT_LEAST32 = 35
+PRIM_INT_LEAST64 = 36
+PRIM_UINT_LEAST64 = 37
+PRIM_INT_FAST8 = 38
+PRIM_UINT_FAST8 = 39
+PRIM_INT_FAST16 = 40
+PRIM_UINT_FAST16 = 41
+PRIM_INT_FAST32 = 42
+PRIM_UINT_FAST32 = 43
+PRIM_INT_FAST64 = 44
+PRIM_UINT_FAST64 = 45
+PRIM_INTMAX = 46
+PRIM_UINTMAX = 47
+PRIM_FLOATCOMPLEX = 48
+PRIM_DOUBLECOMPLEX = 49
+PRIM_CHAR16 = 50
+PRIM_CHAR32 = 51
+
+_NUM_PRIM = 52
+_UNKNOWN_PRIM = -1
+_UNKNOWN_FLOAT_PRIM = -2
+_UNKNOWN_LONG_DOUBLE = -3
+
+_IO_FILE_STRUCT = -1
+
+PRIMITIVE_TO_INDEX = {
+ 'char': PRIM_CHAR,
+ 'short': PRIM_SHORT,
+ 'int': PRIM_INT,
+ 'long': PRIM_LONG,
+ 'long long': PRIM_LONGLONG,
+ 'signed char': PRIM_SCHAR,
+ 'unsigned char': PRIM_UCHAR,
+ 'unsigned short': PRIM_USHORT,
+ 'unsigned int': PRIM_UINT,
+ 'unsigned long': PRIM_ULONG,
+ 'unsigned long long': PRIM_ULONGLONG,
+ 'float': PRIM_FLOAT,
+ 'double': PRIM_DOUBLE,
+ 'long double': PRIM_LONGDOUBLE,
+ '_cffi_float_complex_t': PRIM_FLOATCOMPLEX,
+ '_cffi_double_complex_t': PRIM_DOUBLECOMPLEX,
+ '_Bool': PRIM_BOOL,
+ 'wchar_t': PRIM_WCHAR,
+ 'char16_t': PRIM_CHAR16,
+ 'char32_t': PRIM_CHAR32,
+ 'int8_t': PRIM_INT8,
+ 'uint8_t': PRIM_UINT8,
+ 'int16_t': PRIM_INT16,
+ 'uint16_t': PRIM_UINT16,
+ 'int32_t': PRIM_INT32,
+ 'uint32_t': PRIM_UINT32,
+ 'int64_t': PRIM_INT64,
+ 'uint64_t': PRIM_UINT64,
+ 'intptr_t': PRIM_INTPTR,
+ 'uintptr_t': PRIM_UINTPTR,
+ 'ptrdiff_t': PRIM_PTRDIFF,
+ 'size_t': PRIM_SIZE,
+ 'ssize_t': PRIM_SSIZE,
+ 'int_least8_t': PRIM_INT_LEAST8,
+ 'uint_least8_t': PRIM_UINT_LEAST8,
+ 'int_least16_t': PRIM_INT_LEAST16,
+ 'uint_least16_t': PRIM_UINT_LEAST16,
+ 'int_least32_t': PRIM_INT_LEAST32,
+ 'uint_least32_t': PRIM_UINT_LEAST32,
+ 'int_least64_t': PRIM_INT_LEAST64,
+ 'uint_least64_t': PRIM_UINT_LEAST64,
+ 'int_fast8_t': PRIM_INT_FAST8,
+ 'uint_fast8_t': PRIM_UINT_FAST8,
+ 'int_fast16_t': PRIM_INT_FAST16,
+ 'uint_fast16_t': PRIM_UINT_FAST16,
+ 'int_fast32_t': PRIM_INT_FAST32,
+ 'uint_fast32_t': PRIM_UINT_FAST32,
+ 'int_fast64_t': PRIM_INT_FAST64,
+ 'uint_fast64_t': PRIM_UINT_FAST64,
+ 'intmax_t': PRIM_INTMAX,
+ 'uintmax_t': PRIM_UINTMAX,
+ }
+
+F_UNION = 0x01
+F_CHECK_FIELDS = 0x02
+F_PACKED = 0x04
+F_EXTERNAL = 0x08
+F_OPAQUE = 0x10
+
+G_FLAGS = dict([('_CFFI_' + _key, globals()[_key])
+ for _key in ['F_UNION', 'F_CHECK_FIELDS', 'F_PACKED',
+ 'F_EXTERNAL', 'F_OPAQUE']])
+
+CLASS_NAME = {}
+for _name, _value in list(globals().items()):
+ if _name.startswith('OP_') and isinstance(_value, int):
+ CLASS_NAME[_value] = _name[3:]
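+
+# Editor's note -- example added in review, not upstream code: an opcode is
+# packed for Python output as (arg << 8) | op, so with OP_POINTER == 3:
+#
+#     op = CffiOp(OP_POINTER, 42)
+#     op.as_python_bytes()    # == format_four_bytes((42 << 8) | 3)
+#                             # == r'\x00\x00\x2A\x03'
+#     str(op)                 # == '(POINTER 42)', via CLASS_NAME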
diff --git a/Backend/venv/lib/python3.12/site-packages/cffi/commontypes.py b/Backend/venv/lib/python3.12/site-packages/cffi/commontypes.py
new file mode 100644
index 00000000..d4dae351
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cffi/commontypes.py
@@ -0,0 +1,82 @@
+import sys
+from . import model
+from .error import FFIError
+
+
+COMMON_TYPES = {}
+
+try:
+ # fetch "bool" and all simple Windows types
+ from _cffi_backend import _get_common_types
+ _get_common_types(COMMON_TYPES)
+except ImportError:
+ pass
+
+COMMON_TYPES['FILE'] = model.unknown_type('FILE', '_IO_FILE')
+COMMON_TYPES['bool'] = '_Bool' # in case we got ImportError above
+COMMON_TYPES['float _Complex'] = '_cffi_float_complex_t'
+COMMON_TYPES['double _Complex'] = '_cffi_double_complex_t'
+
+for _type in model.PrimitiveType.ALL_PRIMITIVE_TYPES:
+ if _type.endswith('_t'):
+ COMMON_TYPES[_type] = _type
+del _type
+
+_CACHE = {}
+
+def resolve_common_type(parser, commontype):
+ try:
+ return _CACHE[commontype]
+ except KeyError:
+ cdecl = COMMON_TYPES.get(commontype, commontype)
+ if not isinstance(cdecl, str):
+ result, quals = cdecl, 0 # cdecl is already a BaseType
+ elif cdecl in model.PrimitiveType.ALL_PRIMITIVE_TYPES:
+ result, quals = model.PrimitiveType(cdecl), 0
+ elif cdecl == 'set-unicode-needed':
+ raise FFIError("The Windows type %r is only available after "
+ "you call ffi.set_unicode()" % (commontype,))
+ else:
+ if commontype == cdecl:
+ raise FFIError(
+ "Unsupported type: %r. Please look at "
+ "http://cffi.readthedocs.io/en/latest/cdef.html#ffi-cdef-limitations "
+ "and file an issue if you think this type should really "
+ "be supported." % (commontype,))
+ result, quals = parser.parse_type_and_quals(cdecl) # recursive
+
+ assert isinstance(result, model.BaseTypeByIdentity)
+ _CACHE[commontype] = result, quals
+ return result, quals
+
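+# Editor's sketch, not upstream code ('parser' stands for any Parser
+# instance): a typical resolution, memoized in _CACHE on first use:
+#
+#     resolve_common_type(parser, 'int32_t')
+#     # -> (model.PrimitiveType('int32_t'), 0), since the '*_t' primitive
+#     #    names were seeded into COMMON_TYPES above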
+
+# ____________________________________________________________
+# extra types for Windows (most of them are in commontypes.c)
+
+
+def win_common_types():
+ return {
+ "UNICODE_STRING": model.StructType(
+ "_UNICODE_STRING",
+ ["Length",
+ "MaximumLength",
+ "Buffer"],
+ [model.PrimitiveType("unsigned short"),
+ model.PrimitiveType("unsigned short"),
+ model.PointerType(model.PrimitiveType("wchar_t"))],
+ [-1, -1, -1]),
+ "PUNICODE_STRING": "UNICODE_STRING *",
+ "PCUNICODE_STRING": "const UNICODE_STRING *",
+
+ "TBYTE": "set-unicode-needed",
+ "TCHAR": "set-unicode-needed",
+ "LPCTSTR": "set-unicode-needed",
+ "PCTSTR": "set-unicode-needed",
+ "LPTSTR": "set-unicode-needed",
+ "PTSTR": "set-unicode-needed",
+ "PTBYTE": "set-unicode-needed",
+ "PTCHAR": "set-unicode-needed",
+ }
+
+if sys.platform == 'win32':
+ COMMON_TYPES.update(win_common_types())
diff --git a/Backend/venv/lib/python3.12/site-packages/cffi/cparser.py b/Backend/venv/lib/python3.12/site-packages/cffi/cparser.py
new file mode 100644
index 00000000..dd590d87
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cffi/cparser.py
@@ -0,0 +1,1015 @@
+from . import model
+from .commontypes import COMMON_TYPES, resolve_common_type
+from .error import FFIError, CDefError
+try:
+ from . import _pycparser as pycparser
+except ImportError:
+ import pycparser
+import weakref, re, sys
+
+try:
+ if sys.version_info < (3,):
+ import thread as _thread
+ else:
+ import _thread
+ lock = _thread.allocate_lock()
+except ImportError:
+ lock = None
+
+def _workaround_for_static_import_finders():
+    # Issue #392: packaging tools like cx_Freeze cannot find these modules
+    # because pycparser imports them dynamically using exec().  This is an
+    # obscure workaround.  This function is never called.
+ import pycparser.yacctab
+ import pycparser.lextab
+
+CDEF_SOURCE_STRING = "<cdef source string>"
+_r_comment = re.compile(r"/\*.*?\*/|//([^\n\\]|\\.)*?$",
+ re.DOTALL | re.MULTILINE)
+_r_define = re.compile(r"^\s*#\s*define\s+([A-Za-z_][A-Za-z_0-9]*)"
+ r"\b((?:[^\n\\]|\\.)*?)$",
+ re.DOTALL | re.MULTILINE)
+_r_line_directive = re.compile(r"^[ \t]*#[ \t]*(?:line|\d+)\b.*$", re.MULTILINE)
+_r_partial_enum = re.compile(r"=\s*\.\.\.\s*[,}]|\.\.\.\s*\}")
+_r_enum_dotdotdot = re.compile(r"__dotdotdot\d+__$")
+_r_partial_array = re.compile(r"\[\s*\.\.\.\s*\]")
+_r_words = re.compile(r"\w+|\S")
+_parser_cache = None
+_r_int_literal = re.compile(r"-?0?x?[0-9a-f]+[lu]*$", re.IGNORECASE)
+_r_stdcall1 = re.compile(r"\b(__stdcall|WINAPI)\b")
+_r_stdcall2 = re.compile(r"[(]\s*(__stdcall|WINAPI)\b")
+_r_cdecl = re.compile(r"\b__cdecl\b")
+_r_extern_python = re.compile(r'\bextern\s*"'
+ r'(Python|Python\s*\+\s*C|C\s*\+\s*Python)"\s*.')
+_r_star_const_space = re.compile( # matches "* const "
+ r"[*]\s*((const|volatile|restrict)\b\s*)+")
+_r_int_dotdotdot = re.compile(r"(\b(int|long|short|signed|unsigned|char)\s*)+"
+ r"\.\.\.")
+_r_float_dotdotdot = re.compile(r"\b(double|float)\s*\.\.\.")
+
+def _get_parser():
+ global _parser_cache
+ if _parser_cache is None:
+ _parser_cache = pycparser.CParser()
+ return _parser_cache
+
+def _workaround_for_old_pycparser(csource):
+ # Workaround for a pycparser issue (fixed between pycparser 2.10 and
+ # 2.14): "char*const***" gives us a wrong syntax tree, the same as
+ # for "char***(*const)". This means we can't tell the difference
+ # afterwards. But "char(*const(***))" gives us the right syntax
+ # tree. The issue only occurs if there are several stars in
+ # sequence with no parenthesis in between, just possibly qualifiers.
+ # Attempt to fix it by adding some parentheses in the source: each
+ # time we see "* const" or "* const *", we add an opening
+ # parenthesis before each star---the hard part is figuring out where
+ # to close them.
+ parts = []
+ while True:
+ match = _r_star_const_space.search(csource)
+ if not match:
+ break
+ #print repr(''.join(parts)+csource), '=>',
+ parts.append(csource[:match.start()])
+ parts.append('('); closing = ')'
+ parts.append(match.group()) # e.g. "* const "
+ endpos = match.end()
+ if csource.startswith('*', endpos):
+ parts.append('('); closing += ')'
+ level = 0
+ i = endpos
+ while i < len(csource):
+ c = csource[i]
+ if c == '(':
+ level += 1
+ elif c == ')':
+ if level == 0:
+ break
+ level -= 1
+ elif c in ',;=':
+ if level == 0:
+ break
+ i += 1
+ csource = csource[endpos:i] + closing + csource[i:]
+ #print repr(''.join(parts)+csource)
+ parts.append(csource)
+ return ''.join(parts)
+
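+# Editor's note -- example added in review, not upstream code: hand-tracing
+# the rewriting above on a declaration that triggers the old-pycparser bug:
+#
+#     _workaround_for_old_pycparser("char * const *x;")
+#     # -> "char (* const (*x));"
+#
+# i.e. each "* const" gains opening parentheses that are closed again just
+# before the ';' found at nesting level zero.
+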
+def _preprocess_extern_python(csource):
+ # input: `extern "Python" int foo(int);` or
+ # `extern "Python" { int foo(int); }`
+ # output:
+ # void __cffi_extern_python_start;
+ # int foo(int);
+ # void __cffi_extern_python_stop;
+ #
+ # input: `extern "Python+C" int foo(int);`
+ # output:
+ # void __cffi_extern_python_plus_c_start;
+ # int foo(int);
+ # void __cffi_extern_python_stop;
+ parts = []
+ while True:
+ match = _r_extern_python.search(csource)
+ if not match:
+ break
+ endpos = match.end() - 1
+ #print
+ #print ''.join(parts)+csource
+ #print '=>'
+ parts.append(csource[:match.start()])
+ if 'C' in match.group(1):
+ parts.append('void __cffi_extern_python_plus_c_start; ')
+ else:
+ parts.append('void __cffi_extern_python_start; ')
+ if csource[endpos] == '{':
+ # grouping variant
+ closing = csource.find('}', endpos)
+ if closing < 0:
+ raise CDefError("'extern \"Python\" {': no '}' found")
+ if csource.find('{', endpos + 1, closing) >= 0:
+ raise NotImplementedError("cannot use { } inside a block "
+ "'extern \"Python\" { ... }'")
+ parts.append(csource[endpos+1:closing])
+ csource = csource[closing+1:]
+ else:
+ # non-grouping variant
+ semicolon = csource.find(';', endpos)
+ if semicolon < 0:
+ raise CDefError("'extern \"Python\": no ';' found")
+ parts.append(csource[endpos:semicolon+1])
+ csource = csource[semicolon+1:]
+ parts.append(' void __cffi_extern_python_stop;')
+ #print ''.join(parts)+csource
+ #print
+ parts.append(csource)
+ return ''.join(parts)
+
+def _warn_for_string_literal(csource):
+ if '"' not in csource:
+ return
+ for line in csource.splitlines():
+ if '"' in line and not line.lstrip().startswith('#'):
+ import warnings
+ warnings.warn("String literal found in cdef() or type source. "
+ "String literals are ignored here, but you should "
+ "remove them anyway because some character sequences "
+ "confuse pre-parsing.")
+ break
+
+def _warn_for_non_extern_non_static_global_variable(decl):
+ if not decl.storage:
+ import warnings
+ warnings.warn("Global variable '%s' in cdef(): for consistency "
+ "with C it should have a storage class specifier "
+ "(usually 'extern')" % (decl.name,))
+
+def _remove_line_directives(csource):
+ # _r_line_directive matches whole lines, without the final \n, if they
+ # start with '#line' with some spacing allowed, or '#NUMBER'. This
+ # function stores them away and replaces them with exactly the string
+ # '#line@N', where N is the index in the list 'line_directives'.
+ line_directives = []
+ def replace(m):
+ i = len(line_directives)
+ line_directives.append(m.group())
+ return '#line@%d' % i
+ csource = _r_line_directive.sub(replace, csource)
+ return csource, line_directives
+
+def _put_back_line_directives(csource, line_directives):
+ def replace(m):
+ s = m.group()
+ if not s.startswith('#line@'):
+ raise AssertionError("unexpected #line directive "
+                                 "(should have been processed and removed)")
+ return line_directives[int(s[6:])]
+ return _r_line_directive.sub(replace, csource)
+
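+# Editor's note -- example added in review, not upstream code: round trip of
+# the two helpers above:
+#
+#     src, saved = _remove_line_directives('#line 5 "x.c"\nint a;')
+#     # src == '#line@0\nint a;'      saved == ['#line 5 "x.c"']
+#     _put_back_line_directives(src, saved)    # restores the original text
+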
+def _preprocess(csource):
+ # First, remove the lines of the form '#line N "filename"' because
+ # the "filename" part could confuse the rest
+ csource, line_directives = _remove_line_directives(csource)
+    # Remove comments.  NOTE: this only works because the cdef() section
+ # should not contain any string literals (except in line directives)!
+ def replace_keeping_newlines(m):
+ return ' ' + m.group().count('\n') * '\n'
+ csource = _r_comment.sub(replace_keeping_newlines, csource)
+ # Remove the "#define FOO x" lines
+ macros = {}
+ for match in _r_define.finditer(csource):
+ macroname, macrovalue = match.groups()
+ macrovalue = macrovalue.replace('\\\n', '').strip()
+ macros[macroname] = macrovalue
+ csource = _r_define.sub('', csource)
+ #
+ if pycparser.__version__ < '2.14':
+ csource = _workaround_for_old_pycparser(csource)
+ #
+ # BIG HACK: replace WINAPI or __stdcall with "volatile const".
+ # It doesn't make sense for the return type of a function to be
+ # "volatile volatile const", so we abuse it to detect __stdcall...
+ # Hack number 2 is that "int(volatile *fptr)();" is not valid C
+ # syntax, so we place the "volatile" before the opening parenthesis.
+ csource = _r_stdcall2.sub(' volatile volatile const(', csource)
+ csource = _r_stdcall1.sub(' volatile volatile const ', csource)
+ csource = _r_cdecl.sub(' ', csource)
+ #
+ # Replace `extern "Python"` with start/end markers
+ csource = _preprocess_extern_python(csource)
+ #
+ # Now there should not be any string literal left; warn if we get one
+ _warn_for_string_literal(csource)
+ #
+ # Replace "[...]" with "[__dotdotdotarray__]"
+ csource = _r_partial_array.sub('[__dotdotdotarray__]', csource)
+ #
+ # Replace "...}" with "__dotdotdotNUM__}". This construction should
+ # occur only at the end of enums; at the end of structs we have "...;}"
+ # and at the end of vararg functions "...);". Also replace "=...[,}]"
+ # with ",__dotdotdotNUM__[,}]": this occurs in the enums too, when
+ # giving an unknown value.
+ matches = list(_r_partial_enum.finditer(csource))
+ for number, match in enumerate(reversed(matches)):
+ p = match.start()
+ if csource[p] == '=':
+ p2 = csource.find('...', p, match.end())
+ assert p2 > p
+ csource = '%s,__dotdotdot%d__ %s' % (csource[:p], number,
+ csource[p2+3:])
+ else:
+ assert csource[p:p+3] == '...'
+ csource = '%s __dotdotdot%d__ %s' % (csource[:p], number,
+ csource[p+3:])
+ # Replace "int ..." or "unsigned long int..." with "__dotdotdotint__"
+ csource = _r_int_dotdotdot.sub(' __dotdotdotint__ ', csource)
+ # Replace "float ..." or "double..." with "__dotdotdotfloat__"
+ csource = _r_float_dotdotdot.sub(' __dotdotdotfloat__ ', csource)
+ # Replace all remaining "..." with the same name, "__dotdotdot__",
+ # which is declared with a typedef for the purpose of C parsing.
+ csource = csource.replace('...', ' __dotdotdot__ ')
+ # Finally, put back the line directives
+ csource = _put_back_line_directives(csource, line_directives)
+ return csource, macros
+
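+# Editor's note -- example added in review, not upstream code: net effect of
+# the pipeline above on a small cdef fragment:
+#
+#     csource, macros = _preprocess("#define FOO 42\nint f(int, ...);")
+#     # macros == {'FOO': '42'}; the '#define' line is stripped out, and the
+#     # ellipsis is respelled as the parseable placeholder name, leaving
+#     # roughly 'int f(int,  __dotdotdot__ );'
+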
+def _common_type_names(csource):
+ # Look in the source for what looks like usages of types from the
+ # list of common types. A "usage" is approximated here as the
+ # appearance of the word, minus a "definition" of the type, which
+ # is the last word in a "typedef" statement. Approximative only
+ # but should be fine for all the common types.
+ look_for_words = set(COMMON_TYPES)
+ look_for_words.add(';')
+ look_for_words.add(',')
+ look_for_words.add('(')
+ look_for_words.add(')')
+ look_for_words.add('typedef')
+ words_used = set()
+ is_typedef = False
+ paren = 0
+ previous_word = ''
+ for word in _r_words.findall(csource):
+ if word in look_for_words:
+ if word == ';':
+ if is_typedef:
+ words_used.discard(previous_word)
+ look_for_words.discard(previous_word)
+ is_typedef = False
+ elif word == 'typedef':
+ is_typedef = True
+ paren = 0
+ elif word == '(':
+ paren += 1
+ elif word == ')':
+ paren -= 1
+ elif word == ',':
+ if is_typedef and paren == 0:
+ words_used.discard(previous_word)
+ look_for_words.discard(previous_word)
+ else: # word in COMMON_TYPES
+ words_used.add(word)
+ previous_word = word
+ return words_used
+
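+# Editor's note -- example added in review, not upstream code: the scan above
+# counts a word as a usage only when the cdef does not also typedef it:
+#
+#     _common_type_names("typedef int FILE; size_t f(FILE *);")
+#     # -> {'size_t'}   ('FILE' is discarded as locally typedef'ed)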
+
+class Parser(object):
+
+ def __init__(self):
+ self._declarations = {}
+ self._included_declarations = set()
+ self._anonymous_counter = 0
+ self._structnode2type = weakref.WeakKeyDictionary()
+ self._options = {}
+ self._int_constants = {}
+ self._recomplete = []
+ self._uses_new_feature = None
+
+ def _parse(self, csource):
+ csource, macros = _preprocess(csource)
+ # XXX: for more efficiency we would need to poke into the
+ # internals of CParser... the following registers the
+ # typedefs, because their presence or absence influences the
+ # parsing itself (but what they are typedef'ed to plays no role)
+ ctn = _common_type_names(csource)
+ typenames = []
+ for name in sorted(self._declarations):
+ if name.startswith('typedef '):
+ name = name[8:]
+ typenames.append(name)
+ ctn.discard(name)
+ typenames += sorted(ctn)
+ #
+ csourcelines = []
+        csourcelines.append('# 1 "<cdef automatic initialization code>"')
+ for typename in typenames:
+ csourcelines.append('typedef int %s;' % typename)
+ csourcelines.append('typedef int __dotdotdotint__, __dotdotdotfloat__,'
+ ' __dotdotdot__;')
+ # this forces pycparser to consider the following in the file
+        # called <cdef source string> from line 1
+ csourcelines.append('# 1 "%s"' % (CDEF_SOURCE_STRING,))
+ csourcelines.append(csource)
+ csourcelines.append('') # see test_missing_newline_bug
+ fullcsource = '\n'.join(csourcelines)
+ if lock is not None:
+ lock.acquire() # pycparser is not thread-safe...
+ try:
+ ast = _get_parser().parse(fullcsource)
+ except pycparser.c_parser.ParseError as e:
+ self.convert_pycparser_error(e, csource)
+ finally:
+ if lock is not None:
+ lock.release()
+ # csource will be used to find buggy source text
+ return ast, macros, csource
+
+ def _convert_pycparser_error(self, e, csource):
+        # xxx look for "<cdef source string>:NUM:" at the start of str(e)
+ # and interpret that as a line number. This will not work if
+ # the user gives explicit ``# NUM "FILE"`` directives.
+ line = None
+ msg = str(e)
+ match = re.match(r"%s:(\d+):" % (CDEF_SOURCE_STRING,), msg)
+ if match:
+ linenum = int(match.group(1), 10)
+ csourcelines = csource.splitlines()
+ if 1 <= linenum <= len(csourcelines):
+ line = csourcelines[linenum-1]
+ return line
+
+ def convert_pycparser_error(self, e, csource):
+ line = self._convert_pycparser_error(e, csource)
+
+ msg = str(e)
+ if line:
+ msg = 'cannot parse "%s"\n%s' % (line.strip(), msg)
+ else:
+ msg = 'parse error\n%s' % (msg,)
+ raise CDefError(msg)
+
+ def parse(self, csource, override=False, packed=False, pack=None,
+ dllexport=False):
+ if packed:
+ if packed != True:
+ raise ValueError("'packed' should be False or True; use "
+ "'pack' to give another value")
+ if pack:
+ raise ValueError("cannot give both 'pack' and 'packed'")
+ pack = 1
+ elif pack:
+ if pack & (pack - 1):
+ raise ValueError("'pack' must be a power of two, not %r" %
+ (pack,))
+ else:
+ pack = 0
+ prev_options = self._options
+ try:
+ self._options = {'override': override,
+ 'packed': pack,
+ 'dllexport': dllexport}
+ self._internal_parse(csource)
+ finally:
+ self._options = prev_options
+
+ def _internal_parse(self, csource):
+ ast, macros, csource = self._parse(csource)
+ # add the macros
+ self._process_macros(macros)
+ # find the first "__dotdotdot__" and use that as a separator
+ # between the repeated typedefs and the real csource
+ iterator = iter(ast.ext)
+ for decl in iterator:
+ if decl.name == '__dotdotdot__':
+ break
+ else:
+ assert 0
+ current_decl = None
+ #
+ try:
+ self._inside_extern_python = '__cffi_extern_python_stop'
+ for decl in iterator:
+ current_decl = decl
+ if isinstance(decl, pycparser.c_ast.Decl):
+ self._parse_decl(decl)
+ elif isinstance(decl, pycparser.c_ast.Typedef):
+ if not decl.name:
+ raise CDefError("typedef does not declare any name",
+ decl)
+ quals = 0
+ if (isinstance(decl.type.type, pycparser.c_ast.IdentifierType) and
+ decl.type.type.names[-1].startswith('__dotdotdot')):
+ realtype = self._get_unknown_type(decl)
+ elif (isinstance(decl.type, pycparser.c_ast.PtrDecl) and
+ isinstance(decl.type.type, pycparser.c_ast.TypeDecl) and
+ isinstance(decl.type.type.type,
+ pycparser.c_ast.IdentifierType) and
+ decl.type.type.type.names[-1].startswith('__dotdotdot')):
+ realtype = self._get_unknown_ptr_type(decl)
+ else:
+ realtype, quals = self._get_type_and_quals(
+ decl.type, name=decl.name, partial_length_ok=True,
+ typedef_example="*(%s *)0" % (decl.name,))
+ self._declare('typedef ' + decl.name, realtype, quals=quals)
+ elif decl.__class__.__name__ == 'Pragma':
+ # skip pragma, only in pycparser 2.15
+ import warnings
+ warnings.warn(
+ "#pragma in cdef() are entirely ignored. "
+ "They should be removed for now, otherwise your "
+ "code might behave differently in a future version "
+ "of CFFI if #pragma support gets added. Note that "
+ "'#pragma pack' needs to be replaced with the "
+ "'packed' keyword argument to cdef().")
+ else:
+ raise CDefError("unexpected <%s>: this construct is valid "
+ "C but not valid in cdef()" %
+ decl.__class__.__name__, decl)
+ except CDefError as e:
+ if len(e.args) == 1:
+ e.args = e.args + (current_decl,)
+ raise
+ except FFIError as e:
+ msg = self._convert_pycparser_error(e, csource)
+ if msg:
+ e.args = (e.args[0] + "\n *** Err: %s" % msg,)
+ raise
+
+ def _add_constants(self, key, val):
+ if key in self._int_constants:
+ if self._int_constants[key] == val:
+ return # ignore identical double declarations
+ raise FFIError(
+ "multiple declarations of constant: %s" % (key,))
+ self._int_constants[key] = val
+
+ def _add_integer_constant(self, name, int_str):
+ int_str = int_str.lower().rstrip("ul")
+ neg = int_str.startswith('-')
+ if neg:
+ int_str = int_str[1:]
+        # "010" is not a valid octal literal in Python 3
+ if (int_str.startswith("0") and int_str != '0'
+ and not int_str.startswith("0x")):
+ int_str = "0o" + int_str[1:]
+ pyvalue = int(int_str, 0)
+ if neg:
+ pyvalue = -pyvalue
+ self._add_constants(name, pyvalue)
+ self._declare('macro ' + name, pyvalue)
+
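+    # Editor's note -- example added in review, not upstream code: the
+    # suffix and octal handling above, traced by hand:
+    #
+    #     "010UL"  ->  lower()/rstrip("ul")  ->  "010"  ->  "0o10"  ->   8
+    #     "-0x10"  ->  negative hexadecimal                          ->  -16
+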
+ def _process_macros(self, macros):
+ for key, value in macros.items():
+ value = value.strip()
+ if _r_int_literal.match(value):
+ self._add_integer_constant(key, value)
+ elif value == '...':
+ self._declare('macro ' + key, value)
+ else:
+ raise CDefError(
+ 'only supports one of the following syntax:\n'
+ ' #define %s ... (literally dot-dot-dot)\n'
+ ' #define %s NUMBER (with NUMBER an integer'
+ ' constant, decimal/hex/octal)\n'
+ 'got:\n'
+ ' #define %s %s'
+ % (key, key, key, value))
+
+ def _declare_function(self, tp, quals, decl):
+ tp = self._get_type_pointer(tp, quals)
+ if self._options.get('dllexport'):
+ tag = 'dllexport_python '
+ elif self._inside_extern_python == '__cffi_extern_python_start':
+ tag = 'extern_python '
+ elif self._inside_extern_python == '__cffi_extern_python_plus_c_start':
+ tag = 'extern_python_plus_c '
+ else:
+ tag = 'function '
+ self._declare(tag + decl.name, tp)
+
+ def _parse_decl(self, decl):
+ node = decl.type
+ if isinstance(node, pycparser.c_ast.FuncDecl):
+ tp, quals = self._get_type_and_quals(node, name=decl.name)
+ assert isinstance(tp, model.RawFunctionType)
+ self._declare_function(tp, quals, decl)
+ else:
+ if isinstance(node, pycparser.c_ast.Struct):
+ self._get_struct_union_enum_type('struct', node)
+ elif isinstance(node, pycparser.c_ast.Union):
+ self._get_struct_union_enum_type('union', node)
+ elif isinstance(node, pycparser.c_ast.Enum):
+ self._get_struct_union_enum_type('enum', node)
+ elif not decl.name:
+ raise CDefError("construct does not declare any variable",
+ decl)
+ #
+ if decl.name:
+ tp, quals = self._get_type_and_quals(node,
+ partial_length_ok=True)
+ if tp.is_raw_function:
+ self._declare_function(tp, quals, decl)
+ elif (tp.is_integer_type() and
+ hasattr(decl, 'init') and
+ hasattr(decl.init, 'value') and
+ _r_int_literal.match(decl.init.value)):
+ self._add_integer_constant(decl.name, decl.init.value)
+ elif (tp.is_integer_type() and
+ isinstance(decl.init, pycparser.c_ast.UnaryOp) and
+ decl.init.op == '-' and
+ hasattr(decl.init.expr, 'value') and
+ _r_int_literal.match(decl.init.expr.value)):
+ self._add_integer_constant(decl.name,
+ '-' + decl.init.expr.value)
+ elif (tp is model.void_type and
+ decl.name.startswith('__cffi_extern_python_')):
+ # hack: `extern "Python"` in the C source is replaced
+ # with "void __cffi_extern_python_start;" and
+ # "void __cffi_extern_python_stop;"
+ self._inside_extern_python = decl.name
+ else:
+                if self._inside_extern_python != '__cffi_extern_python_stop':
+ raise CDefError(
+ "cannot declare constants or "
+ "variables with 'extern \"Python\"'")
+ if (quals & model.Q_CONST) and not tp.is_array_type:
+ self._declare('constant ' + decl.name, tp, quals=quals)
+ else:
+ _warn_for_non_extern_non_static_global_variable(decl)
+ self._declare('variable ' + decl.name, tp, quals=quals)
+
+ def parse_type(self, cdecl):
+ return self.parse_type_and_quals(cdecl)[0]
+
+ def parse_type_and_quals(self, cdecl):
+ ast, macros = self._parse('void __dummy(\n%s\n);' % cdecl)[:2]
+ assert not macros
+ exprnode = ast.ext[-1].type.args.params[0]
+ if isinstance(exprnode, pycparser.c_ast.ID):
+ raise CDefError("unknown identifier '%s'" % (exprnode.name,))
+ return self._get_type_and_quals(exprnode.type)
+
+ def _declare(self, name, obj, included=False, quals=0):
+ if name in self._declarations:
+ prevobj, prevquals = self._declarations[name]
+ if prevobj is obj and prevquals == quals:
+ return
+ if not self._options.get('override'):
+ raise FFIError(
+ "multiple declarations of %s (for interactive usage, "
+ "try cdef(xx, override=True))" % (name,))
+ assert '__dotdotdot__' not in name.split()
+ self._declarations[name] = (obj, quals)
+ if included:
+ self._included_declarations.add(obj)
+
+ def _extract_quals(self, type):
+ quals = 0
+ if isinstance(type, (pycparser.c_ast.TypeDecl,
+ pycparser.c_ast.PtrDecl)):
+ if 'const' in type.quals:
+ quals |= model.Q_CONST
+ if 'volatile' in type.quals:
+ quals |= model.Q_VOLATILE
+ if 'restrict' in type.quals:
+ quals |= model.Q_RESTRICT
+ return quals
+
+ def _get_type_pointer(self, type, quals, declname=None):
+ if isinstance(type, model.RawFunctionType):
+ return type.as_function_pointer()
+ if (isinstance(type, model.StructOrUnionOrEnum) and
+ type.name.startswith('$') and type.name[1:].isdigit() and
+ type.forcename is None and declname is not None):
+ return model.NamedPointerType(type, declname, quals)
+ return model.PointerType(type, quals)
+
+ def _get_type_and_quals(self, typenode, name=None, partial_length_ok=False,
+ typedef_example=None):
+        # first, dereference typedefs; if we have it already parsed, we're good
+ if (isinstance(typenode, pycparser.c_ast.TypeDecl) and
+ isinstance(typenode.type, pycparser.c_ast.IdentifierType) and
+ len(typenode.type.names) == 1 and
+ ('typedef ' + typenode.type.names[0]) in self._declarations):
+ tp, quals = self._declarations['typedef ' + typenode.type.names[0]]
+ quals |= self._extract_quals(typenode)
+ return tp, quals
+ #
+ if isinstance(typenode, pycparser.c_ast.ArrayDecl):
+ # array type
+ if typenode.dim is None:
+ length = None
+ else:
+ length = self._parse_constant(
+ typenode.dim, partial_length_ok=partial_length_ok)
+ # a hack: in 'typedef int foo_t[...][...];', don't use '...' as
+ # the length but use directly the C expression that would be
+ # generated by recompiler.py. This lets the typedef be used in
+ # many more places within recompiler.py
+ if typedef_example is not None:
+ if length == '...':
+ length = '_cffi_array_len(%s)' % (typedef_example,)
+ typedef_example = "*" + typedef_example
+ #
+ tp, quals = self._get_type_and_quals(typenode.type,
+ partial_length_ok=partial_length_ok,
+ typedef_example=typedef_example)
+ return model.ArrayType(tp, length), quals
+ #
+ if isinstance(typenode, pycparser.c_ast.PtrDecl):
+ # pointer type
+ itemtype, itemquals = self._get_type_and_quals(typenode.type)
+ tp = self._get_type_pointer(itemtype, itemquals, declname=name)
+ quals = self._extract_quals(typenode)
+ return tp, quals
+ #
+ if isinstance(typenode, pycparser.c_ast.TypeDecl):
+ quals = self._extract_quals(typenode)
+ type = typenode.type
+ if isinstance(type, pycparser.c_ast.IdentifierType):
+ # assume a primitive type. get it from .names, but reduce
+ # synonyms to a single chosen combination
+ names = list(type.names)
+ if names != ['signed', 'char']: # keep this unmodified
+ prefixes = {}
+ while names:
+ name = names[0]
+ if name in ('short', 'long', 'signed', 'unsigned'):
+ prefixes[name] = prefixes.get(name, 0) + 1
+ del names[0]
+ else:
+ break
+ # ignore the 'signed' prefix below, and reorder the others
+ newnames = []
+ for prefix in ('unsigned', 'short', 'long'):
+ for i in range(prefixes.get(prefix, 0)):
+ newnames.append(prefix)
+ if not names:
+ names = ['int'] # implicitly
+ if names == ['int']: # but kill it if 'short' or 'long'
+ if 'short' in prefixes or 'long' in prefixes:
+ names = []
+ names = newnames + names
+ ident = ' '.join(names)
+ if ident == 'void':
+ return model.void_type, quals
+ if ident == '__dotdotdot__':
+ raise FFIError(':%d: bad usage of "..."' %
+ typenode.coord.line)
+ tp0, quals0 = resolve_common_type(self, ident)
+ return tp0, (quals | quals0)
+ #
+ if isinstance(type, pycparser.c_ast.Struct):
+ # 'struct foobar'
+ tp = self._get_struct_union_enum_type('struct', type, name)
+ return tp, quals
+ #
+ if isinstance(type, pycparser.c_ast.Union):
+ # 'union foobar'
+ tp = self._get_struct_union_enum_type('union', type, name)
+ return tp, quals
+ #
+ if isinstance(type, pycparser.c_ast.Enum):
+ # 'enum foobar'
+ tp = self._get_struct_union_enum_type('enum', type, name)
+ return tp, quals
+ #
+ if isinstance(typenode, pycparser.c_ast.FuncDecl):
+ # a function type
+ return self._parse_function_type(typenode, name), 0
+ #
+ # nested anonymous structs or unions end up here
+ if isinstance(typenode, pycparser.c_ast.Struct):
+ return self._get_struct_union_enum_type('struct', typenode, name,
+ nested=True), 0
+ if isinstance(typenode, pycparser.c_ast.Union):
+ return self._get_struct_union_enum_type('union', typenode, name,
+ nested=True), 0
+ #
+ raise FFIError(":%d: bad or unsupported type declaration" %
+ typenode.coord.line)
+
+ def _parse_function_type(self, typenode, funcname=None):
+ params = list(getattr(typenode.args, 'params', []))
+ for i, arg in enumerate(params):
+ if not hasattr(arg, 'type'):
+ raise CDefError("%s arg %d: unknown type '%s'"
+ " (if you meant to use the old C syntax of giving"
+ " untyped arguments, it is not supported)"
+ % (funcname or 'in expression', i + 1,
+ getattr(arg, 'name', '?')))
+ ellipsis = (
+ len(params) > 0 and
+ isinstance(params[-1].type, pycparser.c_ast.TypeDecl) and
+ isinstance(params[-1].type.type,
+ pycparser.c_ast.IdentifierType) and
+ params[-1].type.type.names == ['__dotdotdot__'])
+ if ellipsis:
+ params.pop()
+ if not params:
+ raise CDefError(
+ "%s: a function with only '(...)' as argument"
+ " is not correct C" % (funcname or 'in expression'))
+ args = [self._as_func_arg(*self._get_type_and_quals(argdeclnode.type))
+ for argdeclnode in params]
+ if not ellipsis and args == [model.void_type]:
+ args = []
+ result, quals = self._get_type_and_quals(typenode.type)
+        # the 'quals' on the result type are ignored.  HACK: we abuse them
+ # to detect __stdcall functions: we textually replace "__stdcall"
+ # with "volatile volatile const" above.
+ abi = None
+ if hasattr(typenode.type, 'quals'): # else, probable syntax error anyway
+ if typenode.type.quals[-3:] == ['volatile', 'volatile', 'const']:
+ abi = '__stdcall'
+ return model.RawFunctionType(tuple(args), result, ellipsis, abi)
+
+ def _as_func_arg(self, type, quals):
+ if isinstance(type, model.ArrayType):
+ return model.PointerType(type.item, quals)
+ elif isinstance(type, model.RawFunctionType):
+ return type.as_function_pointer()
+ else:
+ return type
+
+ def _get_struct_union_enum_type(self, kind, type, name=None, nested=False):
+ # First, a level of caching on the exact 'type' node of the AST.
+ # This is obscure, but needed because pycparser "unrolls" declarations
+ # such as "typedef struct { } foo_t, *foo_p" and we end up with
+ # an AST that is not a tree, but a DAG, with the "type" node of the
+ # two branches foo_t and foo_p of the trees being the same node.
+ # It's a bit silly but detecting "DAG-ness" in the AST tree seems
+ # to be the only way to distinguish this case from two independent
+ # structs. See test_struct_with_two_usages.
+ try:
+ return self._structnode2type[type]
+ except KeyError:
+ pass
+ #
+ # Note that this must handle parsing "struct foo" any number of
+ # times and always return the same StructType object. Additionally,
+ # one of these times (not necessarily the first), the fields of
+ # the struct can be specified with "struct foo { ...fields... }".
+ # If no name is given, then we have to create a new anonymous struct
+ # with no caching; in this case, the fields are either specified
+ # right now or never.
+ #
+ force_name = name
+ name = type.name
+ #
+ # get the type or create it if needed
+ if name is None:
+ # 'force_name' is used to guess a more readable name for
+ # anonymous structs, for the common case "typedef struct { } foo".
+ if force_name is not None:
+ explicit_name = '$%s' % force_name
+ else:
+ self._anonymous_counter += 1
+ explicit_name = '$%d' % self._anonymous_counter
+ tp = None
+ else:
+ explicit_name = name
+ key = '%s %s' % (kind, name)
+ tp, _ = self._declarations.get(key, (None, None))
+ #
+ if tp is None:
+ if kind == 'struct':
+ tp = model.StructType(explicit_name, None, None, None)
+ elif kind == 'union':
+ tp = model.UnionType(explicit_name, None, None, None)
+ elif kind == 'enum':
+ if explicit_name == '__dotdotdot__':
+ raise CDefError("Enums cannot be declared with ...")
+ tp = self._build_enum_type(explicit_name, type.values)
+ else:
+ raise AssertionError("kind = %r" % (kind,))
+ if name is not None:
+ self._declare(key, tp)
+ else:
+ if kind == 'enum' and type.values is not None:
+ raise NotImplementedError(
+ "enum %s: the '{}' declaration should appear on the first "
+ "time the enum is mentioned, not later" % explicit_name)
+ if not tp.forcename:
+ tp.force_the_name(force_name)
+ if tp.forcename and '$' in tp.name:
+ self._declare('anonymous %s' % tp.forcename, tp)
+ #
+ self._structnode2type[type] = tp
+ #
+ # enums: done here
+ if kind == 'enum':
+ return tp
+ #
+ # is there a 'type.decls'? If yes, then this is the place in the
+ # C sources that declare the fields. If no, then just return the
+ # existing type, possibly still incomplete.
+ if type.decls is None:
+ return tp
+ #
+ if tp.fldnames is not None:
+ raise CDefError("duplicate declaration of struct %s" % name)
+ fldnames = []
+ fldtypes = []
+ fldbitsize = []
+ fldquals = []
+ for decl in type.decls:
+ if (isinstance(decl.type, pycparser.c_ast.IdentifierType) and
+ ''.join(decl.type.names) == '__dotdotdot__'):
+ # XXX pycparser is inconsistent: 'names' should be a list
+ # of strings, but is sometimes just one string. Use
+ # str.join() as a way to cope with both.
+ self._make_partial(tp, nested)
+ continue
+ if decl.bitsize is None:
+ bitsize = -1
+ else:
+ bitsize = self._parse_constant(decl.bitsize)
+ self._partial_length = False
+ type, fqual = self._get_type_and_quals(decl.type,
+ partial_length_ok=True)
+ if self._partial_length:
+ self._make_partial(tp, nested)
+ if isinstance(type, model.StructType) and type.partial:
+ self._make_partial(tp, nested)
+ fldnames.append(decl.name or '')
+ fldtypes.append(type)
+ fldbitsize.append(bitsize)
+ fldquals.append(fqual)
+ tp.fldnames = tuple(fldnames)
+ tp.fldtypes = tuple(fldtypes)
+ tp.fldbitsize = tuple(fldbitsize)
+ tp.fldquals = tuple(fldquals)
+ if fldbitsize != [-1] * len(fldbitsize):
+ if isinstance(tp, model.StructType) and tp.partial:
+ raise NotImplementedError("%s: using both bitfields and '...;'"
+ % (tp,))
+ tp.packed = self._options.get('packed')
+ if tp.completed: # must be re-completed: it is not opaque any more
+ tp.completed = 0
+ self._recomplete.append(tp)
+ return tp
+
+ def _make_partial(self, tp, nested):
+ if not isinstance(tp, model.StructOrUnion):
+ raise CDefError("%s cannot be partial" % (tp,))
+ if not tp.has_c_name() and not nested:
+ raise NotImplementedError("%s is partial but has no C name" %(tp,))
+ tp.partial = True
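+ # Illustrative sketch: a "...;" line inside a struct body marks it
+ # partial, i.e. the cdef lists only some of the fields and the real
+ # layout is completed from the compiler, as in
+ # ffi.cdef("struct foo_s { int a; ...; };")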
+
+ def _parse_constant(self, exprnode, partial_length_ok=False):
+ # for now, limited to expressions that are an immediate number
+ # or positive/negative number
+ if isinstance(exprnode, pycparser.c_ast.Constant):
+ s = exprnode.value
+ if '0' <= s[0] <= '9':
+ s = s.rstrip('uUlL')
+ try:
+ if s.startswith('0'):
+ return int(s, 8)
+ else:
+ return int(s, 10)
+ except ValueError:
+ if len(s) > 1:
+ if s.lower()[0:2] == '0x':
+ return int(s, 16)
+ elif s.lower()[0:2] == '0b':
+ return int(s, 2)
+ raise CDefError("invalid constant %r" % (s,))
+ elif s[0] == "'" and s[-1] == "'" and (
+ len(s) == 3 or (len(s) == 4 and s[1] == "\\")):
+ return ord(s[-2])
+ else:
+ raise CDefError("invalid constant %r" % (s,))
+ #
+ if (isinstance(exprnode, pycparser.c_ast.UnaryOp) and
+ exprnode.op == '+'):
+ return self._parse_constant(exprnode.expr)
+ #
+ if (isinstance(exprnode, pycparser.c_ast.UnaryOp) and
+ exprnode.op == '-'):
+ return -self._parse_constant(exprnode.expr)
+ # load previously defined int constant
+ if (isinstance(exprnode, pycparser.c_ast.ID) and
+ exprnode.name in self._int_constants):
+ return self._int_constants[exprnode.name]
+ #
+ if (isinstance(exprnode, pycparser.c_ast.ID) and
+ exprnode.name == '__dotdotdotarray__'):
+ if partial_length_ok:
+ self._partial_length = True
+ return '...'
+ raise FFIError(":%d: unsupported '[...]' here, cannot derive "
+ "the actual array length in this context"
+ % exprnode.coord.line)
+ #
+ if isinstance(exprnode, pycparser.c_ast.BinaryOp):
+ left = self._parse_constant(exprnode.left)
+ right = self._parse_constant(exprnode.right)
+ if exprnode.op == '+':
+ return left + right
+ elif exprnode.op == '-':
+ return left - right
+ elif exprnode.op == '*':
+ return left * right
+ elif exprnode.op == '/':
+ return self._c_div(left, right)
+ elif exprnode.op == '%':
+ return left - self._c_div(left, right) * right
+ elif exprnode.op == '<<':
+ return left << right
+ elif exprnode.op == '>>':
+ return left >> right
+ elif exprnode.op == '&':
+ return left & right
+ elif exprnode.op == '|':
+ return left | right
+ elif exprnode.op == '^':
+ return left ^ right
+ #
+ raise FFIError(":%d: unsupported expression: expected a "
+ "simple numeric constant" % exprnode.coord.line)
+
+ def _c_div(self, a, b):
+ result = a // b
+ if ((a < 0) ^ (b < 0)) and (a % b) != 0:
+ result += 1
+ return result
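+ # Illustrative sketch: C division truncates toward zero while
+ # Python's // floors, hence the correction above; for example
+ # _c_div(-7, 2) == -3, whereas -7 // 2 == -4 in Python.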
+
+ def _build_enum_type(self, explicit_name, decls):
+ if decls is not None:
+ partial = False
+ enumerators = []
+ enumvalues = []
+ nextenumvalue = 0
+ for enum in decls.enumerators:
+ if _r_enum_dotdotdot.match(enum.name):
+ partial = True
+ continue
+ if enum.value is not None:
+ nextenumvalue = self._parse_constant(enum.value)
+ enumerators.append(enum.name)
+ enumvalues.append(nextenumvalue)
+ self._add_constants(enum.name, nextenumvalue)
+ nextenumvalue += 1
+ enumerators = tuple(enumerators)
+ enumvalues = tuple(enumvalues)
+ tp = model.EnumType(explicit_name, enumerators, enumvalues)
+ tp.partial = partial
+ else: # opaque enum
+ tp = model.EnumType(explicit_name, (), ())
+ return tp
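+ # Illustrative sketch: enumerators without an explicit value keep
+ # counting from the previous one, as in C, so "enum e { A, B=5, C }"
+ # yields the values (0, 5, 6).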
+
+ def include(self, other):
+ for name, (tp, quals) in other._declarations.items():
+ if name.startswith('anonymous $enum_$'):
+ continue # fix for test_anonymous_enum_include
+ kind = name.split(' ', 1)[0]
+ if kind in ('struct', 'union', 'enum', 'anonymous', 'typedef'):
+ self._declare(name, tp, included=True, quals=quals)
+ for k, v in other._int_constants.items():
+ self._add_constants(k, v)
+
+ def _get_unknown_type(self, decl):
+ typenames = decl.type.type.names
+ if typenames == ['__dotdotdot__']:
+ return model.unknown_type(decl.name)
+
+ if typenames == ['__dotdotdotint__']:
+ if self._uses_new_feature is None:
+ self._uses_new_feature = "'typedef int... %s'" % decl.name
+ return model.UnknownIntegerType(decl.name)
+
+ if typenames == ['__dotdotdotfloat__']:
+ # note: not for 'long double' so far
+ if self._uses_new_feature is None:
+ self._uses_new_feature = "'typedef float... %s'" % decl.name
+ return model.UnknownFloatType(decl.name)
+
+ raise FFIError(':%d: unsupported usage of "..." in typedef'
+ % decl.coord.line)
+
+ def _get_unknown_ptr_type(self, decl):
+ if decl.type.type.type.names == ['__dotdotdot__']:
+ return model.unknown_ptr_type(decl.name)
+ raise FFIError(':%d: unsupported usage of "..." in typedef'
+ % decl.coord.line)
diff --git a/Backend/venv/lib/python3.12/site-packages/cffi/error.py b/Backend/venv/lib/python3.12/site-packages/cffi/error.py
new file mode 100644
index 00000000..0a27247c
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cffi/error.py
@@ -0,0 +1,31 @@
+
+class FFIError(Exception):
+ __module__ = 'cffi'
+
+class CDefError(Exception):
+ __module__ = 'cffi'
+ def __str__(self):
+ try:
+ current_decl = self.args[1]
+ filename = current_decl.coord.file
+ linenum = current_decl.coord.line
+ prefix = '%s:%d: ' % (filename, linenum)
+ except (AttributeError, TypeError, IndexError):
+ prefix = ''
+ return '%s%s' % (prefix, self.args[0])
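+ # Illustrative sketch: raised as CDefError("msg", decl) where decl
+ # carries pycparser coord info, this renders e.g. "foo.h:12: msg"
+ # (file name and line number hypothetical); without coord info it
+ # falls back to plain "msg".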
+
+class VerificationError(Exception):
+ """ An error raised when verification fails
+ """
+ __module__ = 'cffi'
+
+class VerificationMissing(Exception):
+ """ An error raised when incomplete structures are passed into
+ cdef, but no verification has been done
+ """
+ __module__ = 'cffi'
+
+class PkgConfigError(Exception):
+ """ An error raised for missing modules in pkg-config
+ """
+ __module__ = 'cffi'
diff --git a/Backend/venv/lib/python3.12/site-packages/cffi/ffiplatform.py b/Backend/venv/lib/python3.12/site-packages/cffi/ffiplatform.py
new file mode 100644
index 00000000..adca28f1
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cffi/ffiplatform.py
@@ -0,0 +1,113 @@
+import sys, os
+from .error import VerificationError
+
+
+LIST_OF_FILE_NAMES = ['sources', 'include_dirs', 'library_dirs',
+ 'extra_objects', 'depends']
+
+def get_extension(srcfilename, modname, sources=(), **kwds):
+ from cffi._shimmed_dist_utils import Extension
+ allsources = [srcfilename]
+ for src in sources:
+ allsources.append(os.path.normpath(src))
+ return Extension(name=modname, sources=allsources, **kwds)
+
+def compile(tmpdir, ext, compiler_verbose=0, debug=None):
+ """Compile a C extension module using distutils."""
+
+ saved_environ = os.environ.copy()
+ try:
+ outputfilename = _build(tmpdir, ext, compiler_verbose, debug)
+ outputfilename = os.path.abspath(outputfilename)
+ finally:
+ # workaround for a distutils bug where some env vars can
+ # become longer and longer every time it is used
+ for key, value in saved_environ.items():
+ if os.environ.get(key) != value:
+ os.environ[key] = value
+ return outputfilename
+
+def _build(tmpdir, ext, compiler_verbose=0, debug=None):
+ # XXX compact but horrible :-(
+ from cffi._shimmed_dist_utils import Distribution, CompileError, LinkError, set_threshold, set_verbosity
+
+ dist = Distribution({'ext_modules': [ext]})
+ dist.parse_config_files()
+ options = dist.get_option_dict('build_ext')
+ if debug is None:
+ debug = sys.flags.debug
+ options['debug'] = ('ffiplatform', debug)
+ options['force'] = ('ffiplatform', True)
+ options['build_lib'] = ('ffiplatform', tmpdir)
+ options['build_temp'] = ('ffiplatform', tmpdir)
+ #
+ try:
+ old_level = set_threshold(0) or 0
+ try:
+ set_verbosity(compiler_verbose)
+ dist.run_command('build_ext')
+ cmd_obj = dist.get_command_obj('build_ext')
+ [soname] = cmd_obj.get_outputs()
+ finally:
+ set_threshold(old_level)
+ except (CompileError, LinkError) as e:
+ raise VerificationError('%s: %s' % (e.__class__.__name__, e))
+ #
+ return soname
+
+try:
+ from os.path import samefile
+except ImportError:
+ def samefile(f1, f2):
+ return os.path.abspath(f1) == os.path.abspath(f2)
+
+def maybe_relative_path(path):
+ if not os.path.isabs(path):
+ return path # already relative
+ dir = path
+ names = []
+ while True:
+ prevdir = dir
+ dir, name = os.path.split(prevdir)
+ if dir == prevdir or not dir:
+ return path # failed to make it relative
+ names.append(name)
+ try:
+ if samefile(dir, os.curdir):
+ names.reverse()
+ return os.path.join(*names)
+ except OSError:
+ pass
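+ # Illustrative sketch: with the current directory at e.g. /home/u,
+ # maybe_relative_path('/home/u/proj/x.c') returns 'proj/x.c', while
+ # a path not below the current directory is returned unchanged.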
+
+# ____________________________________________________________
+
+try:
+ int_or_long = (int, long)
+ import cStringIO
+except NameError:
+ int_or_long = int # Python 3
+ import io as cStringIO
+
+def _flatten(x, f):
+ if isinstance(x, str):
+ f.write('%ds%s' % (len(x), x))
+ elif isinstance(x, dict):
+ keys = sorted(x.keys())
+ f.write('%dd' % len(keys))
+ for key in keys:
+ _flatten(key, f)
+ _flatten(x[key], f)
+ elif isinstance(x, (list, tuple)):
+ f.write('%dl' % len(x))
+ for value in x:
+ _flatten(value, f)
+ elif isinstance(x, int_or_long):
+ f.write('%di' % (x,))
+ else:
+ raise TypeError(
+ "the keywords to verify() contains unsupported object %r" % (x,))
+
+def flatten(x):
+ f = cStringIO.StringIO()
+ _flatten(x, f)
+ return f.getvalue()
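+
+# Illustrative sketch: flatten() produces a compact, deterministic
+# serialization of the keyword arguments given to verify(), e.g.
+# flatten({'a': [1, 2]}) == '1d1sa2l1i2i'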
diff --git a/Backend/venv/lib/python3.12/site-packages/cffi/lock.py b/Backend/venv/lib/python3.12/site-packages/cffi/lock.py
new file mode 100644
index 00000000..db91b715
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cffi/lock.py
@@ -0,0 +1,30 @@
+import sys
+
+if sys.version_info < (3,):
+ try:
+ from thread import allocate_lock
+ except ImportError:
+ from dummy_thread import allocate_lock
+else:
+ try:
+ from _thread import allocate_lock
+ except ImportError:
+ from _dummy_thread import allocate_lock
+
+
+##import sys
+##l1 = allocate_lock
+
+##class allocate_lock(object):
+## def __init__(self):
+## self._real = l1()
+## def __enter__(self):
+## for i in range(4, 0, -1):
+## print sys._getframe(i).f_code
+## print
+## return self._real.__enter__()
+## def __exit__(self, *args):
+## return self._real.__exit__(*args)
+## def acquire(self, f):
+## assert f is False
+## return self._real.acquire(f)
diff --git a/Backend/venv/lib/python3.12/site-packages/cffi/model.py b/Backend/venv/lib/python3.12/site-packages/cffi/model.py
new file mode 100644
index 00000000..e5f4cae3
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cffi/model.py
@@ -0,0 +1,618 @@
+import types
+import weakref
+
+from .lock import allocate_lock
+from .error import CDefError, VerificationError, VerificationMissing
+
+# type qualifiers
+Q_CONST = 0x01
+Q_RESTRICT = 0x02
+Q_VOLATILE = 0x04
+
+def qualify(quals, replace_with):
+ if quals & Q_CONST:
+ replace_with = ' const ' + replace_with.lstrip()
+ if quals & Q_VOLATILE:
+ replace_with = ' volatile ' + replace_with.lstrip()
+ if quals & Q_RESTRICT:
+ # It seems that __restrict is supported by gcc and msvc.
+ # If you hit some different compiler, add a #define in
+ # _cffi_include.h for it (and in its copies, documented there)
+ replace_with = ' __restrict ' + replace_with.lstrip()
+ return replace_with
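+# Illustrative sketch: qualifiers are textually prepended to the
+# fragment that follows them, e.g.
+# qualify(Q_CONST | Q_VOLATILE, '*p') == ' volatile const *p'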
+
+
+class BaseTypeByIdentity(object):
+ is_array_type = False
+ is_raw_function = False
+
+ def get_c_name(self, replace_with='', context='a C file', quals=0):
+ result = self.c_name_with_marker
+ assert result.count('&') == 1
+ # some logic duplication with ffi.getctype()... :-(
+ replace_with = replace_with.strip()
+ if replace_with:
+ if replace_with.startswith('*') and '&[' in result:
+ replace_with = '(%s)' % replace_with
+ elif not replace_with[0] in '[(':
+ replace_with = ' ' + replace_with
+ replace_with = qualify(quals, replace_with)
+ result = result.replace('&', replace_with)
+ if '$' in result:
+ raise VerificationError(
+ "cannot generate '%s' in %s: unknown type name"
+ % (self._get_c_name(), context))
+ return result
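+ # Illustrative sketch: the '&' in c_name_with_marker marks where a
+ # declared name goes; for a pointer-to-int the marker string is
+ # 'int *&', so get_c_name('x') returns 'int * x'.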
+
+ def _get_c_name(self):
+ return self.c_name_with_marker.replace('&', '')
+
+ def has_c_name(self):
+ return '$' not in self._get_c_name()
+
+ def is_integer_type(self):
+ return False
+
+ def get_cached_btype(self, ffi, finishlist, can_delay=False):
+ try:
+ BType = ffi._cached_btypes[self]
+ except KeyError:
+ BType = self.build_backend_type(ffi, finishlist)
+ BType2 = ffi._cached_btypes.setdefault(self, BType)
+ assert BType2 is BType
+ return BType
+
+ def __repr__(self):
+ return '<%s>' % (self._get_c_name(),)
+
+ def _get_items(self):
+ return [(name, getattr(self, name)) for name in self._attrs_]
+
+
+class BaseType(BaseTypeByIdentity):
+
+ def __eq__(self, other):
+ return (self.__class__ == other.__class__ and
+ self._get_items() == other._get_items())
+
+ def __ne__(self, other):
+ return not self == other
+
+ def __hash__(self):
+ return hash((self.__class__, tuple(self._get_items())))
+
+
+class VoidType(BaseType):
+ _attrs_ = ()
+
+ def __init__(self):
+ self.c_name_with_marker = 'void&'
+
+ def build_backend_type(self, ffi, finishlist):
+ return global_cache(self, ffi, 'new_void_type')
+
+void_type = VoidType()
+
+
+class BasePrimitiveType(BaseType):
+ def is_complex_type(self):
+ return False
+
+
+class PrimitiveType(BasePrimitiveType):
+ _attrs_ = ('name',)
+
+ ALL_PRIMITIVE_TYPES = {
+ 'char': 'c',
+ 'short': 'i',
+ 'int': 'i',
+ 'long': 'i',
+ 'long long': 'i',
+ 'signed char': 'i',
+ 'unsigned char': 'i',
+ 'unsigned short': 'i',
+ 'unsigned int': 'i',
+ 'unsigned long': 'i',
+ 'unsigned long long': 'i',
+ 'float': 'f',
+ 'double': 'f',
+ 'long double': 'f',
+ '_cffi_float_complex_t': 'j',
+ '_cffi_double_complex_t': 'j',
+ '_Bool': 'i',
+ # the following types are not primitive in the C sense
+ 'wchar_t': 'c',
+ 'char16_t': 'c',
+ 'char32_t': 'c',
+ 'int8_t': 'i',
+ 'uint8_t': 'i',
+ 'int16_t': 'i',
+ 'uint16_t': 'i',
+ 'int32_t': 'i',
+ 'uint32_t': 'i',
+ 'int64_t': 'i',
+ 'uint64_t': 'i',
+ 'int_least8_t': 'i',
+ 'uint_least8_t': 'i',
+ 'int_least16_t': 'i',
+ 'uint_least16_t': 'i',
+ 'int_least32_t': 'i',
+ 'uint_least32_t': 'i',
+ 'int_least64_t': 'i',
+ 'uint_least64_t': 'i',
+ 'int_fast8_t': 'i',
+ 'uint_fast8_t': 'i',
+ 'int_fast16_t': 'i',
+ 'uint_fast16_t': 'i',
+ 'int_fast32_t': 'i',
+ 'uint_fast32_t': 'i',
+ 'int_fast64_t': 'i',
+ 'uint_fast64_t': 'i',
+ 'intptr_t': 'i',
+ 'uintptr_t': 'i',
+ 'intmax_t': 'i',
+ 'uintmax_t': 'i',
+ 'ptrdiff_t': 'i',
+ 'size_t': 'i',
+ 'ssize_t': 'i',
+ }
+
+ def __init__(self, name):
+ assert name in self.ALL_PRIMITIVE_TYPES
+ self.name = name
+ self.c_name_with_marker = name + '&'
+
+ def is_char_type(self):
+ return self.ALL_PRIMITIVE_TYPES[self.name] == 'c'
+ def is_integer_type(self):
+ return self.ALL_PRIMITIVE_TYPES[self.name] == 'i'
+ def is_float_type(self):
+ return self.ALL_PRIMITIVE_TYPES[self.name] == 'f'
+ def is_complex_type(self):
+ return self.ALL_PRIMITIVE_TYPES[self.name] == 'j'
+
+ def build_backend_type(self, ffi, finishlist):
+ return global_cache(self, ffi, 'new_primitive_type', self.name)
+
+
+class UnknownIntegerType(BasePrimitiveType):
+ _attrs_ = ('name',)
+
+ def __init__(self, name):
+ self.name = name
+ self.c_name_with_marker = name + '&'
+
+ def is_integer_type(self):
+ return True
+
+ def build_backend_type(self, ffi, finishlist):
+ raise NotImplementedError("integer type '%s' can only be used after "
+ "compilation" % self.name)
+
+class UnknownFloatType(BasePrimitiveType):
+ _attrs_ = ('name', )
+
+ def __init__(self, name):
+ self.name = name
+ self.c_name_with_marker = name + '&'
+
+ def build_backend_type(self, ffi, finishlist):
+ raise NotImplementedError("float type '%s' can only be used after "
+ "compilation" % self.name)
+
+
+class BaseFunctionType(BaseType):
+ _attrs_ = ('args', 'result', 'ellipsis', 'abi')
+
+ def __init__(self, args, result, ellipsis, abi=None):
+ self.args = args
+ self.result = result
+ self.ellipsis = ellipsis
+ self.abi = abi
+ #
+ reprargs = [arg._get_c_name() for arg in self.args]
+ if self.ellipsis:
+ reprargs.append('...')
+ reprargs = reprargs or ['void']
+ replace_with = self._base_pattern % (', '.join(reprargs),)
+ if abi is not None:
+ replace_with = replace_with[:1] + abi + ' ' + replace_with[1:]
+ self.c_name_with_marker = (
+ self.result.c_name_with_marker.replace('&', replace_with))
+
+
+class RawFunctionType(BaseFunctionType):
+ # Corresponds to a C type like 'int(int)', which is the C type of
+ # a function, but not a pointer-to-function. The backend has no
+ # notion of such a type; it's used temporarily by parsing.
+ _base_pattern = '(&)(%s)'
+ is_raw_function = True
+
+ def build_backend_type(self, ffi, finishlist):
+ raise CDefError("cannot render the type %r: it is a function "
+ "type, not a pointer-to-function type" % (self,))
+
+ def as_function_pointer(self):
+ return FunctionPtrType(self.args, self.result, self.ellipsis, self.abi)
+
+
+class FunctionPtrType(BaseFunctionType):
+ _base_pattern = '(*&)(%s)'
+
+ def build_backend_type(self, ffi, finishlist):
+ result = self.result.get_cached_btype(ffi, finishlist)
+ args = []
+ for tp in self.args:
+ args.append(tp.get_cached_btype(ffi, finishlist))
+ abi_args = ()
+ if self.abi == "__stdcall":
+ if not self.ellipsis: # __stdcall ignored for variadic funcs
+ try:
+ abi_args = (ffi._backend.FFI_STDCALL,)
+ except AttributeError:
+ pass
+ return global_cache(self, ffi, 'new_function_type',
+ tuple(args), result, self.ellipsis, *abi_args)
+
+ def as_raw_function(self):
+ return RawFunctionType(self.args, self.result, self.ellipsis, self.abi)
+
+
+class PointerType(BaseType):
+ _attrs_ = ('totype', 'quals')
+
+ def __init__(self, totype, quals=0):
+ self.totype = totype
+ self.quals = quals
+ extra = " *&"
+ if totype.is_array_type:
+ extra = "(%s)" % (extra.lstrip(),)
+ extra = qualify(quals, extra)
+ self.c_name_with_marker = totype.c_name_with_marker.replace('&', extra)
+
+ def build_backend_type(self, ffi, finishlist):
+ BItem = self.totype.get_cached_btype(ffi, finishlist, can_delay=True)
+ return global_cache(self, ffi, 'new_pointer_type', BItem)
+
+voidp_type = PointerType(void_type)
+
+def ConstPointerType(totype):
+ return PointerType(totype, Q_CONST)
+
+const_voidp_type = ConstPointerType(void_type)
+
+
+class NamedPointerType(PointerType):
+ _attrs_ = ('totype', 'name')
+
+ def __init__(self, totype, name, quals=0):
+ PointerType.__init__(self, totype, quals)
+ self.name = name
+ self.c_name_with_marker = name + '&'
+
+
+class ArrayType(BaseType):
+ _attrs_ = ('item', 'length')
+ is_array_type = True
+
+ def __init__(self, item, length):
+ self.item = item
+ self.length = length
+ #
+ if length is None:
+ brackets = '&[]'
+ elif length == '...':
+ brackets = '&[/*...*/]'
+ else:
+ brackets = '&[%s]' % length
+ self.c_name_with_marker = (
+ self.item.c_name_with_marker.replace('&', brackets))
+
+ def length_is_unknown(self):
+ return isinstance(self.length, str)
+
+ def resolve_length(self, newlength):
+ return ArrayType(self.item, newlength)
+
+ def build_backend_type(self, ffi, finishlist):
+ if self.length_is_unknown():
+ raise CDefError("cannot render the type %r: unknown length" %
+ (self,))
+ self.item.get_cached_btype(ffi, finishlist) # force the item BType
+ BPtrItem = PointerType(self.item).get_cached_btype(ffi, finishlist)
+ return global_cache(self, ffi, 'new_array_type', BPtrItem, self.length)
+
+char_array_type = ArrayType(PrimitiveType('char'), None)
+
+
+class StructOrUnionOrEnum(BaseTypeByIdentity):
+ _attrs_ = ('name',)
+ forcename = None
+
+ def build_c_name_with_marker(self):
+ name = self.forcename or '%s %s' % (self.kind, self.name)
+ self.c_name_with_marker = name + '&'
+
+ def force_the_name(self, forcename):
+ self.forcename = forcename
+ self.build_c_name_with_marker()
+
+ def get_official_name(self):
+ assert self.c_name_with_marker.endswith('&')
+ return self.c_name_with_marker[:-1]
+
+
+class StructOrUnion(StructOrUnionOrEnum):
+ fixedlayout = None
+ completed = 0
+ partial = False
+ packed = 0
+
+ def __init__(self, name, fldnames, fldtypes, fldbitsize, fldquals=None):
+ self.name = name
+ self.fldnames = fldnames
+ self.fldtypes = fldtypes
+ self.fldbitsize = fldbitsize
+ self.fldquals = fldquals
+ self.build_c_name_with_marker()
+
+ def anonymous_struct_fields(self):
+ if self.fldtypes is not None:
+ for name, type in zip(self.fldnames, self.fldtypes):
+ if name == '' and isinstance(type, StructOrUnion):
+ yield type
+
+ def enumfields(self, expand_anonymous_struct_union=True):
+ fldquals = self.fldquals
+ if fldquals is None:
+ fldquals = (0,) * len(self.fldnames)
+ for name, type, bitsize, quals in zip(self.fldnames, self.fldtypes,
+ self.fldbitsize, fldquals):
+ if (name == '' and isinstance(type, StructOrUnion)
+ and expand_anonymous_struct_union):
+ # nested anonymous struct/union
+ for result in type.enumfields():
+ yield result
+ else:
+ yield (name, type, bitsize, quals)
+
+ def force_flatten(self):
+ # force the struct or union to have a declaration that lists
+ # directly all fields returned by enumfields(), flattening
+ # nested anonymous structs/unions.
+ names = []
+ types = []
+ bitsizes = []
+ fldquals = []
+ for name, type, bitsize, quals in self.enumfields():
+ names.append(name)
+ types.append(type)
+ bitsizes.append(bitsize)
+ fldquals.append(quals)
+ self.fldnames = tuple(names)
+ self.fldtypes = tuple(types)
+ self.fldbitsize = tuple(bitsizes)
+ self.fldquals = tuple(fldquals)
+
+ def get_cached_btype(self, ffi, finishlist, can_delay=False):
+ BType = StructOrUnionOrEnum.get_cached_btype(self, ffi, finishlist,
+ can_delay)
+ if not can_delay:
+ self.finish_backend_type(ffi, finishlist)
+ return BType
+
+ def finish_backend_type(self, ffi, finishlist):
+ if self.completed:
+ if self.completed != 2:
+ raise NotImplementedError("recursive structure declaration "
+ "for '%s'" % (self.name,))
+ return
+ BType = ffi._cached_btypes[self]
+ #
+ self.completed = 1
+ #
+ if self.fldtypes is None:
+ pass # not completing it: it's an opaque struct
+ #
+ elif self.fixedlayout is None:
+ fldtypes = [tp.get_cached_btype(ffi, finishlist)
+ for tp in self.fldtypes]
+ lst = list(zip(self.fldnames, fldtypes, self.fldbitsize))
+ extra_flags = ()
+ if self.packed:
+ if self.packed == 1:
+ extra_flags = (8,) # SF_PACKED
+ else:
+ extra_flags = (0, self.packed)
+ ffi._backend.complete_struct_or_union(BType, lst, self,
+ -1, -1, *extra_flags)
+ #
+ else:
+ fldtypes = []
+ fieldofs, fieldsize, totalsize, totalalignment = self.fixedlayout
+ for i in range(len(self.fldnames)):
+ fsize = fieldsize[i]
+ ftype = self.fldtypes[i]
+ #
+ if isinstance(ftype, ArrayType) and ftype.length_is_unknown():
+ # fix the length to match the total size
+ BItemType = ftype.item.get_cached_btype(ffi, finishlist)
+ nlen, nrest = divmod(fsize, ffi.sizeof(BItemType))
+ if nrest != 0:
+ self._verification_error(
+ "field '%s.%s' has a bogus size?" % (
+ self.name, self.fldnames[i] or '{}'))
+ ftype = ftype.resolve_length(nlen)
+ self.fldtypes = (self.fldtypes[:i] + (ftype,) +
+ self.fldtypes[i+1:])
+ #
+ BFieldType = ftype.get_cached_btype(ffi, finishlist)
+ if isinstance(ftype, ArrayType) and ftype.length is None:
+ assert fsize == 0
+ else:
+ bitemsize = ffi.sizeof(BFieldType)
+ if bitemsize != fsize:
+ self._verification_error(
+ "field '%s.%s' is declared as %d bytes, but is "
+ "really %d bytes" % (self.name,
+ self.fldnames[i] or '{}',
+ bitemsize, fsize))
+ fldtypes.append(BFieldType)
+ #
+ lst = list(zip(self.fldnames, fldtypes, self.fldbitsize, fieldofs))
+ ffi._backend.complete_struct_or_union(BType, lst, self,
+ totalsize, totalalignment)
+ self.completed = 2
+
+ def _verification_error(self, msg):
+ raise VerificationError(msg)
+
+ def check_not_partial(self):
+ if self.partial and self.fixedlayout is None:
+ raise VerificationMissing(self._get_c_name())
+
+ def build_backend_type(self, ffi, finishlist):
+ self.check_not_partial()
+ finishlist.append(self)
+ #
+ return global_cache(self, ffi, 'new_%s_type' % self.kind,
+ self.get_official_name(), key=self)
+
+
+class StructType(StructOrUnion):
+ kind = 'struct'
+
+
+class UnionType(StructOrUnion):
+ kind = 'union'
+
+
+class EnumType(StructOrUnionOrEnum):
+ kind = 'enum'
+ partial = False
+ partial_resolved = False
+
+ def __init__(self, name, enumerators, enumvalues, baseinttype=None):
+ self.name = name
+ self.enumerators = enumerators
+ self.enumvalues = enumvalues
+ self.baseinttype = baseinttype
+ self.build_c_name_with_marker()
+
+ def force_the_name(self, forcename):
+ StructOrUnionOrEnum.force_the_name(self, forcename)
+ if self.forcename is None:
+ name = self.get_official_name()
+ self.forcename = '$' + name.replace(' ', '_')
+
+ def check_not_partial(self):
+ if self.partial and not self.partial_resolved:
+ raise VerificationMissing(self._get_c_name())
+
+ def build_backend_type(self, ffi, finishlist):
+ self.check_not_partial()
+ base_btype = self.build_baseinttype(ffi, finishlist)
+ return global_cache(self, ffi, 'new_enum_type',
+ self.get_official_name(),
+ self.enumerators, self.enumvalues,
+ base_btype, key=self)
+
+ def build_baseinttype(self, ffi, finishlist):
+ if self.baseinttype is not None:
+ return self.baseinttype.get_cached_btype(ffi, finishlist)
+ #
+ if self.enumvalues:
+ smallest_value = min(self.enumvalues)
+ largest_value = max(self.enumvalues)
+ else:
+ import warnings
+ try:
+ # XXX! The goal is to ensure that warnings.warn() does
+ # not suppress the warning: we want to see it again
+ # each time this point is reached.
+ __warningregistry__.clear()
+ except NameError:
+ pass
+ warnings.warn("%r has no values explicitly defined; "
+ "guessing that it is equivalent to 'unsigned int'"
+ % self._get_c_name())
+ smallest_value = largest_value = 0
+ if smallest_value < 0: # needs a signed type
+ sign = 1
+ candidate1 = PrimitiveType("int")
+ candidate2 = PrimitiveType("long")
+ else:
+ sign = 0
+ candidate1 = PrimitiveType("unsigned int")
+ candidate2 = PrimitiveType("unsigned long")
+ btype1 = candidate1.get_cached_btype(ffi, finishlist)
+ btype2 = candidate2.get_cached_btype(ffi, finishlist)
+ size1 = ffi.sizeof(btype1)
+ size2 = ffi.sizeof(btype2)
+ if (smallest_value >= ((-1) << (8*size1-1)) and
+ largest_value < (1 << (8*size1-sign))):
+ return btype1
+ if (smallest_value >= ((-1) << (8*size2-1)) and
+ largest_value < (1 << (8*size2-sign))):
+ return btype2
+ raise CDefError("%s values don't all fit into either 'long' "
+ "or 'unsigned long'" % self._get_c_name())
+
+def unknown_type(name, structname=None):
+ if structname is None:
+ structname = '$%s' % name
+ tp = StructType(structname, None, None, None)
+ tp.force_the_name(name)
+ tp.origin = "unknown_type"
+ return tp
+
+def unknown_ptr_type(name, structname=None):
+ if structname is None:
+ structname = '$$%s' % name
+ tp = StructType(structname, None, None, None)
+ return NamedPointerType(tp, name)
+
+
+global_lock = allocate_lock()
+_typecache_cffi_backend = weakref.WeakValueDictionary()
+
+def get_typecache(backend):
+ # returns _typecache_cffi_backend if backend is the _cffi_backend
+ # module, or type(backend).__typecache if backend is an instance of
+ # CTypesBackend (or some FakeBackend class during tests)
+ if isinstance(backend, types.ModuleType):
+ return _typecache_cffi_backend
+ with global_lock:
+ if not hasattr(type(backend), '__typecache'):
+ type(backend).__typecache = weakref.WeakValueDictionary()
+ return type(backend).__typecache
+
+def global_cache(srctype, ffi, funcname, *args, **kwds):
+ key = kwds.pop('key', (funcname, args))
+ assert not kwds
+ try:
+ return ffi._typecache[key]
+ except KeyError:
+ pass
+ try:
+ res = getattr(ffi._backend, funcname)(*args)
+ except NotImplementedError as e:
+ raise NotImplementedError("%s: %r: %s" % (funcname, srctype, e))
+ # note that setdefault() on WeakValueDictionary is not atomic
+ # and contains a rare bug (http://bugs.python.org/issue19542);
+ # we have to use a lock and do it ourselves
+ cache = ffi._typecache
+ with global_lock:
+ res1 = cache.get(key)
+ if res1 is None:
+ cache[key] = res
+ return res
+ else:
+ return res1
+
+def pointer_cache(ffi, BType):
+ return global_cache('?', ffi, 'new_pointer_type', BType)
+
+def attach_exception_info(e, name):
+ if e.args and type(e.args[0]) is str:
+ e.args = ('%s: %s' % (name, e.args[0]),) + e.args[1:]
diff --git a/Backend/venv/lib/python3.12/site-packages/cffi/parse_c_type.h b/Backend/venv/lib/python3.12/site-packages/cffi/parse_c_type.h
new file mode 100644
index 00000000..84e4ef85
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cffi/parse_c_type.h
@@ -0,0 +1,181 @@
+
+/* This part is from file 'cffi/parse_c_type.h'. It is copied at the
+ beginning of C sources generated by CFFI's ffi.set_source(). */
+
+typedef void *_cffi_opcode_t;
+
+#define _CFFI_OP(opcode, arg) (_cffi_opcode_t)(opcode | (((uintptr_t)(arg)) << 8))
+#define _CFFI_GETOP(cffi_opcode) ((unsigned char)(uintptr_t)cffi_opcode)
+#define _CFFI_GETARG(cffi_opcode) (((intptr_t)cffi_opcode) >> 8)
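+
+/* Illustrative sketch: an opcode packs a tag in the low 8 bits and
+ an argument in the remaining bits; e.g. _CFFI_OP(_CFFI_OP_POINTER, 5)
+ yields 0x503, from which _CFFI_GETOP gives 3 and _CFFI_GETARG 5. */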
+
+#define _CFFI_OP_PRIMITIVE 1
+#define _CFFI_OP_POINTER 3
+#define _CFFI_OP_ARRAY 5
+#define _CFFI_OP_OPEN_ARRAY 7
+#define _CFFI_OP_STRUCT_UNION 9
+#define _CFFI_OP_ENUM 11
+#define _CFFI_OP_FUNCTION 13
+#define _CFFI_OP_FUNCTION_END 15
+#define _CFFI_OP_NOOP 17
+#define _CFFI_OP_BITFIELD 19
+#define _CFFI_OP_TYPENAME 21
+#define _CFFI_OP_CPYTHON_BLTN_V 23 // varargs
+#define _CFFI_OP_CPYTHON_BLTN_N 25 // noargs
+#define _CFFI_OP_CPYTHON_BLTN_O 27 // O (i.e. a single arg)
+#define _CFFI_OP_CONSTANT 29
+#define _CFFI_OP_CONSTANT_INT 31
+#define _CFFI_OP_GLOBAL_VAR 33
+#define _CFFI_OP_DLOPEN_FUNC 35
+#define _CFFI_OP_DLOPEN_CONST 37
+#define _CFFI_OP_GLOBAL_VAR_F 39
+#define _CFFI_OP_EXTERN_PYTHON 41
+
+#define _CFFI_PRIM_VOID 0
+#define _CFFI_PRIM_BOOL 1
+#define _CFFI_PRIM_CHAR 2
+#define _CFFI_PRIM_SCHAR 3
+#define _CFFI_PRIM_UCHAR 4
+#define _CFFI_PRIM_SHORT 5
+#define _CFFI_PRIM_USHORT 6
+#define _CFFI_PRIM_INT 7
+#define _CFFI_PRIM_UINT 8
+#define _CFFI_PRIM_LONG 9
+#define _CFFI_PRIM_ULONG 10
+#define _CFFI_PRIM_LONGLONG 11
+#define _CFFI_PRIM_ULONGLONG 12
+#define _CFFI_PRIM_FLOAT 13
+#define _CFFI_PRIM_DOUBLE 14
+#define _CFFI_PRIM_LONGDOUBLE 15
+
+#define _CFFI_PRIM_WCHAR 16
+#define _CFFI_PRIM_INT8 17
+#define _CFFI_PRIM_UINT8 18
+#define _CFFI_PRIM_INT16 19
+#define _CFFI_PRIM_UINT16 20
+#define _CFFI_PRIM_INT32 21
+#define _CFFI_PRIM_UINT32 22
+#define _CFFI_PRIM_INT64 23
+#define _CFFI_PRIM_UINT64 24
+#define _CFFI_PRIM_INTPTR 25
+#define _CFFI_PRIM_UINTPTR 26
+#define _CFFI_PRIM_PTRDIFF 27
+#define _CFFI_PRIM_SIZE 28
+#define _CFFI_PRIM_SSIZE 29
+#define _CFFI_PRIM_INT_LEAST8 30
+#define _CFFI_PRIM_UINT_LEAST8 31
+#define _CFFI_PRIM_INT_LEAST16 32
+#define _CFFI_PRIM_UINT_LEAST16 33
+#define _CFFI_PRIM_INT_LEAST32 34
+#define _CFFI_PRIM_UINT_LEAST32 35
+#define _CFFI_PRIM_INT_LEAST64 36
+#define _CFFI_PRIM_UINT_LEAST64 37
+#define _CFFI_PRIM_INT_FAST8 38
+#define _CFFI_PRIM_UINT_FAST8 39
+#define _CFFI_PRIM_INT_FAST16 40
+#define _CFFI_PRIM_UINT_FAST16 41
+#define _CFFI_PRIM_INT_FAST32 42
+#define _CFFI_PRIM_UINT_FAST32 43
+#define _CFFI_PRIM_INT_FAST64 44
+#define _CFFI_PRIM_UINT_FAST64 45
+#define _CFFI_PRIM_INTMAX 46
+#define _CFFI_PRIM_UINTMAX 47
+#define _CFFI_PRIM_FLOATCOMPLEX 48
+#define _CFFI_PRIM_DOUBLECOMPLEX 49
+#define _CFFI_PRIM_CHAR16 50
+#define _CFFI_PRIM_CHAR32 51
+
+#define _CFFI__NUM_PRIM 52
+#define _CFFI__UNKNOWN_PRIM (-1)
+#define _CFFI__UNKNOWN_FLOAT_PRIM (-2)
+#define _CFFI__UNKNOWN_LONG_DOUBLE (-3)
+
+#define _CFFI__IO_FILE_STRUCT (-1)
+
+
+struct _cffi_global_s {
+ const char *name;
+ void *address;
+ _cffi_opcode_t type_op;
+ void *size_or_direct_fn; // OP_GLOBAL_VAR: size, or 0 if unknown
+ // OP_CPYTHON_BLTN_*: addr of direct function
+};
+
+struct _cffi_getconst_s {
+ unsigned long long value;
+ const struct _cffi_type_context_s *ctx;
+ int gindex;
+};
+
+struct _cffi_struct_union_s {
+ const char *name;
+ int type_index; // -> _cffi_types, on a OP_STRUCT_UNION
+ int flags; // _CFFI_F_* flags below
+ size_t size;
+ int alignment;
+ int first_field_index; // -> _cffi_fields array
+ int num_fields;
+};
+#define _CFFI_F_UNION 0x01 // is a union, not a struct
+#define _CFFI_F_CHECK_FIELDS 0x02 // complain if fields are not in the
+ // "standard layout" or if some are missing
+#define _CFFI_F_PACKED 0x04 // for CHECK_FIELDS, assume a packed struct
+#define _CFFI_F_EXTERNAL 0x08 // in some other ffi.include()
+#define _CFFI_F_OPAQUE 0x10 // opaque
+
+struct _cffi_field_s {
+ const char *name;
+ size_t field_offset;
+ size_t field_size;
+ _cffi_opcode_t field_type_op;
+};
+
+struct _cffi_enum_s {
+ const char *name;
+ int type_index; // -> _cffi_types, on a OP_ENUM
+ int type_prim; // _CFFI_PRIM_xxx
+ const char *enumerators; // comma-delimited string
+};
+
+struct _cffi_typename_s {
+ const char *name;
+ int type_index; /* if opaque, points to a possibly artificial
+ OP_STRUCT which is itself opaque */
+};
+
+struct _cffi_type_context_s {
+ _cffi_opcode_t *types;
+ const struct _cffi_global_s *globals;
+ const struct _cffi_field_s *fields;
+ const struct _cffi_struct_union_s *struct_unions;
+ const struct _cffi_enum_s *enums;
+ const struct _cffi_typename_s *typenames;
+ int num_globals;
+ int num_struct_unions;
+ int num_enums;
+ int num_typenames;
+ const char *const *includes;
+ int num_types;
+ int flags; /* future extension */
+};
+
+struct _cffi_parse_info_s {
+ const struct _cffi_type_context_s *ctx;
+ _cffi_opcode_t *output;
+ unsigned int output_size;
+ size_t error_location;
+ const char *error_message;
+};
+
+struct _cffi_externpy_s {
+ const char *name;
+ size_t size_of_result;
+ void *reserved1, *reserved2;
+};
+
+#ifdef _CFFI_INTERNAL
+static int parse_c_type(struct _cffi_parse_info_s *info, const char *input);
+static int search_in_globals(const struct _cffi_type_context_s *ctx,
+ const char *search, size_t search_len);
+static int search_in_struct_unions(const struct _cffi_type_context_s *ctx,
+ const char *search, size_t search_len);
+#endif
diff --git a/Backend/venv/lib/python3.12/site-packages/cffi/pkgconfig.py b/Backend/venv/lib/python3.12/site-packages/cffi/pkgconfig.py
new file mode 100644
index 00000000..5c93f15a
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cffi/pkgconfig.py
@@ -0,0 +1,121 @@
+# pkg-config (https://www.freedesktop.org/wiki/Software/pkg-config/) integration for cffi
+import sys, os, subprocess
+
+from .error import PkgConfigError
+
+
+def merge_flags(cfg1, cfg2):
+ """Merge values from cffi config flags cfg2 to cf1
+
+ Example:
+ merge_flags({"libraries": ["one"]}, {"libraries": ["two"]})
+ {"libraries": ["one", "two"]}
+ """
+ for key, value in cfg2.items():
+ if key not in cfg1:
+ cfg1[key] = value
+ else:
+ if not isinstance(cfg1[key], list):
+ raise TypeError("cfg1[%r] should be a list of strings" % (key,))
+ if not isinstance(value, list):
+ raise TypeError("cfg2[%r] should be a list of strings" % (key,))
+ cfg1[key].extend(value)
+ return cfg1
+
+
+def call(libname, flag, encoding=sys.getfilesystemencoding()):
+ """Calls pkg-config and returns the output if found
+ """
+ a = ["pkg-config", "--print-errors"]
+ a.append(flag)
+ a.append(libname)
+ try:
+ pc = subprocess.Popen(a, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ except EnvironmentError as e:
+ raise PkgConfigError("cannot run pkg-config: %s" % (str(e).strip(),))
+
+ bout, berr = pc.communicate()
+ if pc.returncode != 0:
+ try:
+ berr = berr.decode(encoding)
+ except Exception:
+ pass
+ raise PkgConfigError(berr.strip())
+
+ if sys.version_info >= (3,) and not isinstance(bout, str): # Python 3.x
+ try:
+ bout = bout.decode(encoding)
+ except UnicodeDecodeError:
+ raise PkgConfigError("pkg-config %s %s returned bytes that cannot "
+ "be decoded with encoding %r:\n%r" %
+ (flag, libname, encoding, bout))
+
+ if os.altsep != '\\' and '\\' in bout:
+ raise PkgConfigError("pkg-config %s %s returned an unsupported "
+ "backslash-escaped output:\n%r" %
+ (flag, libname, bout))
+ return bout
+
+
+def flags_from_pkgconfig(libs):
+ r"""Return compiler line flags for FFI.set_source based on pkg-config output
+
+ Usage
+ ...
+ ffibuilder.set_source("_foo", pkgconfig = ["libfoo", "libbar >= 1.8.3"])
+
+ If pkg-config is installed on the build machine, then the arguments
+ include_dirs, library_dirs, libraries, define_macros,
+ extra_compile_args and extra_link_args are extended with the output
+ of pkg-config for libfoo and libbar.
+
+ Raises PkgConfigError in case the pkg-config call fails.
+ """
+
+ def get_include_dirs(string):
+ return [x[2:] for x in string.split() if x.startswith("-I")]
+
+ def get_library_dirs(string):
+ return [x[2:] for x in string.split() if x.startswith("-L")]
+
+ def get_libraries(string):
+ return [x[2:] for x in string.split() if x.startswith("-l")]
+
+ # convert -Dfoo=bar to list of tuples [("foo", "bar")] expected by distutils
+ def get_macros(string):
+ def _macro(x):
+ x = x[2:] # drop "-D"
+ if '=' in x:
+ return tuple(x.split("=", 1)) # "-Dfoo=bar" => ("foo", "bar")
+ else:
+ return (x, None) # "-Dfoo" => ("foo", None)
+ return [_macro(x) for x in string.split() if x.startswith("-D")]
+
+ def get_other_cflags(string):
+ return [x for x in string.split() if not x.startswith("-I") and
+ not x.startswith("-D")]
+
+ def get_other_libs(string):
+ return [x for x in string.split() if not x.startswith("-L") and
+ not x.startswith("-l")]
+
+ # return kwargs for given libname
+ def kwargs(libname):
+ fse = sys.getfilesystemencoding()
+ all_cflags = call(libname, "--cflags")
+ all_libs = call(libname, "--libs")
+ return {
+ "include_dirs": get_include_dirs(all_cflags),
+ "library_dirs": get_library_dirs(all_libs),
+ "libraries": get_libraries(all_libs),
+ "define_macros": get_macros(all_cflags),
+ "extra_compile_args": get_other_cflags(all_cflags),
+ "extra_link_args": get_other_libs(all_libs),
+ }
+
+ # merge all arguments together
+ ret = {}
+ for libname in libs:
+ lib_flags = kwargs(libname)
+ merge_flags(ret, lib_flags)
+ return ret
diff --git a/Backend/venv/lib/python3.12/site-packages/cffi/recompiler.py b/Backend/venv/lib/python3.12/site-packages/cffi/recompiler.py
new file mode 100644
index 00000000..7734a348
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cffi/recompiler.py
@@ -0,0 +1,1598 @@
+import io, os, sys, sysconfig
+from . import ffiplatform, model
+from .error import VerificationError
+from .cffi_opcode import *
+
+VERSION_BASE = 0x2601
+VERSION_EMBEDDED = 0x2701
+VERSION_CHAR16CHAR32 = 0x2801
+
+USE_LIMITED_API = ((sys.platform != 'win32' or sys.version_info < (3, 0) or
+ sys.version_info >= (3, 5)) and
+ not sysconfig.get_config_var("Py_GIL_DISABLED")) # free-threaded doesn't yet support limited API
+
+class GlobalExpr:
+ def __init__(self, name, address, type_op, size=0, check_value=0):
+ self.name = name
+ self.address = address
+ self.type_op = type_op
+ self.size = size
+ self.check_value = check_value
+
+ def as_c_expr(self):
+ return ' { "%s", (void *)%s, %s, (void *)%s },' % (
+ self.name, self.address, self.type_op.as_c_expr(), self.size)
+
+ def as_python_expr(self):
+ return "b'%s%s',%d" % (self.type_op.as_python_bytes(), self.name,
+ self.check_value)
+
+class FieldExpr:
+ def __init__(self, name, field_offset, field_size, fbitsize, field_type_op):
+ self.name = name
+ self.field_offset = field_offset
+ self.field_size = field_size
+ self.fbitsize = fbitsize
+ self.field_type_op = field_type_op
+
+ def as_c_expr(self):
+ spaces = " " * len(self.name)
+ return (' { "%s", %s,\n' % (self.name, self.field_offset) +
+ ' %s %s,\n' % (spaces, self.field_size) +
+ ' %s %s },' % (spaces, self.field_type_op.as_c_expr()))
+
+ def as_python_expr(self):
+ raise NotImplementedError
+
+ def as_field_python_expr(self):
+ if self.field_type_op.op == OP_NOOP:
+ size_expr = ''
+ elif self.field_type_op.op == OP_BITFIELD:
+ size_expr = format_four_bytes(self.fbitsize)
+ else:
+ raise NotImplementedError
+ return "b'%s%s%s'" % (self.field_type_op.as_python_bytes(),
+ size_expr,
+ self.name)
+
+class StructUnionExpr:
+ def __init__(self, name, type_index, flags, size, alignment, comment,
+ first_field_index, c_fields):
+ self.name = name
+ self.type_index = type_index
+ self.flags = flags
+ self.size = size
+ self.alignment = alignment
+ self.comment = comment
+ self.first_field_index = first_field_index
+ self.c_fields = c_fields
+
+ def as_c_expr(self):
+ return (' { "%s", %d, %s,' % (self.name, self.type_index, self.flags)
+ + '\n %s, %s, ' % (self.size, self.alignment)
+ + '%d, %d ' % (self.first_field_index, len(self.c_fields))
+ + ('/* %s */ ' % self.comment if self.comment else '')
+ + '},')
+
+ def as_python_expr(self):
+ flags = eval(self.flags, G_FLAGS)
+ fields_expr = [c_field.as_field_python_expr()
+ for c_field in self.c_fields]
+ return "(b'%s%s%s',%s)" % (
+ format_four_bytes(self.type_index),
+ format_four_bytes(flags),
+ self.name,
+ ','.join(fields_expr))
+
+class EnumExpr:
+ def __init__(self, name, type_index, size, signed, allenums):
+ self.name = name
+ self.type_index = type_index
+ self.size = size
+ self.signed = signed
+ self.allenums = allenums
+
+ def as_c_expr(self):
+ return (' { "%s", %d, _cffi_prim_int(%s, %s),\n'
+ ' "%s" },' % (self.name, self.type_index,
+ self.size, self.signed, self.allenums))
+
+ def as_python_expr(self):
+ prim_index = {
+ (1, 0): PRIM_UINT8, (1, 1): PRIM_INT8,
+ (2, 0): PRIM_UINT16, (2, 1): PRIM_INT16,
+ (4, 0): PRIM_UINT32, (4, 1): PRIM_INT32,
+ (8, 0): PRIM_UINT64, (8, 1): PRIM_INT64,
+ }[self.size, self.signed]
+ return "b'%s%s%s\\x00%s'" % (format_four_bytes(self.type_index),
+ format_four_bytes(prim_index),
+ self.name, self.allenums)
+
+class TypenameExpr:
+ def __init__(self, name, type_index):
+ self.name = name
+ self.type_index = type_index
+
+ def as_c_expr(self):
+ return ' { "%s", %d },' % (self.name, self.type_index)
+
+ def as_python_expr(self):
+ return "b'%s%s'" % (format_four_bytes(self.type_index), self.name)
+
+
+# ____________________________________________________________
+
+
+class Recompiler:
+ _num_externpy = 0
+
+ def __init__(self, ffi, module_name, target_is_python=False):
+ self.ffi = ffi
+ self.module_name = module_name
+ self.target_is_python = target_is_python
+ self._version = VERSION_BASE
+
+ def needs_version(self, ver):
+ self._version = max(self._version, ver)
+
+ def collect_type_table(self):
+ self._typesdict = {}
+ self._generate("collecttype")
+ #
+ all_decls = sorted(self._typesdict, key=str)
+ #
+ # prepare all FUNCTION bytecode sequences first
+ self.cffi_types = []
+ for tp in all_decls:
+ if tp.is_raw_function:
+ assert self._typesdict[tp] is None
+ self._typesdict[tp] = len(self.cffi_types)
+ self.cffi_types.append(tp) # placeholder
+ for tp1 in tp.args:
+ assert isinstance(tp1, (model.VoidType,
+ model.BasePrimitiveType,
+ model.PointerType,
+ model.StructOrUnionOrEnum,
+ model.FunctionPtrType))
+ if self._typesdict[tp1] is None:
+ self._typesdict[tp1] = len(self.cffi_types)
+ self.cffi_types.append(tp1) # placeholder
+ self.cffi_types.append('END') # placeholder
+ #
+ # prepare all OTHER bytecode sequences
+ for tp in all_decls:
+ if not tp.is_raw_function and self._typesdict[tp] is None:
+ self._typesdict[tp] = len(self.cffi_types)
+ self.cffi_types.append(tp) # placeholder
+ if tp.is_array_type and tp.length is not None:
+ self.cffi_types.append('LEN') # placeholder
+ assert None not in self._typesdict.values()
+ #
+ # collect all structs and unions and enums
+ self._struct_unions = {}
+ self._enums = {}
+ for tp in all_decls:
+ if isinstance(tp, model.StructOrUnion):
+ self._struct_unions[tp] = None
+ elif isinstance(tp, model.EnumType):
+ self._enums[tp] = None
+ for i, tp in enumerate(sorted(self._struct_unions,
+ key=lambda tp: tp.name)):
+ self._struct_unions[tp] = i
+ for i, tp in enumerate(sorted(self._enums,
+ key=lambda tp: tp.name)):
+ self._enums[tp] = i
+ #
+ # emit all bytecode sequences now
+ for tp in all_decls:
+ method = getattr(self, '_emit_bytecode_' + tp.__class__.__name__)
+ method(tp, self._typesdict[tp])
+ #
+ # consistency check
+ for op in self.cffi_types:
+ assert isinstance(op, CffiOp)
+ self.cffi_types = tuple(self.cffi_types) # don't change any more
+
+ def _enum_fields(self, tp):
+ # When producing C, expand all anonymous struct/union fields.
+ # That's necessary to have C code checking the offsets of the
+ # individual fields contained in them. When producing Python,
+ # don't do it and instead write it like it is, with the
+ # corresponding fields having an empty name. Empty names are
+ # recognized at runtime when we import the generated Python
+ # file.
+ expand_anonymous_struct_union = not self.target_is_python
+ return tp.enumfields(expand_anonymous_struct_union)
+
+ def _do_collect_type(self, tp):
+ if not isinstance(tp, model.BaseTypeByIdentity):
+ if isinstance(tp, tuple):
+ for x in tp:
+ self._do_collect_type(x)
+ return
+ if tp not in self._typesdict:
+ self._typesdict[tp] = None
+ if isinstance(tp, model.FunctionPtrType):
+ self._do_collect_type(tp.as_raw_function())
+ elif isinstance(tp, model.StructOrUnion):
+ if tp.fldtypes is not None and (
+ tp not in self.ffi._parser._included_declarations):
+ for name1, tp1, _, _ in self._enum_fields(tp):
+ self._do_collect_type(self._field_type(tp, name1, tp1))
+ else:
+ for _, x in tp._get_items():
+ self._do_collect_type(x)
+
+ def _generate(self, step_name):
+ lst = self.ffi._parser._declarations.items()
+ for name, (tp, quals) in sorted(lst):
+ kind, realname = name.split(' ', 1)
+ try:
+ method = getattr(self, '_generate_cpy_%s_%s' % (kind,
+ step_name))
+ except AttributeError:
+ raise VerificationError(
+ "not implemented in recompile(): %r" % name)
+ try:
+ self._current_quals = quals
+ method(tp, realname)
+ except Exception as e:
+ model.attach_exception_info(e, name)
+ raise
+
+ # ----------
+
+ ALL_STEPS = ["global", "field", "struct_union", "enum", "typename"]
+
+ def collect_step_tables(self):
+ # collect the declarations for '_cffi_globals', '_cffi_typenames', etc.
+ self._lsts = {}
+ for step_name in self.ALL_STEPS:
+ self._lsts[step_name] = []
+ self._seen_struct_unions = set()
+ self._generate("ctx")
+ self._add_missing_struct_unions()
+ #
+ for step_name in self.ALL_STEPS:
+ lst = self._lsts[step_name]
+ if step_name != "field":
+ lst.sort(key=lambda entry: entry.name)
+ self._lsts[step_name] = tuple(lst) # don't change any more
+ #
+ # check for a possible internal inconsistency: _cffi_struct_unions
+ # should have been generated with exactly self._struct_unions
+ lst = self._lsts["struct_union"]
+ for tp, i in self._struct_unions.items():
+ assert i < len(lst)
+ assert lst[i].name == tp.name
+ assert len(lst) == len(self._struct_unions)
+ # same with enums
+ lst = self._lsts["enum"]
+ for tp, i in self._enums.items():
+ assert i < len(lst)
+ assert lst[i].name == tp.name
+ assert len(lst) == len(self._enums)
+
+ # ----------
+
+ def _prnt(self, what=''):
+ self._f.write(what + '\n')
+
+ def write_source_to_f(self, f, preamble):
+ if self.target_is_python:
+ assert preamble is None
+ self.write_py_source_to_f(f)
+ else:
+ assert preamble is not None
+ self.write_c_source_to_f(f, preamble)
+
+ def _rel_readlines(self, filename):
+ with open(os.path.join(os.path.dirname(__file__), filename), 'r') as g:
+ return g.readlines()
+
+ def write_c_source_to_f(self, f, preamble):
+ self._f = f
+ prnt = self._prnt
+ if self.ffi._embedding is not None:
+ prnt('#define _CFFI_USE_EMBEDDING')
+ if not USE_LIMITED_API:
+ prnt('#define _CFFI_NO_LIMITED_API')
+ #
+ # first the '#include' (actually done by inlining the file's content)
+ lines = self._rel_readlines('_cffi_include.h')
+ i = lines.index('#include "parse_c_type.h"\n')
+ lines[i:i+1] = self._rel_readlines('parse_c_type.h')
+ prnt(''.join(lines))
+ #
+ # if we have ffi._embedding != None, we give it here as a macro
+ # and include an extra file
+ base_module_name = self.module_name.split('.')[-1]
+ if self.ffi._embedding is not None:
+ prnt('#define _CFFI_MODULE_NAME "%s"' % (self.module_name,))
+ prnt('static const char _CFFI_PYTHON_STARTUP_CODE[] = {')
+ self._print_string_literal_in_array(self.ffi._embedding)
+ prnt('0 };')
+ prnt('#ifdef PYPY_VERSION')
+ prnt('# define _CFFI_PYTHON_STARTUP_FUNC _cffi_pypyinit_%s' % (
+ base_module_name,))
+ prnt('#elif PY_MAJOR_VERSION >= 3')
+ prnt('# define _CFFI_PYTHON_STARTUP_FUNC PyInit_%s' % (
+ base_module_name,))
+ prnt('#else')
+ prnt('# define _CFFI_PYTHON_STARTUP_FUNC init%s' % (
+ base_module_name,))
+ prnt('#endif')
+ lines = self._rel_readlines('_embedding.h')
+ i = lines.index('#include "_cffi_errors.h"\n')
+ lines[i:i+1] = self._rel_readlines('_cffi_errors.h')
+ prnt(''.join(lines))
+ self.needs_version(VERSION_EMBEDDED)
+ #
+ # then paste the C source given by the user, verbatim.
+ prnt('/************************************************************/')
+ prnt()
+ prnt(preamble)
+ prnt()
+ prnt('/************************************************************/')
+ prnt()
+ #
+ # the declaration of '_cffi_types'
+ prnt('static void *_cffi_types[] = {')
+ typeindex2type = dict([(i, tp) for (tp, i) in self._typesdict.items()])
+ for i, op in enumerate(self.cffi_types):
+ comment = ''
+ if i in typeindex2type:
+ comment = ' // ' + typeindex2type[i]._get_c_name()
+ prnt('/* %2d */ %s,%s' % (i, op.as_c_expr(), comment))
+ if not self.cffi_types:
+ prnt(' 0')
+ prnt('};')
+ prnt()
+ #
+ # call generate_cpy_xxx_decl(), for every xxx found from
+ # ffi._parser._declarations. This generates all the functions.
+ self._seen_constants = set()
+ self._generate("decl")
+ #
+ # the declaration of '_cffi_globals' and '_cffi_typenames'
+ nums = {}
+ for step_name in self.ALL_STEPS:
+ lst = self._lsts[step_name]
+ nums[step_name] = len(lst)
+ if nums[step_name] > 0:
+ prnt('static const struct _cffi_%s_s _cffi_%ss[] = {' % (
+ step_name, step_name))
+ for entry in lst:
+ prnt(entry.as_c_expr())
+ prnt('};')
+ prnt()
+ #
+ # the declaration of '_cffi_includes'
+ if self.ffi._included_ffis:
+ prnt('static const char * const _cffi_includes[] = {')
+ for ffi_to_include in self.ffi._included_ffis:
+ try:
+ included_module_name, included_source = (
+ ffi_to_include._assigned_source[:2])
+ except AttributeError:
+ raise VerificationError(
+ "ffi object %r includes %r, but the latter has not "
+ "been prepared with set_source()" % (
+ self.ffi, ffi_to_include,))
+ if included_source is None:
+ raise VerificationError(
+ "not implemented yet: ffi.include() of a Python-based "
+ "ffi inside a C-based ffi")
+ prnt(' "%s",' % (included_module_name,))
+ prnt(' NULL')
+ prnt('};')
+ prnt()
+ #
+ # the declaration of '_cffi_type_context'
+ prnt('static const struct _cffi_type_context_s _cffi_type_context = {')
+ prnt(' _cffi_types,')
+ for step_name in self.ALL_STEPS:
+ if nums[step_name] > 0:
+ prnt(' _cffi_%ss,' % step_name)
+ else:
+ prnt(' NULL, /* no %ss */' % step_name)
+ for step_name in self.ALL_STEPS:
+ if step_name != "field":
+ prnt(' %d, /* num_%ss */' % (nums[step_name], step_name))
+ if self.ffi._included_ffis:
+ prnt(' _cffi_includes,')
+ else:
+ prnt(' NULL, /* no includes */')
+ prnt(' %d, /* num_types */' % (len(self.cffi_types),))
+ flags = 0
+ if self._num_externpy > 0 or self.ffi._embedding is not None:
+ flags |= 1 # set to mean that we use extern "Python"
+ prnt(' %d, /* flags */' % flags)
+ prnt('};')
+ prnt()
+ #
+ # the init function
+ prnt('#ifdef __GNUC__')
+ prnt('# pragma GCC visibility push(default) /* for -fvisibility= */')
+ prnt('#endif')
+ prnt()
+ prnt('#ifdef PYPY_VERSION')
+ prnt('PyMODINIT_FUNC')
+ prnt('_cffi_pypyinit_%s(const void *p[])' % (base_module_name,))
+ prnt('{')
+ if flags & 1:
+ prnt(' if (((intptr_t)p[0]) >= 0x0A03) {')
+ prnt(' _cffi_call_python_org = '
+ '(void(*)(struct _cffi_externpy_s *, char *))p[1];')
+ prnt(' }')
+ prnt(' p[0] = (const void *)0x%x;' % self._version)
+ prnt(' p[1] = &_cffi_type_context;')
+ prnt('#if PY_MAJOR_VERSION >= 3')
+ prnt(' return NULL;')
+ prnt('#endif')
+ prnt('}')
+ # on Windows, distutils insists on putting init_cffi_xyz in
+ # 'export_symbols', so instead of fighting it, just give up and
+ # give it one
+ prnt('# ifdef _MSC_VER')
+ prnt(' PyMODINIT_FUNC')
+ prnt('# if PY_MAJOR_VERSION >= 3')
+ prnt(' PyInit_%s(void) { return NULL; }' % (base_module_name,))
+ prnt('# else')
+ prnt(' init%s(void) { }' % (base_module_name,))
+ prnt('# endif')
+ prnt('# endif')
+ prnt('#elif PY_MAJOR_VERSION >= 3')
+ prnt('PyMODINIT_FUNC')
+ prnt('PyInit_%s(void)' % (base_module_name,))
+ prnt('{')
+ prnt(' return _cffi_init("%s", 0x%x, &_cffi_type_context);' % (
+ self.module_name, self._version))
+ prnt('}')
+ prnt('#else')
+ prnt('PyMODINIT_FUNC')
+ prnt('init%s(void)' % (base_module_name,))
+ prnt('{')
+ prnt(' _cffi_init("%s", 0x%x, &_cffi_type_context);' % (
+ self.module_name, self._version))
+ prnt('}')
+ prnt('#endif')
+ prnt()
+ prnt('#ifdef __GNUC__')
+ prnt('# pragma GCC visibility pop')
+ prnt('#endif')
+ self._version = None
+
+ def _to_py(self, x):
+ if isinstance(x, str):
+ return "b'%s'" % (x,)
+ if isinstance(x, (list, tuple)):
+ rep = [self._to_py(item) for item in x]
+ if len(rep) == 1:
+ rep.append('')
+ return "(%s)" % (','.join(rep),)
+ return x.as_python_expr() # Py2: unicode unexpected; Py3: bytes unexpected
+
+ def write_py_source_to_f(self, f):
+ self._f = f
+ prnt = self._prnt
+ #
+ # header
+ prnt("# auto-generated file")
+ prnt("import _cffi_backend")
+ #
+ # the 'import' of the included ffis
+ num_includes = len(self.ffi._included_ffis or ())
+ for i in range(num_includes):
+ ffi_to_include = self.ffi._included_ffis[i]
+ try:
+ included_module_name, included_source = (
+ ffi_to_include._assigned_source[:2])
+ except AttributeError:
+ raise VerificationError(
+ "ffi object %r includes %r, but the latter has not "
+ "been prepared with set_source()" % (
+ self.ffi, ffi_to_include,))
+ if included_source is not None:
+ raise VerificationError(
+ "not implemented yet: ffi.include() of a C-based "
+ "ffi inside a Python-based ffi")
+ prnt('from %s import ffi as _ffi%d' % (included_module_name, i))
+ prnt()
+ prnt("ffi = _cffi_backend.FFI('%s'," % (self.module_name,))
+ prnt(" _version = 0x%x," % (self._version,))
+ self._version = None
+ #
+ # the '_types' keyword argument
+ self.cffi_types = tuple(self.cffi_types) # don't change any more
+ types_lst = [op.as_python_bytes() for op in self.cffi_types]
+ prnt(' _types = %s,' % (self._to_py(''.join(types_lst)),))
+ typeindex2type = dict([(i, tp) for (tp, i) in self._typesdict.items()])
+ #
+ # the keyword arguments from ALL_STEPS
+ for step_name in self.ALL_STEPS:
+ lst = self._lsts[step_name]
+ if len(lst) > 0 and step_name != "field":
+ prnt(' _%ss = %s,' % (step_name, self._to_py(lst)))
+ #
+ # the '_includes' keyword argument
+ if num_includes > 0:
+ prnt(' _includes = (%s,),' % (
+ ', '.join(['_ffi%d' % i for i in range(num_includes)]),))
+ #
+ # the footer
+ prnt(')')
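+    # The generated .py module is therefore roughly (version number and
+    # argument values illustrative):
+    #     # auto-generated file
+    #     import _cffi_backend
+    #     ffi = _cffi_backend.FFI('mod',
+    #         _version = 0x2601,
+    #         _types = b'...',
+    #         _globals = (...,),
+    #     )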
+
+ # ----------
+
+ def _gettypenum(self, type):
+ # a KeyError here is a bug. please report it! :-)
+ return self._typesdict[type]
+
+ def _convert_funcarg_to_c(self, tp, fromvar, tovar, errcode):
+ extraarg = ''
+ if isinstance(tp, model.BasePrimitiveType) and not tp.is_complex_type():
+ if tp.is_integer_type() and tp.name != '_Bool':
+ converter = '_cffi_to_c_int'
+ extraarg = ', %s' % tp.name
+ elif isinstance(tp, model.UnknownFloatType):
+ # don't check with is_float_type(): it may be a 'long
+            # double' here, and _cffi_to_c_double would lose precision
+ converter = '(%s)_cffi_to_c_double' % (tp.get_c_name(''),)
+ else:
+ cname = tp.get_c_name('')
+ converter = '(%s)_cffi_to_c_%s' % (cname,
+ tp.name.replace(' ', '_'))
+ if cname in ('char16_t', 'char32_t'):
+ self.needs_version(VERSION_CHAR16CHAR32)
+ errvalue = '-1'
+ #
+ elif isinstance(tp, model.PointerType):
+ self._convert_funcarg_to_c_ptr_or_array(tp, fromvar,
+ tovar, errcode)
+ return
+ #
+ elif (isinstance(tp, model.StructOrUnionOrEnum) or
+ isinstance(tp, model.BasePrimitiveType)):
+ # a struct (not a struct pointer) as a function argument;
+ # or, a complex (the same code works)
+ self._prnt(' if (_cffi_to_c((char *)&%s, _cffi_type(%d), %s) < 0)'
+ % (tovar, self._gettypenum(tp), fromvar))
+ self._prnt(' %s;' % errcode)
+ return
+ #
+ elif isinstance(tp, model.FunctionPtrType):
+ converter = '(%s)_cffi_to_c_pointer' % tp.get_c_name('')
+ extraarg = ', _cffi_type(%d)' % self._gettypenum(tp)
+ errvalue = 'NULL'
+ #
+ else:
+ raise NotImplementedError(tp)
+ #
+ self._prnt(' %s = %s(%s%s);' % (tovar, converter, fromvar, extraarg))
+ self._prnt(' if (%s == (%s)%s && PyErr_Occurred())' % (
+ tovar, tp.get_c_name(''), errvalue))
+ self._prnt(' %s;' % errcode)
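+    # As a concrete sketch, an 'int' argument comes out as:
+    #     x0 = _cffi_to_c_int(arg0, int);
+    #     if (x0 == (int)-1 && PyErr_Occurred())
+    #         return NULL;
+    # with 'arg0', 'x0' and 'return NULL' being the usual fromvar,
+    # tovar and errcode values.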
+
+ def _extra_local_variables(self, tp, localvars, freelines):
+ if isinstance(tp, model.PointerType):
+ localvars.add('Py_ssize_t datasize')
+ localvars.add('struct _cffi_freeme_s *large_args_free = NULL')
+ freelines.add('if (large_args_free != NULL)'
+ ' _cffi_free_array_arguments(large_args_free);')
+
+ def _convert_funcarg_to_c_ptr_or_array(self, tp, fromvar, tovar, errcode):
+ self._prnt(' datasize = _cffi_prepare_pointer_call_argument(')
+ self._prnt(' _cffi_type(%d), %s, (char **)&%s);' % (
+ self._gettypenum(tp), fromvar, tovar))
+ self._prnt(' if (datasize != 0) {')
+ self._prnt(' %s = ((size_t)datasize) <= 640 ? '
+ '(%s)alloca((size_t)datasize) : NULL;' % (
+ tovar, tp.get_c_name('')))
+ self._prnt(' if (_cffi_convert_array_argument(_cffi_type(%d), %s, '
+ '(char **)&%s,' % (self._gettypenum(tp), fromvar, tovar))
+ self._prnt(' datasize, &large_args_free) < 0)')
+ self._prnt(' %s;' % errcode)
+ self._prnt(' }')
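+    # Pointer arguments use a two-step scheme: buffers of at most 640
+    # bytes are stack-allocated with alloca(), larger ones are heap-
+    # allocated and chained onto 'large_args_free' for release after
+    # the call.  For a 'char *' argument (type index N illustrative):
+    #     datasize = _cffi_prepare_pointer_call_argument(
+    #         _cffi_type(N), arg0, (char **)&x0);
+    #     if (datasize != 0) {
+    #       x0 = ((size_t)datasize) <= 640 ? (char *)alloca((size_t)datasize) : NULL;
+    #       if (_cffi_convert_array_argument(_cffi_type(N), arg0, (char **)&x0,
+    #               datasize, &large_args_free) < 0)
+    #         return NULL;
+    #     }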
+
+ def _convert_expr_from_c(self, tp, var, context):
+ if isinstance(tp, model.BasePrimitiveType):
+ if tp.is_integer_type() and tp.name != '_Bool':
+ return '_cffi_from_c_int(%s, %s)' % (var, tp.name)
+ elif isinstance(tp, model.UnknownFloatType):
+ return '_cffi_from_c_double(%s)' % (var,)
+ elif tp.name != 'long double' and not tp.is_complex_type():
+ cname = tp.name.replace(' ', '_')
+ if cname in ('char16_t', 'char32_t'):
+ self.needs_version(VERSION_CHAR16CHAR32)
+ return '_cffi_from_c_%s(%s)' % (cname, var)
+ else:
+ return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % (
+ var, self._gettypenum(tp))
+ elif isinstance(tp, (model.PointerType, model.FunctionPtrType)):
+ return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % (
+ var, self._gettypenum(tp))
+ elif isinstance(tp, model.ArrayType):
+ return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % (
+ var, self._gettypenum(model.PointerType(tp.item)))
+ elif isinstance(tp, model.StructOrUnion):
+ if tp.fldnames is None:
+ raise TypeError("'%s' is used as %s, but is opaque" % (
+ tp._get_c_name(), context))
+ return '_cffi_from_c_struct((char *)&%s, _cffi_type(%d))' % (
+ var, self._gettypenum(tp))
+ elif isinstance(tp, model.EnumType):
+ return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % (
+ var, self._gettypenum(tp))
+ else:
+ raise NotImplementedError(tp)
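+    # e.g. an 'int' value is boxed as '_cffi_from_c_int(result, int)',
+    # an array decays to a pointer to its item type, and a non-opaque
+    # 'struct s' becomes '_cffi_from_c_struct((char *)&result,
+    # _cffi_type(N))', N being its type index.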
+
+ # ----------
+ # typedefs
+
+ def _typedef_type(self, tp, name):
+ return self._global_type(tp, "(*(%s *)0)" % (name,))
+
+ def _generate_cpy_typedef_collecttype(self, tp, name):
+ self._do_collect_type(self._typedef_type(tp, name))
+
+ def _generate_cpy_typedef_decl(self, tp, name):
+ pass
+
+ def _typedef_ctx(self, tp, name):
+ type_index = self._typesdict[tp]
+ self._lsts["typename"].append(TypenameExpr(name, type_index))
+
+ def _generate_cpy_typedef_ctx(self, tp, name):
+ tp = self._typedef_type(tp, name)
+ self._typedef_ctx(tp, name)
+ if getattr(tp, "origin", None) == "unknown_type":
+ self._struct_ctx(tp, tp.name, approxname=None)
+ elif isinstance(tp, model.NamedPointerType):
+ self._struct_ctx(tp.totype, tp.totype.name, approxname=tp.name,
+ named_ptr=tp)
+
+ # ----------
+ # function declarations
+
+ def _generate_cpy_function_collecttype(self, tp, name):
+ self._do_collect_type(tp.as_raw_function())
+ if tp.ellipsis and not self.target_is_python:
+ self._do_collect_type(tp)
+
+ def _generate_cpy_function_decl(self, tp, name):
+ assert not self.target_is_python
+ assert isinstance(tp, model.FunctionPtrType)
+ if tp.ellipsis:
+ # cannot support vararg functions better than this: check for its
+ # exact type (including the fixed arguments), and build it as a
+ # constant function pointer (no CPython wrapper)
+ self._generate_cpy_constant_decl(tp, name)
+ return
+ prnt = self._prnt
+ numargs = len(tp.args)
+ if numargs == 0:
+ argname = 'noarg'
+ elif numargs == 1:
+ argname = 'arg0'
+ else:
+ argname = 'args'
+ #
+ # ------------------------------
+ # the 'd' version of the function, only for addressof(lib, 'func')
+ arguments = []
+ call_arguments = []
+ context = 'argument of %s' % name
+ for i, type in enumerate(tp.args):
+ arguments.append(type.get_c_name(' x%d' % i, context))
+ call_arguments.append('x%d' % i)
+ repr_arguments = ', '.join(arguments)
+ repr_arguments = repr_arguments or 'void'
+ if tp.abi:
+ abi = tp.abi + ' '
+ else:
+ abi = ''
+ name_and_arguments = '%s_cffi_d_%s(%s)' % (abi, name, repr_arguments)
+ prnt('static %s' % (tp.result.get_c_name(name_and_arguments),))
+ prnt('{')
+ call_arguments = ', '.join(call_arguments)
+ result_code = 'return '
+ if isinstance(tp.result, model.VoidType):
+ result_code = ''
+ prnt(' %s%s(%s);' % (result_code, name, call_arguments))
+ prnt('}')
+ #
+ prnt('#ifndef PYPY_VERSION') # ------------------------------
+ #
+ prnt('static PyObject *')
+ prnt('_cffi_f_%s(PyObject *self, PyObject *%s)' % (name, argname))
+ prnt('{')
+ #
+ context = 'argument of %s' % name
+ for i, type in enumerate(tp.args):
+ arg = type.get_c_name(' x%d' % i, context)
+ prnt(' %s;' % arg)
+ #
+ localvars = set()
+ freelines = set()
+ for type in tp.args:
+ self._extra_local_variables(type, localvars, freelines)
+ for decl in sorted(localvars):
+ prnt(' %s;' % (decl,))
+ #
+ if not isinstance(tp.result, model.VoidType):
+ result_code = 'result = '
+ context = 'result of %s' % name
+ result_decl = ' %s;' % tp.result.get_c_name(' result', context)
+ prnt(result_decl)
+ prnt(' PyObject *pyresult;')
+ else:
+ result_decl = None
+ result_code = ''
+ #
+ if len(tp.args) > 1:
+ rng = range(len(tp.args))
+ for i in rng:
+ prnt(' PyObject *arg%d;' % i)
+ prnt()
+ prnt(' if (!PyArg_UnpackTuple(args, "%s", %d, %d, %s))' % (
+ name, len(rng), len(rng),
+ ', '.join(['&arg%d' % i for i in rng])))
+ prnt(' return NULL;')
+ prnt()
+ #
+ for i, type in enumerate(tp.args):
+ self._convert_funcarg_to_c(type, 'arg%d' % i, 'x%d' % i,
+ 'return NULL')
+ prnt()
+ #
+ prnt(' Py_BEGIN_ALLOW_THREADS')
+ prnt(' _cffi_restore_errno();')
+ call_arguments = ['x%d' % i for i in range(len(tp.args))]
+ call_arguments = ', '.join(call_arguments)
+ prnt(' { %s%s(%s); }' % (result_code, name, call_arguments))
+ prnt(' _cffi_save_errno();')
+ prnt(' Py_END_ALLOW_THREADS')
+ prnt()
+ #
+ prnt(' (void)self; /* unused */')
+ if numargs == 0:
+ prnt(' (void)noarg; /* unused */')
+ if result_code:
+ prnt(' pyresult = %s;' %
+ self._convert_expr_from_c(tp.result, 'result', 'result type'))
+ for freeline in freelines:
+ prnt(' ' + freeline)
+ prnt(' return pyresult;')
+ else:
+ for freeline in freelines:
+ prnt(' ' + freeline)
+ prnt(' Py_INCREF(Py_None);')
+ prnt(' return Py_None;')
+ prnt('}')
+ #
+ prnt('#else') # ------------------------------
+ #
+ # the PyPy version: need to replace struct/union arguments with
+ # pointers, and if the result is a struct/union, insert a first
+ # arg that is a pointer to the result. We also do that for
+ # complex args and return type.
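+        # e.g. 'struct s f(struct s x)' is wrapped for PyPy as
+        #     static void _cffi_f_f(struct s *result, struct s *x0)
+        # instead of passing and returning the struct by value.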
+ def need_indirection(type):
+ return (isinstance(type, model.StructOrUnion) or
+ (isinstance(type, model.PrimitiveType) and
+ type.is_complex_type()))
+ difference = False
+ arguments = []
+ call_arguments = []
+ context = 'argument of %s' % name
+ for i, type in enumerate(tp.args):
+ indirection = ''
+ if need_indirection(type):
+ indirection = '*'
+ difference = True
+ arg = type.get_c_name(' %sx%d' % (indirection, i), context)
+ arguments.append(arg)
+ call_arguments.append('%sx%d' % (indirection, i))
+ tp_result = tp.result
+ if need_indirection(tp_result):
+ context = 'result of %s' % name
+ arg = tp_result.get_c_name(' *result', context)
+ arguments.insert(0, arg)
+ tp_result = model.void_type
+ result_decl = None
+ result_code = '*result = '
+ difference = True
+ if difference:
+ repr_arguments = ', '.join(arguments)
+ repr_arguments = repr_arguments or 'void'
+ name_and_arguments = '%s_cffi_f_%s(%s)' % (abi, name,
+ repr_arguments)
+ prnt('static %s' % (tp_result.get_c_name(name_and_arguments),))
+ prnt('{')
+ if result_decl:
+ prnt(result_decl)
+ call_arguments = ', '.join(call_arguments)
+ prnt(' { %s%s(%s); }' % (result_code, name, call_arguments))
+ if result_decl:
+ prnt(' return result;')
+ prnt('}')
+ else:
+ prnt('# define _cffi_f_%s _cffi_d_%s' % (name, name))
+ #
+ prnt('#endif') # ------------------------------
+ prnt()
+
+ def _generate_cpy_function_ctx(self, tp, name):
+ if tp.ellipsis and not self.target_is_python:
+ self._generate_cpy_constant_ctx(tp, name)
+ return
+ type_index = self._typesdict[tp.as_raw_function()]
+ numargs = len(tp.args)
+ if self.target_is_python:
+ meth_kind = OP_DLOPEN_FUNC
+ elif numargs == 0:
+ meth_kind = OP_CPYTHON_BLTN_N # 'METH_NOARGS'
+ elif numargs == 1:
+ meth_kind = OP_CPYTHON_BLTN_O # 'METH_O'
+ else:
+ meth_kind = OP_CPYTHON_BLTN_V # 'METH_VARARGS'
+ self._lsts["global"].append(
+ GlobalExpr(name, '_cffi_f_%s' % name,
+ CffiOp(meth_kind, type_index),
+ size='_cffi_d_%s' % name))
+
+ # ----------
+ # named structs or unions
+
+ def _field_type(self, tp_struct, field_name, tp_field):
+ if isinstance(tp_field, model.ArrayType):
+ actual_length = tp_field.length
+ if actual_length == '...':
+ ptr_struct_name = tp_struct.get_c_name('*')
+ actual_length = '_cffi_array_len(((%s)0)->%s)' % (
+ ptr_struct_name, field_name)
+ tp_item = self._field_type(tp_struct, '%s[0]' % field_name,
+ tp_field.item)
+ tp_field = model.ArrayType(tp_item, actual_length)
+ return tp_field
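+    # e.g. for a field "int a[...];" of "struct s", the '...' length is
+    # replaced by the compile-time expression
+    #     _cffi_array_len(((struct s *)0)->a)
+    # so that the real length is supplied by the C compiler.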
+
+ def _struct_collecttype(self, tp):
+ self._do_collect_type(tp)
+ if self.target_is_python:
+ # also requires nested anon struct/unions in ABI mode, recursively
+ for fldtype in tp.anonymous_struct_fields():
+ self._struct_collecttype(fldtype)
+
+ def _struct_decl(self, tp, cname, approxname):
+ if tp.fldtypes is None:
+ return
+ prnt = self._prnt
+ checkfuncname = '_cffi_checkfld_%s' % (approxname,)
+ prnt('_CFFI_UNUSED_FN')
+ prnt('static void %s(%s *p)' % (checkfuncname, cname))
+ prnt('{')
+ prnt(' /* only to generate compile-time warnings or errors */')
+ prnt(' (void)p;')
+ for fname, ftype, fbitsize, fqual in self._enum_fields(tp):
+ try:
+ if ftype.is_integer_type() or fbitsize >= 0:
+ # accept all integers, but complain on float or double
+ if fname != '':
+ prnt(" (void)((p->%s) | 0); /* check that '%s.%s' is "
+ "an integer */" % (fname, cname, fname))
+ continue
+ # only accept exactly the type declared, except that '[]'
+ # is interpreted as a '*' and so will match any array length.
+ # (It would also match '*', but that's harder to detect...)
+ while (isinstance(ftype, model.ArrayType)
+ and (ftype.length is None or ftype.length == '...')):
+ ftype = ftype.item
+ fname = fname + '[0]'
+ prnt(' { %s = &p->%s; (void)tmp; }' % (
+ ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual),
+ fname))
+ except VerificationError as e:
+ prnt(' /* %s */' % str(e)) # cannot verify it, ignore
+ prnt('}')
+ prnt('struct _cffi_align_%s { char x; %s y; };' % (approxname, cname))
+ prnt()
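+    # For "struct s { int n; };" the emitted checker is roughly:
+    #     _CFFI_UNUSED_FN
+    #     static void _cffi_checkfld_struct_s(struct s *p)
+    #     {
+    #       /* only to generate compile-time warnings or errors */
+    #       (void)p;
+    #       (void)((p->n) | 0);  /* check that 'struct s.n' is an integer */
+    #     }
+    #     struct _cffi_align_struct_s { char x; struct s y; };
+    # and the '_cffi_align_*' struct lets offsetof() recover the
+    # alignment of the type.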
+
+ def _struct_ctx(self, tp, cname, approxname, named_ptr=None):
+ type_index = self._typesdict[tp]
+ reason_for_not_expanding = None
+ flags = []
+ if isinstance(tp, model.UnionType):
+ flags.append("_CFFI_F_UNION")
+ if tp.fldtypes is None:
+ flags.append("_CFFI_F_OPAQUE")
+ reason_for_not_expanding = "opaque"
+ if (tp not in self.ffi._parser._included_declarations and
+ (named_ptr is None or
+ named_ptr not in self.ffi._parser._included_declarations)):
+ if tp.fldtypes is None:
+ pass # opaque
+ elif tp.partial or any(tp.anonymous_struct_fields()):
+ pass # field layout obtained silently from the C compiler
+ else:
+ flags.append("_CFFI_F_CHECK_FIELDS")
+ if tp.packed:
+ if tp.packed > 1:
+ raise NotImplementedError(
+ "%r is declared with 'pack=%r'; only 0 or 1 are "
+ "supported in API mode (try to use \"...;\", which "
+ "does not require a 'pack' declaration)" %
+ (tp, tp.packed))
+ flags.append("_CFFI_F_PACKED")
+ else:
+ flags.append("_CFFI_F_EXTERNAL")
+ reason_for_not_expanding = "external"
+ flags = '|'.join(flags) or '0'
+ c_fields = []
+ if reason_for_not_expanding is None:
+ enumfields = list(self._enum_fields(tp))
+ for fldname, fldtype, fbitsize, fqual in enumfields:
+ fldtype = self._field_type(tp, fldname, fldtype)
+ self._check_not_opaque(fldtype,
+ "field '%s.%s'" % (tp.name, fldname))
+ # cname is None for _add_missing_struct_unions() only
+ op = OP_NOOP
+ if fbitsize >= 0:
+ op = OP_BITFIELD
+ size = '%d /* bits */' % fbitsize
+ elif cname is None or (
+ isinstance(fldtype, model.ArrayType) and
+ fldtype.length is None):
+ size = '(size_t)-1'
+ else:
+ size = 'sizeof(((%s)0)->%s)' % (
+ tp.get_c_name('*') if named_ptr is None
+ else named_ptr.name,
+ fldname)
+ if cname is None or fbitsize >= 0:
+ offset = '(size_t)-1'
+ elif named_ptr is not None:
+ offset = '(size_t)(((char *)&((%s)4096)->%s) - (char *)4096)' % (
+ named_ptr.name, fldname)
+ else:
+ offset = 'offsetof(%s, %s)' % (tp.get_c_name(''), fldname)
+ c_fields.append(
+ FieldExpr(fldname, offset, size, fbitsize,
+ CffiOp(op, self._typesdict[fldtype])))
+ first_field_index = len(self._lsts["field"])
+ self._lsts["field"].extend(c_fields)
+ #
+ if cname is None: # unknown name, for _add_missing_struct_unions
+ size = '(size_t)-2'
+ align = -2
+ comment = "unnamed"
+ else:
+ if named_ptr is not None:
+ size = 'sizeof(*(%s)0)' % (named_ptr.name,)
+ align = '-1 /* unknown alignment */'
+ else:
+ size = 'sizeof(%s)' % (cname,)
+ align = 'offsetof(struct _cffi_align_%s, y)' % (approxname,)
+ comment = None
+ else:
+ size = '(size_t)-1'
+ align = -1
+ first_field_index = -1
+ comment = reason_for_not_expanding
+ self._lsts["struct_union"].append(
+ StructUnionExpr(tp.name, type_index, flags, size, align, comment,
+ first_field_index, c_fields))
+ self._seen_struct_unions.add(tp)
+
+ def _check_not_opaque(self, tp, location):
+ while isinstance(tp, model.ArrayType):
+ tp = tp.item
+ if isinstance(tp, model.StructOrUnion) and tp.fldtypes is None:
+ raise TypeError(
+ "%s is of an opaque type (not declared in cdef())" % location)
+
+ def _add_missing_struct_unions(self):
+ # not very nice, but some struct declarations might be missing
+ # because they don't have any known C name. Check that they are
+ # not partial (we can't complete or verify them!) and emit them
+ # anonymously.
+ lst = list(self._struct_unions.items())
+ lst.sort(key=lambda tp_order: tp_order[1])
+ for tp, order in lst:
+ if tp not in self._seen_struct_unions:
+ if tp.partial:
+ raise NotImplementedError("internal inconsistency: %r is "
+ "partial but was not seen at "
+ "this point" % (tp,))
+ if tp.name.startswith('$') and tp.name[1:].isdigit():
+ approxname = tp.name[1:]
+ elif tp.name == '_IO_FILE' and tp.forcename == 'FILE':
+ approxname = 'FILE'
+ self._typedef_ctx(tp, 'FILE')
+ else:
+ raise NotImplementedError("internal inconsistency: %r" %
+ (tp,))
+ self._struct_ctx(tp, None, approxname)
+
+ def _generate_cpy_struct_collecttype(self, tp, name):
+ self._struct_collecttype(tp)
+ _generate_cpy_union_collecttype = _generate_cpy_struct_collecttype
+
+ def _struct_names(self, tp):
+ cname = tp.get_c_name('')
+ if ' ' in cname:
+ return cname, cname.replace(' ', '_')
+ else:
+ return cname, '_' + cname
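+    # e.g. 'struct foo_s' -> ('struct foo_s', 'struct_foo_s'), while a
+    # typedef name such as 'foo_t' -> ('foo_t', '_foo_t'); the second
+    # item is the identifier-safe 'approxname' used in _struct_decl().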
+
+ def _generate_cpy_struct_decl(self, tp, name):
+ self._struct_decl(tp, *self._struct_names(tp))
+ _generate_cpy_union_decl = _generate_cpy_struct_decl
+
+ def _generate_cpy_struct_ctx(self, tp, name):
+ self._struct_ctx(tp, *self._struct_names(tp))
+ _generate_cpy_union_ctx = _generate_cpy_struct_ctx
+
+ # ----------
+ # 'anonymous' declarations. These are produced for anonymous structs
+ # or unions; the 'name' is obtained by a typedef.
+
+ def _generate_cpy_anonymous_collecttype(self, tp, name):
+ if isinstance(tp, model.EnumType):
+ self._generate_cpy_enum_collecttype(tp, name)
+ else:
+ self._struct_collecttype(tp)
+
+ def _generate_cpy_anonymous_decl(self, tp, name):
+ if isinstance(tp, model.EnumType):
+ self._generate_cpy_enum_decl(tp)
+ else:
+ self._struct_decl(tp, name, 'typedef_' + name)
+
+ def _generate_cpy_anonymous_ctx(self, tp, name):
+ if isinstance(tp, model.EnumType):
+ self._enum_ctx(tp, name)
+ else:
+ self._struct_ctx(tp, name, 'typedef_' + name)
+
+ # ----------
+ # constants, declared with "static const ..."
+
+ def _generate_cpy_const(self, is_int, name, tp=None, category='const',
+ check_value=None):
+ if (category, name) in self._seen_constants:
+ raise VerificationError(
+ "duplicate declaration of %s '%s'" % (category, name))
+ self._seen_constants.add((category, name))
+ #
+ prnt = self._prnt
+ funcname = '_cffi_%s_%s' % (category, name)
+ if is_int:
+ prnt('static int %s(unsigned long long *o)' % funcname)
+ prnt('{')
+ prnt(' int n = (%s) <= 0;' % (name,))
+ prnt(' *o = (unsigned long long)((%s) | 0);'
+ ' /* check that %s is an integer */' % (name, name))
+ if check_value is not None:
+ if check_value > 0:
+ check_value = '%dU' % (check_value,)
+ prnt(' if (!_cffi_check_int(*o, n, %s))' % (check_value,))
+ prnt(' n |= 2;')
+ prnt(' return n;')
+ prnt('}')
+ else:
+ assert check_value is None
+ prnt('static void %s(char *o)' % funcname)
+ prnt('{')
+ prnt(' *(%s)o = %s;' % (tp.get_c_name('*'), name))
+ prnt('}')
+ prnt()
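+    # For an integer constant FOO (name illustrative) this emits:
+    #     static int _cffi_const_FOO(unsigned long long *o)
+    #     {
+    #       int n = (FOO) <= 0;
+    #       *o = (unsigned long long)((FOO) | 0);  /* check that FOO is an integer */
+    #       return n;
+    #     }
+    # where bit 0 of the result records whether FOO is negative or zero
+    # and 'n |= 2' flags a check_value mismatch.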
+
+ def _generate_cpy_constant_collecttype(self, tp, name):
+ is_int = tp.is_integer_type()
+ if not is_int or self.target_is_python:
+ self._do_collect_type(tp)
+
+ def _generate_cpy_constant_decl(self, tp, name):
+ is_int = tp.is_integer_type()
+ self._generate_cpy_const(is_int, name, tp)
+
+ def _generate_cpy_constant_ctx(self, tp, name):
+ if not self.target_is_python and tp.is_integer_type():
+ type_op = CffiOp(OP_CONSTANT_INT, -1)
+ else:
+ if self.target_is_python:
+ const_kind = OP_DLOPEN_CONST
+ else:
+ const_kind = OP_CONSTANT
+ type_index = self._typesdict[tp]
+ type_op = CffiOp(const_kind, type_index)
+ self._lsts["global"].append(
+ GlobalExpr(name, '_cffi_const_%s' % name, type_op))
+
+ # ----------
+ # enums
+
+ def _generate_cpy_enum_collecttype(self, tp, name):
+ self._do_collect_type(tp)
+
+ def _generate_cpy_enum_decl(self, tp, name=None):
+ for enumerator in tp.enumerators:
+ self._generate_cpy_const(True, enumerator)
+
+ def _enum_ctx(self, tp, cname):
+ type_index = self._typesdict[tp]
+ type_op = CffiOp(OP_ENUM, -1)
+ if self.target_is_python:
+ tp.check_not_partial()
+ for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues):
+ self._lsts["global"].append(
+ GlobalExpr(enumerator, '_cffi_const_%s' % enumerator, type_op,
+ check_value=enumvalue))
+ #
+ if cname is not None and '$' not in cname and not self.target_is_python:
+ size = "sizeof(%s)" % cname
+ signed = "((%s)-1) <= 0" % cname
+ else:
+ basetp = tp.build_baseinttype(self.ffi, [])
+ size = self.ffi.sizeof(basetp)
+ signed = int(int(self.ffi.cast(basetp, -1)) < 0)
+ allenums = ",".join(tp.enumerators)
+ self._lsts["enum"].append(
+ EnumExpr(tp.name, type_index, size, signed, allenums))
+
+ def _generate_cpy_enum_ctx(self, tp, name):
+ self._enum_ctx(tp, tp._get_c_name())
+
+ # ----------
+ # macros: for now only for integers
+
+ def _generate_cpy_macro_collecttype(self, tp, name):
+ pass
+
+ def _generate_cpy_macro_decl(self, tp, name):
+ if tp == '...':
+ check_value = None
+ else:
+ check_value = tp # an integer
+ self._generate_cpy_const(True, name, check_value=check_value)
+
+ def _generate_cpy_macro_ctx(self, tp, name):
+ if tp == '...':
+ if self.target_is_python:
+ raise VerificationError(
+ "cannot use the syntax '...' in '#define %s ...' when "
+ "using the ABI mode" % (name,))
+ check_value = None
+ else:
+ check_value = tp # an integer
+ type_op = CffiOp(OP_CONSTANT_INT, -1)
+ self._lsts["global"].append(
+ GlobalExpr(name, '_cffi_const_%s' % name, type_op,
+ check_value=check_value))
+
+ # ----------
+ # global variables
+
+ def _global_type(self, tp, global_name):
+ if isinstance(tp, model.ArrayType):
+ actual_length = tp.length
+ if actual_length == '...':
+ actual_length = '_cffi_array_len(%s)' % (global_name,)
+ tp_item = self._global_type(tp.item, '%s[0]' % global_name)
+ tp = model.ArrayType(tp_item, actual_length)
+ return tp
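+    # e.g. a global "int g[...];" gets its length from
+    # '_cffi_array_len(g)', mirroring _field_type() above.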
+
+ def _generate_cpy_variable_collecttype(self, tp, name):
+ self._do_collect_type(self._global_type(tp, name))
+
+ def _generate_cpy_variable_decl(self, tp, name):
+ prnt = self._prnt
+ tp = self._global_type(tp, name)
+ if isinstance(tp, model.ArrayType) and tp.length is None:
+ tp = tp.item
+ ampersand = ''
+ else:
+ ampersand = '&'
+        # This code assumes that a cast from "tp *" to "void *" is a
+ # no-op, i.e. a function that returns a "tp *" can be called
+ # as if it returned a "void *". This should be generally true
+ # on any modern machine. The only exception to that rule (on
+ # uncommon architectures, and as far as I can tell) might be
+ # if 'tp' were a function type, but that is not possible here.
+ # (If 'tp' is a function _pointer_ type, then casts from "fn_t
+ # **" to "void *" are again no-ops, as far as I can tell.)
+ decl = '*_cffi_var_%s(void)' % (name,)
+ prnt('static ' + tp.get_c_name(decl, quals=self._current_quals))
+ prnt('{')
+ prnt(' return %s(%s);' % (ampersand, name))
+ prnt('}')
+ prnt()
+
+ def _generate_cpy_variable_ctx(self, tp, name):
+ tp = self._global_type(tp, name)
+ type_index = self._typesdict[tp]
+ if self.target_is_python:
+ op = OP_GLOBAL_VAR
+ else:
+ op = OP_GLOBAL_VAR_F
+ self._lsts["global"].append(
+ GlobalExpr(name, '_cffi_var_%s' % name, CffiOp(op, type_index)))
+
+ # ----------
+ # extern "Python"
+
+ def _generate_cpy_extern_python_collecttype(self, tp, name):
+ assert isinstance(tp, model.FunctionPtrType)
+ self._do_collect_type(tp)
+ _generate_cpy_dllexport_python_collecttype = \
+ _generate_cpy_extern_python_plus_c_collecttype = \
+ _generate_cpy_extern_python_collecttype
+
+ def _extern_python_decl(self, tp, name, tag_and_space):
+ prnt = self._prnt
+ if isinstance(tp.result, model.VoidType):
+ size_of_result = '0'
+ else:
+ context = 'result of %s' % name
+ size_of_result = '(int)sizeof(%s)' % (
+ tp.result.get_c_name('', context),)
+ prnt('static struct _cffi_externpy_s _cffi_externpy__%s =' % name)
+ prnt(' { "%s.%s", %s, 0, 0 };' % (
+ self.module_name, name, size_of_result))
+ prnt()
+ #
+ arguments = []
+ context = 'argument of %s' % name
+ for i, type in enumerate(tp.args):
+ arg = type.get_c_name(' a%d' % i, context)
+ arguments.append(arg)
+ #
+ repr_arguments = ', '.join(arguments)
+ repr_arguments = repr_arguments or 'void'
+ name_and_arguments = '%s(%s)' % (name, repr_arguments)
+ if tp.abi == "__stdcall":
+ name_and_arguments = '_cffi_stdcall ' + name_and_arguments
+ #
+ def may_need_128_bits(tp):
+ return (isinstance(tp, model.PrimitiveType) and
+ tp.name == 'long double')
+ #
+ size_of_a = max(len(tp.args)*8, 8)
+ if may_need_128_bits(tp.result):
+ size_of_a = max(size_of_a, 16)
+ if isinstance(tp.result, model.StructOrUnion):
+ size_of_a = 'sizeof(%s) > %d ? sizeof(%s) : %d' % (
+ tp.result.get_c_name(''), size_of_a,
+ tp.result.get_c_name(''), size_of_a)
+ prnt('%s%s' % (tag_and_space, tp.result.get_c_name(name_and_arguments)))
+ prnt('{')
+ prnt(' char a[%s];' % size_of_a)
+ prnt(' char *p = a;')
+ for i, type in enumerate(tp.args):
+ arg = 'a%d' % i
+ if (isinstance(type, model.StructOrUnion) or
+ may_need_128_bits(type)):
+ arg = '&' + arg
+ type = model.PointerType(type)
+ prnt(' *(%s)(p + %d) = %s;' % (type.get_c_name('*'), i*8, arg))
+ prnt(' _cffi_call_python(&_cffi_externpy__%s, p);' % name)
+ if not isinstance(tp.result, model.VoidType):
+ prnt(' return *(%s)p;' % (tp.result.get_c_name('*'),))
+ prnt('}')
+ prnt()
+ self._num_externpy += 1
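+    # For 'extern "Python" int f(int);' in module 'mod' (names
+    # illustrative) the generated trampoline is roughly:
+    #     static struct _cffi_externpy_s _cffi_externpy__f =
+    #       { "mod.f", (int)sizeof(int), 0, 0 };
+    #     static int f(int a0)
+    #     {
+    #       char a[8];
+    #       char *p = a;
+    #       *(int *)(p + 0) = a0;
+    #       _cffi_call_python(&_cffi_externpy__f, p);
+    #       return *(int *)p;
+    #     }
+    # i.e. arguments are packed into 8-byte slots of 'a' and the result
+    # is read back from the start of the same buffer.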
+
+ def _generate_cpy_extern_python_decl(self, tp, name):
+ self._extern_python_decl(tp, name, 'static ')
+
+ def _generate_cpy_dllexport_python_decl(self, tp, name):
+ self._extern_python_decl(tp, name, 'CFFI_DLLEXPORT ')
+
+ def _generate_cpy_extern_python_plus_c_decl(self, tp, name):
+ self._extern_python_decl(tp, name, '')
+
+ def _generate_cpy_extern_python_ctx(self, tp, name):
+ if self.target_is_python:
+ raise VerificationError(
+ "cannot use 'extern \"Python\"' in the ABI mode")
+ if tp.ellipsis:
+ raise NotImplementedError("a vararg function is extern \"Python\"")
+ type_index = self._typesdict[tp]
+ type_op = CffiOp(OP_EXTERN_PYTHON, type_index)
+ self._lsts["global"].append(
+ GlobalExpr(name, '&_cffi_externpy__%s' % name, type_op, name))
+
+ _generate_cpy_dllexport_python_ctx = \
+ _generate_cpy_extern_python_plus_c_ctx = \
+ _generate_cpy_extern_python_ctx
+
+ def _print_string_literal_in_array(self, s):
+ prnt = self._prnt
+ prnt('// # NB. this is not a string because of a size limit in MSVC')
+ if not isinstance(s, bytes): # unicode
+ s = s.encode('utf-8') # -> bytes
+ else:
+ s.decode('utf-8') # got bytes, check for valid utf-8
+ try:
+ s.decode('ascii')
+ except UnicodeDecodeError:
+ s = b'# -*- encoding: utf8 -*-\n' + s
+ for line in s.splitlines(True):
+ comment = line
+ if type('//') is bytes: # python2
+ line = map(ord, line) # make a list of integers
+ else: # python3
+ # type(line) is bytes, which enumerates like a list of integers
+ comment = ascii(comment)[1:-1]
+ prnt(('// ' + comment).rstrip())
+ printed_line = ''
+ for c in line:
+ if len(printed_line) >= 76:
+ prnt(printed_line)
+ printed_line = ''
+ printed_line += '%d,' % (c,)
+ prnt(printed_line)
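+    # e.g. on Python 3 the source line b"hi\n" is printed as:
+    #     // 'hi\n'
+    #     104,105,10,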
+
+ # ----------
+ # emitting the opcodes for individual types
+
+ def _emit_bytecode_VoidType(self, tp, index):
+ self.cffi_types[index] = CffiOp(OP_PRIMITIVE, PRIM_VOID)
+
+ def _emit_bytecode_PrimitiveType(self, tp, index):
+ prim_index = PRIMITIVE_TO_INDEX[tp.name]
+ self.cffi_types[index] = CffiOp(OP_PRIMITIVE, prim_index)
+
+ def _emit_bytecode_UnknownIntegerType(self, tp, index):
+ s = ('_cffi_prim_int(sizeof(%s), (\n'
+ ' ((%s)-1) | 0 /* check that %s is an integer type */\n'
+ ' ) <= 0)' % (tp.name, tp.name, tp.name))
+ self.cffi_types[index] = CffiOp(OP_PRIMITIVE, s)
+
+ def _emit_bytecode_UnknownFloatType(self, tp, index):
+ s = ('_cffi_prim_float(sizeof(%s) *\n'
+ ' (((%s)1) / 2) * 2 /* integer => 0, float => 1 */\n'
+ ' )' % (tp.name, tp.name))
+ self.cffi_types[index] = CffiOp(OP_PRIMITIVE, s)
+
+ def _emit_bytecode_RawFunctionType(self, tp, index):
+ self.cffi_types[index] = CffiOp(OP_FUNCTION, self._typesdict[tp.result])
+ index += 1
+ for tp1 in tp.args:
+ realindex = self._typesdict[tp1]
+ if index != realindex:
+ if isinstance(tp1, model.PrimitiveType):
+ self._emit_bytecode_PrimitiveType(tp1, index)
+ else:
+ self.cffi_types[index] = CffiOp(OP_NOOP, realindex)
+ index += 1
+ flags = int(tp.ellipsis)
+ if tp.abi is not None:
+ if tp.abi == '__stdcall':
+ flags |= 2
+ else:
+ raise NotImplementedError("abi=%r" % (tp.abi,))
+ self.cffi_types[index] = CffiOp(OP_FUNCTION_END, flags)
+
+ def _emit_bytecode_PointerType(self, tp, index):
+ self.cffi_types[index] = CffiOp(OP_POINTER, self._typesdict[tp.totype])
+
+ _emit_bytecode_ConstPointerType = _emit_bytecode_PointerType
+ _emit_bytecode_NamedPointerType = _emit_bytecode_PointerType
+
+ def _emit_bytecode_FunctionPtrType(self, tp, index):
+ raw = tp.as_raw_function()
+ self.cffi_types[index] = CffiOp(OP_POINTER, self._typesdict[raw])
+
+ def _emit_bytecode_ArrayType(self, tp, index):
+ item_index = self._typesdict[tp.item]
+ if tp.length is None:
+ self.cffi_types[index] = CffiOp(OP_OPEN_ARRAY, item_index)
+ elif tp.length == '...':
+ raise VerificationError(
+ "type %s badly placed: the '...' array length can only be "
+ "used on global arrays or on fields of structures" % (
+ str(tp).replace('/*...*/', '...'),))
+ else:
+ assert self.cffi_types[index + 1] == 'LEN'
+ self.cffi_types[index] = CffiOp(OP_ARRAY, item_index)
+ self.cffi_types[index + 1] = CffiOp(None, str(tp.length))
+
+ def _emit_bytecode_StructType(self, tp, index):
+ struct_index = self._struct_unions[tp]
+ self.cffi_types[index] = CffiOp(OP_STRUCT_UNION, struct_index)
+ _emit_bytecode_UnionType = _emit_bytecode_StructType
+
+ def _emit_bytecode_EnumType(self, tp, index):
+ enum_index = self._enums[tp]
+ self.cffi_types[index] = CffiOp(OP_ENUM, enum_index)
+
+
+if sys.version_info >= (3,):
+ NativeIO = io.StringIO
+else:
+ class NativeIO(io.BytesIO):
+ def write(self, s):
+ if isinstance(s, unicode):
+ s = s.encode('ascii')
+ super(NativeIO, self).write(s)
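+# NativeIO accepts the native 'str' type on both major Python versions:
+# on Python 3 it is simply io.StringIO, while on Python 2 unicode
+# strings are encoded to ASCII bytes before being written.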
+
+def _is_file_like(maybefile):
+ # compare to xml.etree.ElementTree._get_writer
+ return hasattr(maybefile, 'write')
+
+def _make_c_or_py_source(ffi, module_name, preamble, target_file, verbose):
+ if verbose:
+ print("generating %s" % (target_file,))
+ recompiler = Recompiler(ffi, module_name,
+ target_is_python=(preamble is None))
+ recompiler.collect_type_table()
+ recompiler.collect_step_tables()
+ if _is_file_like(target_file):
+ recompiler.write_source_to_f(target_file, preamble)
+ return True
+ f = NativeIO()
+ recompiler.write_source_to_f(f, preamble)
+ output = f.getvalue()
+ try:
+ with open(target_file, 'r') as f1:
+ if f1.read(len(output) + 1) != output:
+ raise IOError
+ if verbose:
+ print("(already up-to-date)")
+ return False # already up-to-date
+ except IOError:
+ tmp_file = '%s.~%d' % (target_file, os.getpid())
+ with open(tmp_file, 'w') as f1:
+ f1.write(output)
+ try:
+ os.rename(tmp_file, target_file)
+ except OSError:
+ os.unlink(target_file)
+ os.rename(tmp_file, target_file)
+ return True
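+# Note that the file is written atomically: the output first goes to
+# 'target.~<pid>' and is then rename()d over the target, so readers never
+# observe a partial file; the unlink()-then-rename() fallback is for
+# Windows, where os.rename() refuses to overwrite an existing file.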
+
+def make_c_source(ffi, module_name, preamble, target_c_file, verbose=False):
+ assert preamble is not None
+ return _make_c_or_py_source(ffi, module_name, preamble, target_c_file,
+ verbose)
+
+def make_py_source(ffi, module_name, target_py_file, verbose=False):
+ return _make_c_or_py_source(ffi, module_name, None, target_py_file,
+ verbose)
+
+def _modname_to_file(outputdir, modname, extension):
+ parts = modname.split('.')
+ try:
+ os.makedirs(os.path.join(outputdir, *parts[:-1]))
+ except OSError:
+ pass
+ parts[-1] += extension
+ return os.path.join(outputdir, *parts), parts
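+# e.g. _modname_to_file('build', 'pkg._mod', '.c') creates 'build/pkg/'
+# if needed and returns ('build/pkg/_mod.c', ['pkg', '_mod.c'])
+# (POSIX separators shown for illustration).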
+
+
+# Aaargh. Distutils is not tested at all for the purpose of compiling
+# DLLs that are not extension modules. Here are some hacks to work
+# around that, in the _patch_for_*() functions...
+
+def _patch_meth(patchlist, cls, name, new_meth):
+ old = getattr(cls, name)
+ patchlist.append((cls, name, old))
+ setattr(cls, name, new_meth)
+ return old
+
+def _unpatch_meths(patchlist):
+ for cls, name, old_meth in reversed(patchlist):
+ setattr(cls, name, old_meth)
+
+def _patch_for_embedding(patchlist):
+ if sys.platform == 'win32':
+ # we must not remove the manifest when building for embedding!
+ # FUTURE: this module was removed in setuptools 74; this is likely dead code and should be removed,
+ # since the toolchain it supports (VS2005-2008) is also long dead.
+ from cffi._shimmed_dist_utils import MSVCCompiler
+ if MSVCCompiler is not None:
+ _patch_meth(patchlist, MSVCCompiler, '_remove_visual_c_ref',
+ lambda self, manifest_file: manifest_file)
+
+ if sys.platform == 'darwin':
+ # we must not make a '-bundle', but a '-dynamiclib' instead
+ from cffi._shimmed_dist_utils import CCompiler
+ def my_link_shared_object(self, *args, **kwds):
+ if '-bundle' in self.linker_so:
+ self.linker_so = list(self.linker_so)
+ i = self.linker_so.index('-bundle')
+ self.linker_so[i] = '-dynamiclib'
+ return old_link_shared_object(self, *args, **kwds)
+ old_link_shared_object = _patch_meth(patchlist, CCompiler,
+ 'link_shared_object',
+ my_link_shared_object)
+
+def _patch_for_target(patchlist, target):
+ from cffi._shimmed_dist_utils import build_ext
+ # if 'target' is different from '*', we need to patch some internal
+ # method to just return this 'target' value, instead of having it
+ # built from module_name
+ if target.endswith('.*'):
+ target = target[:-2]
+ if sys.platform == 'win32':
+ target += '.dll'
+ elif sys.platform == 'darwin':
+ target += '.dylib'
+ else:
+ target += '.so'
+ _patch_meth(patchlist, build_ext, 'get_ext_filename',
+ lambda self, ext_name: target)
+
+
+def recompile(ffi, module_name, preamble, tmpdir='.', call_c_compiler=True,
+ c_file=None, source_extension='.c', extradir=None,
+ compiler_verbose=1, target=None, debug=None,
+ uses_ffiplatform=True, **kwds):
+ if not isinstance(module_name, str):
+ module_name = module_name.encode('ascii')
+ if ffi._windows_unicode:
+ ffi._apply_windows_unicode(kwds)
+ if preamble is not None:
+ if call_c_compiler and _is_file_like(c_file):
+ raise TypeError("Writing to file-like objects is not supported "
+ "with call_c_compiler=True")
+ embedding = (ffi._embedding is not None)
+ if embedding:
+ ffi._apply_embedding_fix(kwds)
+ if c_file is None:
+ c_file, parts = _modname_to_file(tmpdir, module_name,
+ source_extension)
+ if extradir:
+ parts = [extradir] + parts
+ ext_c_file = os.path.join(*parts)
+ else:
+ ext_c_file = c_file
+ #
+ if target is None:
+ if embedding:
+ target = '%s.*' % module_name
+ else:
+ target = '*'
+ #
+ if uses_ffiplatform:
+ ext = ffiplatform.get_extension(ext_c_file, module_name, **kwds)
+ else:
+ ext = None
+ updated = make_c_source(ffi, module_name, preamble, c_file,
+ verbose=compiler_verbose)
+ if call_c_compiler:
+ patchlist = []
+ cwd = os.getcwd()
+ try:
+ if embedding:
+ _patch_for_embedding(patchlist)
+ if target != '*':
+ _patch_for_target(patchlist, target)
+ if compiler_verbose:
+ if tmpdir == '.':
+ msg = 'the current directory is'
+ else:
+ msg = 'setting the current directory to'
+ print('%s %r' % (msg, os.path.abspath(tmpdir)))
+ os.chdir(tmpdir)
+ outputfilename = ffiplatform.compile('.', ext,
+ compiler_verbose, debug)
+ finally:
+ os.chdir(cwd)
+ _unpatch_meths(patchlist)
+ return outputfilename
+ else:
+ return ext, updated
+ else:
+ if c_file is None:
+ c_file, _ = _modname_to_file(tmpdir, module_name, '.py')
+ updated = make_py_source(ffi, module_name, c_file,
+ verbose=compiler_verbose)
+ if call_c_compiler:
+ return c_file
+ else:
+ return None, updated
+
diff --git a/Backend/venv/lib/python3.12/site-packages/cffi/setuptools_ext.py b/Backend/venv/lib/python3.12/site-packages/cffi/setuptools_ext.py
new file mode 100644
index 00000000..5cdd246f
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cffi/setuptools_ext.py
@@ -0,0 +1,229 @@
+import os
+import sys
+import sysconfig
+
+try:
+ basestring
+except NameError:
+ # Python 3.x
+ basestring = str
+
+def error(msg):
+ from cffi._shimmed_dist_utils import DistutilsSetupError
+ raise DistutilsSetupError(msg)
+
+
+def execfile(filename, glob):
+ # We use execfile() (here rewritten for Python 3) instead of
+ # __import__() to load the build script. The problem with
+ # a normal import is that in some packages, the intermediate
+ # __init__.py files may already try to import the file that
+ # we are generating.
+ with open(filename) as f:
+ src = f.read()
+ src += '\n' # Python 2.6 compatibility
+ code = compile(src, filename, 'exec')
+ exec(code, glob, glob)
+
+
+def add_cffi_module(dist, mod_spec):
+ from cffi.api import FFI
+
+ if not isinstance(mod_spec, basestring):
+ error("argument to 'cffi_modules=...' must be a str or a list of str,"
+ " not %r" % (type(mod_spec).__name__,))
+ mod_spec = str(mod_spec)
+ try:
+ build_file_name, ffi_var_name = mod_spec.split(':')
+ except ValueError:
+ error("%r must be of the form 'path/build.py:ffi_variable'" %
+ (mod_spec,))
+ if not os.path.exists(build_file_name):
+ ext = ''
+ rewritten = build_file_name.replace('.', '/') + '.py'
+ if os.path.exists(rewritten):
+ ext = ' (rewrite cffi_modules to [%r])' % (
+ rewritten + ':' + ffi_var_name,)
+ error("%r does not name an existing file%s" % (build_file_name, ext))
+
+ mod_vars = {'__name__': '__cffi__', '__file__': build_file_name}
+ execfile(build_file_name, mod_vars)
+
+ try:
+ ffi = mod_vars[ffi_var_name]
+ except KeyError:
+ error("%r: object %r not found in module" % (mod_spec,
+ ffi_var_name))
+ if not isinstance(ffi, FFI):
+ ffi = ffi() # maybe it's a function instead of directly an ffi
+ if not isinstance(ffi, FFI):
+ error("%r is not an FFI instance (got %r)" % (mod_spec,
+ type(ffi).__name__))
+ if not hasattr(ffi, '_assigned_source'):
+ error("%r: the set_source() method was not called" % (mod_spec,))
+ module_name, source, source_extension, kwds = ffi._assigned_source
+ if ffi._windows_unicode:
+ kwds = kwds.copy()
+ ffi._apply_windows_unicode(kwds)
+
+ if source is None:
+ _add_py_module(dist, ffi, module_name)
+ else:
+ _add_c_module(dist, ffi, module_name, source, source_extension, kwds)
+
+def _set_py_limited_api(Extension, kwds):
+ """
+ Add py_limited_api to kwds if setuptools >= 26 is in use.
+ Do not alter the setting if it already exists.
+ Setuptools takes care of ignoring the flag on Python 2 and PyPy.
+
+ CPython itself should ignore the flag in a debugging version
+ (by not listing .abi3.so in the extensions it supports), but
+ it doesn't so far, creating troubles. That's why we check
+ for "not hasattr(sys, 'gettotalrefcount')" (the 2.7 compatible equivalent
+ of 'd' not in sys.abiflags). (http://bugs.python.org/issue28401)
+
+ On Windows, with CPython <= 3.4, it's better not to use py_limited_api
+ because virtualenv *still* doesn't copy PYTHON3.DLL on these versions.
+ Recently (2020) we started shipping only >= 3.5 wheels, though. So
+ we'll give it another try and set py_limited_api on Windows >= 3.5.
+ """
+ from cffi._shimmed_dist_utils import log
+ from cffi import recompiler
+
+ if ('py_limited_api' not in kwds and not hasattr(sys, 'gettotalrefcount')
+ and recompiler.USE_LIMITED_API):
+ import setuptools
+ try:
+ setuptools_major_version = int(setuptools.__version__.partition('.')[0])
+ if setuptools_major_version >= 26:
+ kwds['py_limited_api'] = True
+ except ValueError: # certain development versions of setuptools
+ # If we don't know the version number of setuptools, we
+ # try to set 'py_limited_api' anyway. At worst, we get a
+ # warning.
+ kwds['py_limited_api'] = True
+
+ if sysconfig.get_config_var("Py_GIL_DISABLED"):
+ if kwds.get('py_limited_api'):
+ log.info("Ignoring py_limited_api=True for free-threaded build.")
+
+ kwds['py_limited_api'] = False
+
+ if kwds.get('py_limited_api') is False:
+ # avoid setting Py_LIMITED_API if py_limited_api=False
+ # which _cffi_include.h does unless _CFFI_NO_LIMITED_API is defined
+ kwds.setdefault("define_macros", []).append(("_CFFI_NO_LIMITED_API", None))
+ return kwds
+
+def _add_c_module(dist, ffi, module_name, source, source_extension, kwds):
+ # We are a setuptools extension. Need this build_ext for py_limited_api.
+ from setuptools.command.build_ext import build_ext
+ from cffi._shimmed_dist_utils import Extension, log, mkpath
+ from cffi import recompiler
+
+ allsources = ['$PLACEHOLDER']
+ allsources.extend(kwds.pop('sources', []))
+ kwds = _set_py_limited_api(Extension, kwds)
+ ext = Extension(name=module_name, sources=allsources, **kwds)
+
+ def make_mod(tmpdir, pre_run=None):
+ c_file = os.path.join(tmpdir, module_name + source_extension)
+ log.info("generating cffi module %r" % c_file)
+ mkpath(tmpdir)
+ # a setuptools-only, API-only hook: called with the "ext" and "ffi"
+ # arguments just before we turn the ffi into C code. To use it,
+ # subclass the 'distutils.command.build_ext.build_ext' class and
+ # add a method 'def pre_run(self, ext, ffi)'.
+ if pre_run is not None:
+ pre_run(ext, ffi)
+ updated = recompiler.make_c_source(ffi, module_name, source, c_file)
+ if not updated:
+ log.info("already up-to-date")
+ return c_file
+
+ if dist.ext_modules is None:
+ dist.ext_modules = []
+ dist.ext_modules.append(ext)
+
+ base_class = dist.cmdclass.get('build_ext', build_ext)
+ class build_ext_make_mod(base_class):
+ def run(self):
+ if ext.sources[0] == '$PLACEHOLDER':
+ pre_run = getattr(self, 'pre_run', None)
+ ext.sources[0] = make_mod(self.build_temp, pre_run)
+ base_class.run(self)
+ dist.cmdclass['build_ext'] = build_ext_make_mod
+ # NB. multiple runs here will create multiple 'build_ext_make_mod'
+ # classes. Even in this case the 'build_ext' command should be
+ # run once; but just in case, the logic above does nothing if
+ # called again.
+
+
+def _add_py_module(dist, ffi, module_name):
+ from setuptools.command.build_py import build_py
+ from setuptools.command.build_ext import build_ext
+ from cffi._shimmed_dist_utils import log, mkpath
+ from cffi import recompiler
+
+ def generate_mod(py_file):
+ log.info("generating cffi module %r" % py_file)
+ mkpath(os.path.dirname(py_file))
+ updated = recompiler.make_py_source(ffi, module_name, py_file)
+ if not updated:
+ log.info("already up-to-date")
+
+ base_class = dist.cmdclass.get('build_py', build_py)
+ class build_py_make_mod(base_class):
+ def run(self):
+ base_class.run(self)
+ module_path = module_name.split('.')
+ module_path[-1] += '.py'
+ generate_mod(os.path.join(self.build_lib, *module_path))
+ def get_source_files(self):
+ # This is called from 'setup.py sdist' only. Exclude
+            # the generated .py module in this case.
+ saved_py_modules = self.py_modules
+ try:
+ if saved_py_modules:
+ self.py_modules = [m for m in saved_py_modules
+ if m != module_name]
+ return base_class.get_source_files(self)
+ finally:
+ self.py_modules = saved_py_modules
+ dist.cmdclass['build_py'] = build_py_make_mod
+
+ # distutils and setuptools have no notion I could find of a
+ # generated python module. If we don't add module_name to
+ # dist.py_modules, then things mostly work but there are some
+ # combination of options (--root and --record) that will miss
+ # the module. So we add it here, which gives a few apparently
+ # harmless warnings about not finding the file outside the
+ # build directory.
+ # Then we need to hack more in get_source_files(); see above.
+ if dist.py_modules is None:
+ dist.py_modules = []
+ dist.py_modules.append(module_name)
+
+ # the following is only for "build_ext -i"
+ base_class_2 = dist.cmdclass.get('build_ext', build_ext)
+ class build_ext_make_mod(base_class_2):
+ def run(self):
+ base_class_2.run(self)
+ if self.inplace:
+ # from get_ext_fullpath() in distutils/command/build_ext.py
+ module_path = module_name.split('.')
+ package = '.'.join(module_path[:-1])
+ build_py = self.get_finalized_command('build_py')
+ package_dir = build_py.get_package_dir(package)
+ file_name = module_path[-1] + '.py'
+ generate_mod(os.path.join(package_dir, file_name))
+ dist.cmdclass['build_ext'] = build_ext_make_mod
+
+def cffi_modules(dist, attr, value):
+ assert attr == 'cffi_modules'
+ if isinstance(value, basestring):
+ value = [value]
+
+ for cffi_module in value:
+ add_cffi_module(dist, cffi_module)
diff --git a/Backend/venv/lib/python3.12/site-packages/cffi/vengine_cpy.py b/Backend/venv/lib/python3.12/site-packages/cffi/vengine_cpy.py
new file mode 100644
index 00000000..02e6a471
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cffi/vengine_cpy.py
@@ -0,0 +1,1087 @@
+#
+# DEPRECATED: implementation for ffi.verify()
+#
+import sys
+from . import model
+from .error import VerificationError
+from . import _imp_emulation as imp
+
+
+class VCPythonEngine(object):
+ _class_key = 'x'
+ _gen_python_module = True
+
+ def __init__(self, verifier):
+ self.verifier = verifier
+ self.ffi = verifier.ffi
+ self._struct_pending_verification = {}
+ self._types_of_builtin_functions = {}
+
+ def patch_extension_kwds(self, kwds):
+ pass
+
+ def find_module(self, module_name, path, so_suffixes):
+ try:
+ f, filename, descr = imp.find_module(module_name, path)
+ except ImportError:
+ return None
+ if f is not None:
+ f.close()
+ # Note that after a setuptools installation, there are both .py
+ # and .so files with the same basename. The code here relies on
+        # imp.find_module() finding the .so first.
+ if descr[0] not in so_suffixes:
+ return None
+ return filename
+
+ def collect_types(self):
+ self._typesdict = {}
+ self._generate("collecttype")
+
+ def _prnt(self, what=''):
+ self._f.write(what + '\n')
+
+ def _gettypenum(self, type):
+ # a KeyError here is a bug. please report it! :-)
+ return self._typesdict[type]
+
+ def _do_collect_type(self, tp):
+ if ((not isinstance(tp, model.PrimitiveType)
+ or tp.name == 'long double')
+ and tp not in self._typesdict):
+ num = len(self._typesdict)
+ self._typesdict[tp] = num
+
+ def write_source_to_f(self):
+ self.collect_types()
+ #
+ # The new module will have a _cffi_setup() function that receives
+ # objects from the ffi world, and that calls some setup code in
+ # the module. This setup code is split in several independent
+ # functions, e.g. one per constant. The functions are "chained"
+ # by ending in a tail call to each other.
+ #
+ # This is further split in two chained lists, depending on if we
+ # can do it at import-time or if we must wait for _cffi_setup() to
+        # provide us with the <ctype> objects. This is needed because we
+        # need the values of the enum constants in order to build the
+        # <ctype 'enum ...'> that we may have to pass to _cffi_setup().
+ #
+        # The following two 'chained_list_constants' items contain
+ # the head of these two chained lists, as a string that gives the
+ # call to do, if any.
+ self._chained_list_constants = ['((void)lib,0)', '((void)lib,0)']
+ #
+ prnt = self._prnt
+ # first paste some standard set of lines that are mostly '#define'
+ prnt(cffimod_header)
+ prnt()
+ # then paste the C source given by the user, verbatim.
+ prnt(self.verifier.preamble)
+ prnt()
+ #
+ # call generate_cpy_xxx_decl(), for every xxx found from
+ # ffi._parser._declarations. This generates all the functions.
+ self._generate("decl")
+ #
+ # implement the function _cffi_setup_custom() as calling the
+ # head of the chained list.
+ self._generate_setup_custom()
+ prnt()
+ #
+ # produce the method table, including the entries for the
+ # generated Python->C function wrappers, which are done
+ # by generate_cpy_function_method().
+ prnt('static PyMethodDef _cffi_methods[] = {')
+ self._generate("method")
+ prnt(' {"_cffi_setup", _cffi_setup, METH_VARARGS, NULL},')
+ prnt(' {NULL, NULL, 0, NULL} /* Sentinel */')
+ prnt('};')
+ prnt()
+ #
+ # standard init.
+ modname = self.verifier.get_module_name()
+ constants = self._chained_list_constants[False]
+ prnt('#if PY_MAJOR_VERSION >= 3')
+ prnt()
+ prnt('static struct PyModuleDef _cffi_module_def = {')
+ prnt(' PyModuleDef_HEAD_INIT,')
+ prnt(' "%s",' % modname)
+ prnt(' NULL,')
+ prnt(' -1,')
+ prnt(' _cffi_methods,')
+ prnt(' NULL, NULL, NULL, NULL')
+ prnt('};')
+ prnt()
+ prnt('PyMODINIT_FUNC')
+ prnt('PyInit_%s(void)' % modname)
+ prnt('{')
+ prnt(' PyObject *lib;')
+ prnt(' lib = PyModule_Create(&_cffi_module_def);')
+ prnt(' if (lib == NULL)')
+ prnt(' return NULL;')
+ prnt(' if (%s < 0 || _cffi_init() < 0) {' % (constants,))
+ prnt(' Py_DECREF(lib);')
+ prnt(' return NULL;')
+ prnt(' }')
+ prnt('#if Py_GIL_DISABLED')
+ prnt(' PyUnstable_Module_SetGIL(lib, Py_MOD_GIL_NOT_USED);')
+ prnt('#endif')
+ prnt(' return lib;')
+ prnt('}')
+ prnt()
+ prnt('#else')
+ prnt()
+ prnt('PyMODINIT_FUNC')
+ prnt('init%s(void)' % modname)
+ prnt('{')
+ prnt(' PyObject *lib;')
+ prnt(' lib = Py_InitModule("%s", _cffi_methods);' % modname)
+ prnt(' if (lib == NULL)')
+ prnt(' return;')
+ prnt(' if (%s < 0 || _cffi_init() < 0)' % (constants,))
+ prnt(' return;')
+ prnt(' return;')
+ prnt('}')
+ prnt()
+ prnt('#endif')
+
+ def load_library(self, flags=None):
+ # XXX review all usages of 'self' here!
+ # import it as a new extension module
+ imp.acquire_lock()
+ try:
+ if hasattr(sys, "getdlopenflags"):
+ previous_flags = sys.getdlopenflags()
+ try:
+ if hasattr(sys, "setdlopenflags") and flags is not None:
+ sys.setdlopenflags(flags)
+ module = imp.load_dynamic(self.verifier.get_module_name(),
+ self.verifier.modulefilename)
+ except ImportError as e:
+ error = "importing %r: %s" % (self.verifier.modulefilename, e)
+ raise VerificationError(error)
+ finally:
+ if hasattr(sys, "setdlopenflags"):
+ sys.setdlopenflags(previous_flags)
+ finally:
+ imp.release_lock()
+ #
+ # call loading_cpy_struct() to get the struct layout inferred by
+ # the C compiler
+ self._load(module, 'loading')
+ #
+        # the C code will need the <ctype> objects. Collect them in
+ # order in a list.
+ revmapping = dict([(value, key)
+ for (key, value) in self._typesdict.items()])
+ lst = [revmapping[i] for i in range(len(revmapping))]
+ lst = list(map(self.ffi._get_cached_btype, lst))
+ #
+ # build the FFILibrary class and instance and call _cffi_setup().
+ # this will set up some fields like '_cffi_types', and only then
+ # it will invoke the chained list of functions that will really
+        # build (notably) the constant objects, as <cdata> objects, and
+        # store them as attributes on the 'library' object.
+ class FFILibrary(object):
+ _cffi_python_module = module
+ _cffi_ffi = self.ffi
+ _cffi_dir = []
+ def __dir__(self):
+ return FFILibrary._cffi_dir + list(self.__dict__)
+ library = FFILibrary()
+ if module._cffi_setup(lst, VerificationError, library):
+ import warnings
+ warnings.warn("reimporting %r might overwrite older definitions"
+ % (self.verifier.get_module_name()))
+ #
+ # finally, call the loaded_cpy_xxx() functions. This will perform
+ # the final adjustments, like copying the Python->C wrapper
+ # functions from the module to the 'library' object, and setting
+ # up the FFILibrary class with properties for the global C variables.
+ self._load(module, 'loaded', library=library)
+ module._cffi_original_ffi = self.ffi
+ module._cffi_types_of_builtin_funcs = self._types_of_builtin_functions
+ return library
+
+ def _get_declarations(self):
+ lst = [(key, tp) for (key, (tp, qual)) in
+ self.ffi._parser._declarations.items()]
+ lst.sort()
+ return lst
+
+ def _generate(self, step_name):
+ for name, tp in self._get_declarations():
+ kind, realname = name.split(' ', 1)
+ try:
+ method = getattr(self, '_generate_cpy_%s_%s' % (kind,
+ step_name))
+ except AttributeError:
+ raise VerificationError(
+ "not implemented in verify(): %r" % name)
+ try:
+ method(tp, realname)
+ except Exception as e:
+ model.attach_exception_info(e, name)
+ raise
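+    # Declaration keys are strings like 'function foo' or 'struct bar';
+    # both _generate() and _load() split them into (kind, realname) to
+    # pick the handler, e.g. '_generate_cpy_function_decl' for
+    # step_name == 'decl'.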
+
+ def _load(self, module, step_name, **kwds):
+ for name, tp in self._get_declarations():
+ kind, realname = name.split(' ', 1)
+ method = getattr(self, '_%s_cpy_%s' % (step_name, kind))
+ try:
+ method(tp, realname, module, **kwds)
+ except Exception as e:
+ model.attach_exception_info(e, name)
+ raise
+
+ def _generate_nothing(self, tp, name):
+ pass
+
+ def _loaded_noop(self, tp, name, module, **kwds):
+ pass
+
+ # ----------
+
+ def _convert_funcarg_to_c(self, tp, fromvar, tovar, errcode):
+ extraarg = ''
+ if isinstance(tp, model.PrimitiveType):
+ if tp.is_integer_type() and tp.name != '_Bool':
+ converter = '_cffi_to_c_int'
+ extraarg = ', %s' % tp.name
+ elif tp.is_complex_type():
+ raise VerificationError(
+ "not implemented in verify(): complex types")
+ else:
+ converter = '(%s)_cffi_to_c_%s' % (tp.get_c_name(''),
+ tp.name.replace(' ', '_'))
+ errvalue = '-1'
+ #
+ elif isinstance(tp, model.PointerType):
+ self._convert_funcarg_to_c_ptr_or_array(tp, fromvar,
+ tovar, errcode)
+ return
+ #
+ elif isinstance(tp, (model.StructOrUnion, model.EnumType)):
+ # a struct (not a struct pointer) as a function argument
+ self._prnt(' if (_cffi_to_c((char *)&%s, _cffi_type(%d), %s) < 0)'
+ % (tovar, self._gettypenum(tp), fromvar))
+ self._prnt(' %s;' % errcode)
+ return
+ #
+ elif isinstance(tp, model.FunctionPtrType):
+ converter = '(%s)_cffi_to_c_pointer' % tp.get_c_name('')
+ extraarg = ', _cffi_type(%d)' % self._gettypenum(tp)
+ errvalue = 'NULL'
+ #
+ else:
+ raise NotImplementedError(tp)
+ #
+ self._prnt(' %s = %s(%s%s);' % (tovar, converter, fromvar, extraarg))
+ self._prnt(' if (%s == (%s)%s && PyErr_Occurred())' % (
+ tovar, tp.get_c_name(''), errvalue))
+ self._prnt(' %s;' % errcode)
+
+ def _extra_local_variables(self, tp, localvars, freelines):
+ if isinstance(tp, model.PointerType):
+ localvars.add('Py_ssize_t datasize')
+ localvars.add('struct _cffi_freeme_s *large_args_free = NULL')
+ freelines.add('if (large_args_free != NULL)'
+ ' _cffi_free_array_arguments(large_args_free);')
+
+ def _convert_funcarg_to_c_ptr_or_array(self, tp, fromvar, tovar, errcode):
+ self._prnt(' datasize = _cffi_prepare_pointer_call_argument(')
+ self._prnt(' _cffi_type(%d), %s, (char **)&%s);' % (
+ self._gettypenum(tp), fromvar, tovar))
+ self._prnt(' if (datasize != 0) {')
+ self._prnt(' %s = ((size_t)datasize) <= 640 ? '
+ 'alloca((size_t)datasize) : NULL;' % (tovar,))
+ self._prnt(' if (_cffi_convert_array_argument(_cffi_type(%d), %s, '
+ '(char **)&%s,' % (self._gettypenum(tp), fromvar, tovar))
+ self._prnt(' datasize, &large_args_free) < 0)')
+ self._prnt(' %s;' % errcode)
+ self._prnt(' }')
+
+ def _convert_expr_from_c(self, tp, var, context):
+ if isinstance(tp, model.PrimitiveType):
+ if tp.is_integer_type() and tp.name != '_Bool':
+ return '_cffi_from_c_int(%s, %s)' % (var, tp.name)
+ elif tp.name != 'long double':
+ return '_cffi_from_c_%s(%s)' % (tp.name.replace(' ', '_'), var)
+ else:
+ return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % (
+ var, self._gettypenum(tp))
+ elif isinstance(tp, (model.PointerType, model.FunctionPtrType)):
+ return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % (
+ var, self._gettypenum(tp))
+ elif isinstance(tp, model.ArrayType):
+ return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % (
+ var, self._gettypenum(model.PointerType(tp.item)))
+ elif isinstance(tp, model.StructOrUnion):
+ if tp.fldnames is None:
+ raise TypeError("'%s' is used as %s, but is opaque" % (
+ tp._get_c_name(), context))
+ return '_cffi_from_c_struct((char *)&%s, _cffi_type(%d))' % (
+ var, self._gettypenum(tp))
+ elif isinstance(tp, model.EnumType):
+ return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % (
+ var, self._gettypenum(tp))
+ else:
+ raise NotImplementedError(tp)
+
+ # ----------
+ # typedefs: generates no code so far
+
+ _generate_cpy_typedef_collecttype = _generate_nothing
+ _generate_cpy_typedef_decl = _generate_nothing
+ _generate_cpy_typedef_method = _generate_nothing
+ _loading_cpy_typedef = _loaded_noop
+ _loaded_cpy_typedef = _loaded_noop
+
+ # ----------
+ # function declarations
+
+ def _generate_cpy_function_collecttype(self, tp, name):
+ assert isinstance(tp, model.FunctionPtrType)
+ if tp.ellipsis:
+ self._do_collect_type(tp)
+ else:
+ # don't call _do_collect_type(tp) in this common case,
+ # otherwise test_autofilled_struct_as_argument fails
+ for type in tp.args:
+ self._do_collect_type(type)
+ self._do_collect_type(tp.result)
+
+ def _generate_cpy_function_decl(self, tp, name):
+ assert isinstance(tp, model.FunctionPtrType)
+ if tp.ellipsis:
+ # cannot support vararg functions better than this: check for its
+ # exact type (including the fixed arguments), and build it as a
+ # constant function pointer (no CPython wrapper)
+ self._generate_cpy_const(False, name, tp)
+ return
+ prnt = self._prnt
+ numargs = len(tp.args)
+ if numargs == 0:
+ argname = 'noarg'
+ elif numargs == 1:
+ argname = 'arg0'
+ else:
+ argname = 'args'
+ prnt('static PyObject *')
+ prnt('_cffi_f_%s(PyObject *self, PyObject *%s)' % (name, argname))
+ prnt('{')
+ #
+ context = 'argument of %s' % name
+ for i, type in enumerate(tp.args):
+ prnt(' %s;' % type.get_c_name(' x%d' % i, context))
+ #
+ localvars = set()
+ freelines = set()
+ for type in tp.args:
+ self._extra_local_variables(type, localvars, freelines)
+ for decl in sorted(localvars):
+ prnt(' %s;' % (decl,))
+ #
+ if not isinstance(tp.result, model.VoidType):
+ result_code = 'result = '
+ context = 'result of %s' % name
+ prnt(' %s;' % tp.result.get_c_name(' result', context))
+ prnt(' PyObject *pyresult;')
+ else:
+ result_code = ''
+ #
+ if len(tp.args) > 1:
+ rng = range(len(tp.args))
+ for i in rng:
+ prnt(' PyObject *arg%d;' % i)
+ prnt()
+ prnt(' if (!PyArg_ParseTuple(args, "%s:%s", %s))' % (
+ 'O' * numargs, name, ', '.join(['&arg%d' % i for i in rng])))
+ prnt(' return NULL;')
+ prnt()
+ #
+ for i, type in enumerate(tp.args):
+ self._convert_funcarg_to_c(type, 'arg%d' % i, 'x%d' % i,
+ 'return NULL')
+ prnt()
+ #
+ prnt(' Py_BEGIN_ALLOW_THREADS')
+ prnt(' _cffi_restore_errno();')
+ prnt(' { %s%s(%s); }' % (
+ result_code, name,
+ ', '.join(['x%d' % i for i in range(len(tp.args))])))
+ prnt(' _cffi_save_errno();')
+ prnt(' Py_END_ALLOW_THREADS')
+ prnt()
+ #
+ prnt(' (void)self; /* unused */')
+ if numargs == 0:
+ prnt(' (void)noarg; /* unused */')
+ if result_code:
+ prnt(' pyresult = %s;' %
+ self._convert_expr_from_c(tp.result, 'result', 'result type'))
+ for freeline in freelines:
+ prnt(' ' + freeline)
+ prnt(' return pyresult;')
+ else:
+ for freeline in freelines:
+ prnt(' ' + freeline)
+ prnt(' Py_INCREF(Py_None);')
+ prnt(' return Py_None;')
+ prnt('}')
+ prnt()
+
+ def _generate_cpy_function_method(self, tp, name):
+ if tp.ellipsis:
+ return
+ numargs = len(tp.args)
+ if numargs == 0:
+ meth = 'METH_NOARGS'
+ elif numargs == 1:
+ meth = 'METH_O'
+ else:
+ meth = 'METH_VARARGS'
+ self._prnt(' {"%s", _cffi_f_%s, %s, NULL},' % (name, name, meth))
+
+ _loading_cpy_function = _loaded_noop
+
+ def _loaded_cpy_function(self, tp, name, module, library):
+ if tp.ellipsis:
+ return
+ func = getattr(module, name)
+ setattr(library, name, func)
+ self._types_of_builtin_functions[func] = tp
+
+ # ----------
+ # named structs
+
+ _generate_cpy_struct_collecttype = _generate_nothing
+ def _generate_cpy_struct_decl(self, tp, name):
+ assert name == tp.name
+ self._generate_struct_or_union_decl(tp, 'struct', name)
+ def _generate_cpy_struct_method(self, tp, name):
+ self._generate_struct_or_union_method(tp, 'struct', name)
+ def _loading_cpy_struct(self, tp, name, module):
+ self._loading_struct_or_union(tp, 'struct', name, module)
+ def _loaded_cpy_struct(self, tp, name, module, **kwds):
+ self._loaded_struct_or_union(tp)
+
+ _generate_cpy_union_collecttype = _generate_nothing
+ def _generate_cpy_union_decl(self, tp, name):
+ assert name == tp.name
+ self._generate_struct_or_union_decl(tp, 'union', name)
+ def _generate_cpy_union_method(self, tp, name):
+ self._generate_struct_or_union_method(tp, 'union', name)
+ def _loading_cpy_union(self, tp, name, module):
+ self._loading_struct_or_union(tp, 'union', name, module)
+ def _loaded_cpy_union(self, tp, name, module, **kwds):
+ self._loaded_struct_or_union(tp)
+
+ def _generate_struct_or_union_decl(self, tp, prefix, name):
+ if tp.fldnames is None:
+ return # nothing to do with opaque structs
+ checkfuncname = '_cffi_check_%s_%s' % (prefix, name)
+ layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name)
+ cname = ('%s %s' % (prefix, name)).strip()
+ #
+ prnt = self._prnt
+ prnt('static void %s(%s *p)' % (checkfuncname, cname))
+ prnt('{')
+ prnt(' /* only to generate compile-time warnings or errors */')
+ prnt(' (void)p;')
+ for fname, ftype, fbitsize, fqual in tp.enumfields():
+ if (isinstance(ftype, model.PrimitiveType)
+ and ftype.is_integer_type()) or fbitsize >= 0:
+ # accept all integers, but complain on float or double
+ prnt(' (void)((p->%s) << 1);' % fname)
+ else:
+ # only accept exactly the type declared.
+ try:
+ prnt(' { %s = &p->%s; (void)tmp; }' % (
+ ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual),
+ fname))
+ except VerificationError as e:
+ prnt(' /* %s */' % str(e)) # cannot verify it, ignore
+ prnt('}')
+ prnt('static PyObject *')
+ prnt('%s(PyObject *self, PyObject *noarg)' % (layoutfuncname,))
+ prnt('{')
+ prnt(' struct _cffi_aligncheck { char x; %s y; };' % cname)
+ prnt(' static Py_ssize_t nums[] = {')
+ prnt(' sizeof(%s),' % cname)
+ prnt(' offsetof(struct _cffi_aligncheck, y),')
+ for fname, ftype, fbitsize, fqual in tp.enumfields():
+ if fbitsize >= 0:
+ continue # xxx ignore fbitsize for now
+ prnt(' offsetof(%s, %s),' % (cname, fname))
+ if isinstance(ftype, model.ArrayType) and ftype.length is None:
+ prnt(' 0, /* %s */' % ftype._get_c_name())
+ else:
+ prnt(' sizeof(((%s *)0)->%s),' % (cname, fname))
+ prnt(' -1')
+ prnt(' };')
+ prnt(' (void)self; /* unused */')
+ prnt(' (void)noarg; /* unused */')
+ prnt(' return _cffi_get_struct_layout(nums);')
+ prnt(' /* the next line is not executed, but compiled */')
+ prnt(' %s(0);' % (checkfuncname,))
+ prnt('}')
+ prnt()
+
+ def _generate_struct_or_union_method(self, tp, prefix, name):
+ if tp.fldnames is None:
+ return # nothing to do with opaque structs
+ layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name)
+ self._prnt(' {"%s", %s, METH_NOARGS, NULL},' % (layoutfuncname,
+ layoutfuncname))
+
+ def _loading_struct_or_union(self, tp, prefix, name, module):
+ if tp.fldnames is None:
+ return # nothing to do with opaque structs
+ layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name)
+ #
+ function = getattr(module, layoutfuncname)
+ layout = function()
+ if isinstance(tp, model.StructOrUnion) and tp.partial:
+ # use the function()'s sizes and offsets to guide the
+ # layout of the struct
+ totalsize = layout[0]
+ totalalignment = layout[1]
+ fieldofs = layout[2::2]
+ fieldsize = layout[3::2]
+ tp.force_flatten()
+ assert len(fieldofs) == len(fieldsize) == len(tp.fldnames)
+ tp.fixedlayout = fieldofs, fieldsize, totalsize, totalalignment
+ else:
+ cname = ('%s %s' % (prefix, name)).strip()
+ self._struct_pending_verification[tp] = layout, cname
+
+ def _loaded_struct_or_union(self, tp):
+ if tp.fldnames is None:
+ return # nothing to do with opaque structs
+ self.ffi._get_cached_btype(tp) # force 'fixedlayout' to be considered
+
+ if tp in self._struct_pending_verification:
+ # check that the layout sizes and offsets match the real ones
+ def check(realvalue, expectedvalue, msg):
+ if realvalue != expectedvalue:
+ raise VerificationError(
+ "%s (we have %d, but C compiler says %d)"
+ % (msg, expectedvalue, realvalue))
+ ffi = self.ffi
+ BStruct = ffi._get_cached_btype(tp)
+ layout, cname = self._struct_pending_verification.pop(tp)
+ check(layout[0], ffi.sizeof(BStruct), "wrong total size")
+ check(layout[1], ffi.alignof(BStruct), "wrong total alignment")
+ i = 2
+ for fname, ftype, fbitsize, fqual in tp.enumfields():
+ if fbitsize >= 0:
+ continue # xxx ignore fbitsize for now
+ check(layout[i], ffi.offsetof(BStruct, fname),
+ "wrong offset for field %r" % (fname,))
+ if layout[i+1] != 0:
+ BField = ffi._get_cached_btype(ftype)
+ check(layout[i+1], ffi.sizeof(BField),
+ "wrong size for field %r" % (fname,))
+ i += 2
+ assert i == len(layout)
+
+ # ----------
+ # 'anonymous' declarations. These are produced for anonymous structs
+ # or unions; the 'name' is obtained by a typedef.
+
+ _generate_cpy_anonymous_collecttype = _generate_nothing
+
+ def _generate_cpy_anonymous_decl(self, tp, name):
+ if isinstance(tp, model.EnumType):
+ self._generate_cpy_enum_decl(tp, name, '')
+ else:
+ self._generate_struct_or_union_decl(tp, '', name)
+
+ def _generate_cpy_anonymous_method(self, tp, name):
+ if not isinstance(tp, model.EnumType):
+ self._generate_struct_or_union_method(tp, '', name)
+
+ def _loading_cpy_anonymous(self, tp, name, module):
+ if isinstance(tp, model.EnumType):
+ self._loading_cpy_enum(tp, name, module)
+ else:
+ self._loading_struct_or_union(tp, '', name, module)
+
+ def _loaded_cpy_anonymous(self, tp, name, module, **kwds):
+ if isinstance(tp, model.EnumType):
+ self._loaded_cpy_enum(tp, name, module, **kwds)
+ else:
+ self._loaded_struct_or_union(tp)
+
+ # ----------
+ # constants, likely declared with '#define'
+
+ def _generate_cpy_const(self, is_int, name, tp=None, category='const',
+ vartp=None, delayed=True, size_too=False,
+ check_value=None):
+ prnt = self._prnt
+ funcname = '_cffi_%s_%s' % (category, name)
+ prnt('static int %s(PyObject *lib)' % funcname)
+ prnt('{')
+ prnt(' PyObject *o;')
+ prnt(' int res;')
+ if not is_int:
+ prnt(' %s;' % (vartp or tp).get_c_name(' i', name))
+ else:
+ assert category == 'const'
+ #
+ if check_value is not None:
+ self._check_int_constant_value(name, check_value)
+ #
+ if not is_int:
+ if category == 'var':
+ realexpr = '&' + name
+ else:
+ realexpr = name
+ prnt(' i = (%s);' % (realexpr,))
+ prnt(' o = %s;' % (self._convert_expr_from_c(tp, 'i',
+ 'variable type'),))
+ assert delayed
+ else:
+ prnt(' o = _cffi_from_c_int_const(%s);' % name)
+ prnt(' if (o == NULL)')
+ prnt(' return -1;')
+ if size_too:
+ prnt(' {')
+ prnt(' PyObject *o1 = o;')
+ prnt(' o = Py_BuildValue("On", o1, (Py_ssize_t)sizeof(%s));'
+ % (name,))
+ prnt(' Py_DECREF(o1);')
+ prnt(' if (o == NULL)')
+ prnt(' return -1;')
+ prnt(' }')
+ prnt(' res = PyObject_SetAttrString(lib, "%s", o);' % name)
+ prnt(' Py_DECREF(o);')
+ prnt(' if (res < 0)')
+ prnt(' return -1;')
+ prnt(' return %s;' % self._chained_list_constants[delayed])
+ self._chained_list_constants[delayed] = funcname + '(lib)'
+ prnt('}')
+ prnt()
+
+ def _generate_cpy_constant_collecttype(self, tp, name):
+ is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type()
+ if not is_int:
+ self._do_collect_type(tp)
+
+ def _generate_cpy_constant_decl(self, tp, name):
+ is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type()
+ self._generate_cpy_const(is_int, name, tp)
+
+ _generate_cpy_constant_method = _generate_nothing
+ _loading_cpy_constant = _loaded_noop
+ _loaded_cpy_constant = _loaded_noop
+
+ # ----------
+ # enums
+
+ def _check_int_constant_value(self, name, value, err_prefix=''):
+ prnt = self._prnt
+ if value <= 0:
+ prnt(' if ((%s) > 0 || (long)(%s) != %dL) {' % (
+ name, name, value))
+ else:
+ prnt(' if ((%s) <= 0 || (unsigned long)(%s) != %dUL) {' % (
+ name, name, value))
+ prnt(' char buf[64];')
+ prnt(' if ((%s) <= 0)' % name)
+ prnt(' snprintf(buf, 63, "%%ld", (long)(%s));' % name)
+ prnt(' else')
+ prnt(' snprintf(buf, 63, "%%lu", (unsigned long)(%s));' %
+ name)
+ prnt(' PyErr_Format(_cffi_VerificationError,')
+ prnt(' "%s%s has the real value %s, not %s",')
+ prnt(' "%s", "%s", buf, "%d");' % (
+ err_prefix, name, value))
+ prnt(' return -1;')
+ prnt(' }')
+
+ def _enum_funcname(self, prefix, name):
+ # "$enum_$1" => "___D_enum____D_1"
+ name = name.replace('$', '___D_')
+ return '_cffi_e_%s_%s' % (prefix, name)
+
+ def _generate_cpy_enum_decl(self, tp, name, prefix='enum'):
+ if tp.partial:
+ for enumerator in tp.enumerators:
+ self._generate_cpy_const(True, enumerator, delayed=False)
+ return
+ #
+ funcname = self._enum_funcname(prefix, name)
+ prnt = self._prnt
+ prnt('static int %s(PyObject *lib)' % funcname)
+ prnt('{')
+ for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues):
+ self._check_int_constant_value(enumerator, enumvalue,
+ "enum %s: " % name)
+ prnt(' return %s;' % self._chained_list_constants[True])
+ self._chained_list_constants[True] = funcname + '(lib)'
+ prnt('}')
+ prnt()
+
+ _generate_cpy_enum_collecttype = _generate_nothing
+ _generate_cpy_enum_method = _generate_nothing
+
+ def _loading_cpy_enum(self, tp, name, module):
+ if tp.partial:
+ enumvalues = [getattr(module, enumerator)
+ for enumerator in tp.enumerators]
+ tp.enumvalues = tuple(enumvalues)
+ tp.partial_resolved = True
+
+ def _loaded_cpy_enum(self, tp, name, module, library):
+ for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues):
+ setattr(library, enumerator, enumvalue)
+
+ # ----------
+ # macros: for now only for integers
+
+ def _generate_cpy_macro_decl(self, tp, name):
+ if tp == '...':
+ check_value = None
+ else:
+ check_value = tp # an integer
+ self._generate_cpy_const(True, name, check_value=check_value)
+
+ _generate_cpy_macro_collecttype = _generate_nothing
+ _generate_cpy_macro_method = _generate_nothing
+ _loading_cpy_macro = _loaded_noop
+ _loaded_cpy_macro = _loaded_noop
+
+ # ----------
+ # global variables
+
+ def _generate_cpy_variable_collecttype(self, tp, name):
+ if isinstance(tp, model.ArrayType):
+ tp_ptr = model.PointerType(tp.item)
+ else:
+ tp_ptr = model.PointerType(tp)
+ self._do_collect_type(tp_ptr)
+
+ def _generate_cpy_variable_decl(self, tp, name):
+ if isinstance(tp, model.ArrayType):
+ tp_ptr = model.PointerType(tp.item)
+ self._generate_cpy_const(False, name, tp, vartp=tp_ptr,
+ size_too = tp.length_is_unknown())
+ else:
+ tp_ptr = model.PointerType(tp)
+ self._generate_cpy_const(False, name, tp_ptr, category='var')
+
+ _generate_cpy_variable_method = _generate_nothing
+ _loading_cpy_variable = _loaded_noop
+
+ def _loaded_cpy_variable(self, tp, name, module, library):
+ value = getattr(library, name)
+ if isinstance(tp, model.ArrayType): # int a[5] is "constant" in the
+ # sense that "a=..." is forbidden
+ if tp.length_is_unknown():
+ assert isinstance(value, tuple)
+ (value, size) = value
+ BItemType = self.ffi._get_cached_btype(tp.item)
+ length, rest = divmod(size, self.ffi.sizeof(BItemType))
+ if rest != 0:
+ raise VerificationError(
+ "bad size: %r does not seem to be an array of %s" %
+ (name, tp.item))
+ tp = tp.resolve_length(length)
+ # 'value' is a <cdata 'type *'> which we have to replace with
+ # a <cdata 'type[N]'> if the N is actually known
+ if tp.length is not None:
+ BArray = self.ffi._get_cached_btype(tp)
+ value = self.ffi.cast(BArray, value)
+ setattr(library, name, value)
+ return
+ # remove ptr=<cdata 'type *'> from the library instance, and replace
+ # it by a property on the class, which reads/writes into ptr[0].
+ ptr = value
+ delattr(library, name)
+ def getter(library):
+ return ptr[0]
+ def setter(library, value):
+ ptr[0] = value
+ setattr(type(library), name, property(getter, setter))
+ type(library)._cffi_dir.append(name)
+
+ # ----------
+
+ def _generate_setup_custom(self):
+ prnt = self._prnt
+ prnt('static int _cffi_setup_custom(PyObject *lib)')
+ prnt('{')
+ prnt(' return %s;' % self._chained_list_constants[True])
+ prnt('}')
+
+cffimod_header = r'''
+#include <Python.h>
+#include <stddef.h>
+
+/* this block of #ifs should be kept exactly identical between
+ c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py
+ and cffi/_cffi_include.h */
+#if defined(_MSC_VER)
+# include <malloc.h> /* for alloca() */
+# if _MSC_VER < 1600 /* MSVC < 2010 */
+ typedef __int8 int8_t;
+ typedef __int16 int16_t;
+ typedef __int32 int32_t;
+ typedef __int64 int64_t;
+ typedef unsigned __int8 uint8_t;
+ typedef unsigned __int16 uint16_t;
+ typedef unsigned __int32 uint32_t;
+ typedef unsigned __int64 uint64_t;
+ typedef __int8 int_least8_t;
+ typedef __int16 int_least16_t;
+ typedef __int32 int_least32_t;
+ typedef __int64 int_least64_t;
+ typedef unsigned __int8 uint_least8_t;
+ typedef unsigned __int16 uint_least16_t;
+ typedef unsigned __int32 uint_least32_t;
+ typedef unsigned __int64 uint_least64_t;
+ typedef __int8 int_fast8_t;
+ typedef __int16 int_fast16_t;
+ typedef __int32 int_fast32_t;
+ typedef __int64 int_fast64_t;
+ typedef unsigned __int8 uint_fast8_t;
+ typedef unsigned __int16 uint_fast16_t;
+ typedef unsigned __int32 uint_fast32_t;
+ typedef unsigned __int64 uint_fast64_t;
+ typedef __int64 intmax_t;
+ typedef unsigned __int64 uintmax_t;
+# else
+# include <stdint.h>
+# endif
+# if _MSC_VER < 1800 /* MSVC < 2013 */
+# ifndef __cplusplus
+ typedef unsigned char _Bool;
+# endif
+# endif
+# define _cffi_float_complex_t _Fcomplex /* include <complex.h> for it */
+# define _cffi_double_complex_t _Dcomplex /* include <complex.h> for it */
+#else
+# include <stdint.h>
+# if (defined (__SVR4) && defined (__sun)) || defined(_AIX) || defined(__hpux)
+# include <alloca.h>
+# endif
+# define _cffi_float_complex_t float _Complex
+# define _cffi_double_complex_t double _Complex
+#endif
+
+#if PY_MAJOR_VERSION < 3
+# undef PyCapsule_CheckExact
+# undef PyCapsule_GetPointer
+# define PyCapsule_CheckExact(capsule) (PyCObject_Check(capsule))
+# define PyCapsule_GetPointer(capsule, name) \
+ (PyCObject_AsVoidPtr(capsule))
+#endif
+
+#if PY_MAJOR_VERSION >= 3
+# define PyInt_FromLong PyLong_FromLong
+#endif
+
+#define _cffi_from_c_double PyFloat_FromDouble
+#define _cffi_from_c_float PyFloat_FromDouble
+#define _cffi_from_c_long PyInt_FromLong
+#define _cffi_from_c_ulong PyLong_FromUnsignedLong
+#define _cffi_from_c_longlong PyLong_FromLongLong
+#define _cffi_from_c_ulonglong PyLong_FromUnsignedLongLong
+#define _cffi_from_c__Bool PyBool_FromLong
+
+#define _cffi_to_c_double PyFloat_AsDouble
+#define _cffi_to_c_float PyFloat_AsDouble
+
+#define _cffi_from_c_int_const(x) \
+ (((x) > 0) ? \
+ ((unsigned long long)(x) <= (unsigned long long)LONG_MAX) ? \
+ PyInt_FromLong((long)(x)) : \
+ PyLong_FromUnsignedLongLong((unsigned long long)(x)) : \
+ ((long long)(x) >= (long long)LONG_MIN) ? \
+ PyInt_FromLong((long)(x)) : \
+ PyLong_FromLongLong((long long)(x)))
+
+#define _cffi_from_c_int(x, type) \
+ (((type)-1) > 0 ? /* unsigned */ \
+ (sizeof(type) < sizeof(long) ? \
+ PyInt_FromLong((long)x) : \
+ sizeof(type) == sizeof(long) ? \
+ PyLong_FromUnsignedLong((unsigned long)x) : \
+ PyLong_FromUnsignedLongLong((unsigned long long)x)) : \
+ (sizeof(type) <= sizeof(long) ? \
+ PyInt_FromLong((long)x) : \
+ PyLong_FromLongLong((long long)x)))
+
+#define _cffi_to_c_int(o, type) \
+ ((type)( \
+ sizeof(type) == 1 ? (((type)-1) > 0 ? (type)_cffi_to_c_u8(o) \
+ : (type)_cffi_to_c_i8(o)) : \
+ sizeof(type) == 2 ? (((type)-1) > 0 ? (type)_cffi_to_c_u16(o) \
+ : (type)_cffi_to_c_i16(o)) : \
+ sizeof(type) == 4 ? (((type)-1) > 0 ? (type)_cffi_to_c_u32(o) \
+ : (type)_cffi_to_c_i32(o)) : \
+ sizeof(type) == 8 ? (((type)-1) > 0 ? (type)_cffi_to_c_u64(o) \
+ : (type)_cffi_to_c_i64(o)) : \
+ (Py_FatalError("unsupported size for type " #type), (type)0)))
+
+#define _cffi_to_c_i8 \
+ ((int(*)(PyObject *))_cffi_exports[1])
+#define _cffi_to_c_u8 \
+ ((int(*)(PyObject *))_cffi_exports[2])
+#define _cffi_to_c_i16 \
+ ((int(*)(PyObject *))_cffi_exports[3])
+#define _cffi_to_c_u16 \
+ ((int(*)(PyObject *))_cffi_exports[4])
+#define _cffi_to_c_i32 \
+ ((int(*)(PyObject *))_cffi_exports[5])
+#define _cffi_to_c_u32 \
+ ((unsigned int(*)(PyObject *))_cffi_exports[6])
+#define _cffi_to_c_i64 \
+ ((long long(*)(PyObject *))_cffi_exports[7])
+#define _cffi_to_c_u64 \
+ ((unsigned long long(*)(PyObject *))_cffi_exports[8])
+#define _cffi_to_c_char \
+ ((int(*)(PyObject *))_cffi_exports[9])
+#define _cffi_from_c_pointer \
+ ((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[10])
+#define _cffi_to_c_pointer \
+ ((char *(*)(PyObject *, CTypeDescrObject *))_cffi_exports[11])
+#define _cffi_get_struct_layout \
+ ((PyObject *(*)(Py_ssize_t[]))_cffi_exports[12])
+#define _cffi_restore_errno \
+ ((void(*)(void))_cffi_exports[13])
+#define _cffi_save_errno \
+ ((void(*)(void))_cffi_exports[14])
+#define _cffi_from_c_char \
+ ((PyObject *(*)(char))_cffi_exports[15])
+#define _cffi_from_c_deref \
+ ((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[16])
+#define _cffi_to_c \
+ ((int(*)(char *, CTypeDescrObject *, PyObject *))_cffi_exports[17])
+#define _cffi_from_c_struct \
+ ((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[18])
+#define _cffi_to_c_wchar_t \
+ ((wchar_t(*)(PyObject *))_cffi_exports[19])
+#define _cffi_from_c_wchar_t \
+ ((PyObject *(*)(wchar_t))_cffi_exports[20])
+#define _cffi_to_c_long_double \
+ ((long double(*)(PyObject *))_cffi_exports[21])
+#define _cffi_to_c__Bool \
+ ((_Bool(*)(PyObject *))_cffi_exports[22])
+#define _cffi_prepare_pointer_call_argument \
+ ((Py_ssize_t(*)(CTypeDescrObject *, PyObject *, char **))_cffi_exports[23])
+#define _cffi_convert_array_from_object \
+ ((int(*)(char *, CTypeDescrObject *, PyObject *))_cffi_exports[24])
+#define _CFFI_NUM_EXPORTS 25
+
+typedef struct _ctypedescr CTypeDescrObject;
+
+static void *_cffi_exports[_CFFI_NUM_EXPORTS];
+static PyObject *_cffi_types, *_cffi_VerificationError;
+
+static int _cffi_setup_custom(PyObject *lib); /* forward */
+
+static PyObject *_cffi_setup(PyObject *self, PyObject *args)
+{
+ PyObject *library;
+ int was_alive = (_cffi_types != NULL);
+ (void)self; /* unused */
+ if (!PyArg_ParseTuple(args, "OOO", &_cffi_types, &_cffi_VerificationError,
+ &library))
+ return NULL;
+ Py_INCREF(_cffi_types);
+ Py_INCREF(_cffi_VerificationError);
+ if (_cffi_setup_custom(library) < 0)
+ return NULL;
+ return PyBool_FromLong(was_alive);
+}
+
+union _cffi_union_alignment_u {
+ unsigned char m_char;
+ unsigned short m_short;
+ unsigned int m_int;
+ unsigned long m_long;
+ unsigned long long m_longlong;
+ float m_float;
+ double m_double;
+ long double m_longdouble;
+};
+
+struct _cffi_freeme_s {
+ struct _cffi_freeme_s *next;
+ union _cffi_union_alignment_u alignment;
+};
+
+#ifdef __GNUC__
+ __attribute__((unused))
+#endif
+static int _cffi_convert_array_argument(CTypeDescrObject *ctptr, PyObject *arg,
+ char **output_data, Py_ssize_t datasize,
+ struct _cffi_freeme_s **freeme)
+{
+ char *p;
+ if (datasize < 0)
+ return -1;
+
+ p = *output_data;
+ if (p == NULL) {
+ struct _cffi_freeme_s *fp = (struct _cffi_freeme_s *)PyObject_Malloc(
+ offsetof(struct _cffi_freeme_s, alignment) + (size_t)datasize);
+ if (fp == NULL)
+ return -1;
+ fp->next = *freeme;
+ *freeme = fp;
+ p = *output_data = (char *)&fp->alignment;
+ }
+ memset((void *)p, 0, (size_t)datasize);
+ return _cffi_convert_array_from_object(p, ctptr, arg);
+}
+
+#ifdef __GNUC__
+ __attribute__((unused))
+#endif
+static void _cffi_free_array_arguments(struct _cffi_freeme_s *freeme)
+{
+ do {
+ void *p = (void *)freeme;
+ freeme = freeme->next;
+ PyObject_Free(p);
+ } while (freeme != NULL);
+}
+
+static int _cffi_init(void)
+{
+ PyObject *module, *c_api_object = NULL;
+
+ module = PyImport_ImportModule("_cffi_backend");
+ if (module == NULL)
+ goto failure;
+
+ c_api_object = PyObject_GetAttrString(module, "_C_API");
+ if (c_api_object == NULL)
+ goto failure;
+ if (!PyCapsule_CheckExact(c_api_object)) {
+ PyErr_SetNone(PyExc_ImportError);
+ goto failure;
+ }
+ memcpy(_cffi_exports, PyCapsule_GetPointer(c_api_object, "cffi"),
+ _CFFI_NUM_EXPORTS * sizeof(void *));
+
+ Py_DECREF(module);
+ Py_DECREF(c_api_object);
+ return 0;
+
+ failure:
+ Py_XDECREF(module);
+ Py_XDECREF(c_api_object);
+ return -1;
+}
+
+#define _cffi_type(num) ((CTypeDescrObject *)PyList_GET_ITEM(_cffi_types, num))
+
+/**********/
+'''
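For orientation, a minimal sketch of the deprecated entry point this engine backs: `ffi.verify()` compiles the preamble into a real CPython extension whose `_cffi_setup()` and per-function wrappers are exactly the code emitted above. This assumes cffi is installed and a C compiler is available; `verify()` is a real (now deprecated) API.

```python
# Minimal sketch of the deprecated ffi.verify() flow backed by this engine.
import cffi

ffi = cffi.FFI()
ffi.cdef("int add(int x, int y);")           # declarations to verify
lib = ffi.verify("""
    int add(int x, int y) { return x + y; }
""")                                          # compiles an extension module
assert lib.add(2, 3) == 5                     # calls the generated wrapper
```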
diff --git a/Backend/venv/lib/python3.12/site-packages/cffi/vengine_gen.py b/Backend/venv/lib/python3.12/site-packages/cffi/vengine_gen.py
new file mode 100644
index 00000000..bffc8212
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cffi/vengine_gen.py
@@ -0,0 +1,679 @@
+#
+# DEPRECATED: implementation for ffi.verify()
+#
+import sys, os
+import types
+
+from . import model
+from .error import VerificationError
+
+
+class VGenericEngine(object):
+ _class_key = 'g'
+ _gen_python_module = False
+
+ def __init__(self, verifier):
+ self.verifier = verifier
+ self.ffi = verifier.ffi
+ self.export_symbols = []
+ self._struct_pending_verification = {}
+
+ def patch_extension_kwds(self, kwds):
+ # add 'export_symbols' to the dictionary. Note that we add the
+ # list before filling it. When we fill it, it will thus also show
+ # up in kwds['export_symbols'].
+ kwds.setdefault('export_symbols', self.export_symbols)
+
+ def find_module(self, module_name, path, so_suffixes):
+ for so_suffix in so_suffixes:
+ basename = module_name + so_suffix
+ if path is None:
+ path = sys.path
+ for dirname in path:
+ filename = os.path.join(dirname, basename)
+ if os.path.isfile(filename):
+ return filename
+
+ def collect_types(self):
+ pass # not needed in the generic engine
+
+ def _prnt(self, what=''):
+ self._f.write(what + '\n')
+
+ def write_source_to_f(self):
+ prnt = self._prnt
+ # first paste some standard set of lines that are mostly '#include'
+ prnt(cffimod_header)
+ # then paste the C source given by the user, verbatim.
+ prnt(self.verifier.preamble)
+ #
+ # call generate_gen_xxx_decl(), for every xxx found from
+ # ffi._parser._declarations. This generates all the functions.
+ self._generate('decl')
+ #
+ # on Windows, distutils insists on putting init_cffi_xyz in
+ # 'export_symbols', so instead of fighting it, just give up and
+ # give it one
+ if sys.platform == 'win32':
+ if sys.version_info >= (3,):
+ prefix = 'PyInit_'
+ else:
+ prefix = 'init'
+ modname = self.verifier.get_module_name()
+ prnt("void %s%s(void) { }\n" % (prefix, modname))
+
+ def load_library(self, flags=0):
+ # import it with the CFFI backend
+ backend = self.ffi._backend
+ # needs to make a path that contains '/', on Posix
+ filename = os.path.join(os.curdir, self.verifier.modulefilename)
+ module = backend.load_library(filename, flags)
+ #
+ # call loading_gen_struct() to get the struct layout inferred by
+ # the C compiler
+ self._load(module, 'loading')
+
+ # build the FFILibrary class and instance, this is a module subclass
+ # because modules are expected to have usually-constant-attributes and
+ # in PyPy this means the JIT is able to treat attributes as constant,
+ # which we want.
+ class FFILibrary(types.ModuleType):
+ _cffi_generic_module = module
+ _cffi_ffi = self.ffi
+ _cffi_dir = []
+ def __dir__(self):
+ return FFILibrary._cffi_dir
+ library = FFILibrary("")
+ #
+ # finally, call the loaded_gen_xxx() functions. This will set
+ # up the 'library' object.
+ self._load(module, 'loaded', library=library)
+ return library
+
+ def _get_declarations(self):
+ lst = [(key, tp) for (key, (tp, qual)) in
+ self.ffi._parser._declarations.items()]
+ lst.sort()
+ return lst
+
+ def _generate(self, step_name):
+ for name, tp in self._get_declarations():
+ kind, realname = name.split(' ', 1)
+ try:
+ method = getattr(self, '_generate_gen_%s_%s' % (kind,
+ step_name))
+ except AttributeError:
+ raise VerificationError(
+ "not implemented in verify(): %r" % name)
+ try:
+ method(tp, realname)
+ except Exception as e:
+ model.attach_exception_info(e, name)
+ raise
+
+ def _load(self, module, step_name, **kwds):
+ for name, tp in self._get_declarations():
+ kind, realname = name.split(' ', 1)
+ method = getattr(self, '_%s_gen_%s' % (step_name, kind))
+ try:
+ method(tp, realname, module, **kwds)
+ except Exception as e:
+ model.attach_exception_info(e, name)
+ raise
+
+ def _generate_nothing(self, tp, name):
+ pass
+
+ def _loaded_noop(self, tp, name, module, **kwds):
+ pass
+
+ # ----------
+ # typedefs: generates no code so far
+
+ _generate_gen_typedef_decl = _generate_nothing
+ _loading_gen_typedef = _loaded_noop
+ _loaded_gen_typedef = _loaded_noop
+
+ # ----------
+ # function declarations
+
+ def _generate_gen_function_decl(self, tp, name):
+ assert isinstance(tp, model.FunctionPtrType)
+ if tp.ellipsis:
+ # cannot support vararg functions better than this: check for its
+ # exact type (including the fixed arguments), and build it as a
+ # constant function pointer (no _cffi_f_%s wrapper)
+ self._generate_gen_const(False, name, tp)
+ return
+ prnt = self._prnt
+ numargs = len(tp.args)
+ argnames = []
+ for i, type in enumerate(tp.args):
+ indirection = ''
+ if isinstance(type, model.StructOrUnion):
+ indirection = '*'
+ argnames.append('%sx%d' % (indirection, i))
+ context = 'argument of %s' % name
+ arglist = [type.get_c_name(' %s' % arg, context)
+ for type, arg in zip(tp.args, argnames)]
+ tpresult = tp.result
+ if isinstance(tpresult, model.StructOrUnion):
+ arglist.insert(0, tpresult.get_c_name(' *r', context))
+ tpresult = model.void_type
+ arglist = ', '.join(arglist) or 'void'
+ wrappername = '_cffi_f_%s' % name
+ self.export_symbols.append(wrappername)
+ if tp.abi:
+ abi = tp.abi + ' '
+ else:
+ abi = ''
+ funcdecl = ' %s%s(%s)' % (abi, wrappername, arglist)
+ context = 'result of %s' % name
+ prnt(tpresult.get_c_name(funcdecl, context))
+ prnt('{')
+ #
+ if isinstance(tp.result, model.StructOrUnion):
+ result_code = '*r = '
+ elif not isinstance(tp.result, model.VoidType):
+ result_code = 'return '
+ else:
+ result_code = ''
+ prnt(' %s%s(%s);' % (result_code, name, ', '.join(argnames)))
+ prnt('}')
+ prnt()
+
+ _loading_gen_function = _loaded_noop
+
+ def _loaded_gen_function(self, tp, name, module, library):
+ assert isinstance(tp, model.FunctionPtrType)
+ if tp.ellipsis:
+ newfunction = self._load_constant(False, tp, name, module)
+ else:
+ indirections = []
+ base_tp = tp
+ if (any(isinstance(typ, model.StructOrUnion) for typ in tp.args)
+ or isinstance(tp.result, model.StructOrUnion)):
+ indirect_args = []
+ for i, typ in enumerate(tp.args):
+ if isinstance(typ, model.StructOrUnion):
+ typ = model.PointerType(typ)
+ indirections.append((i, typ))
+ indirect_args.append(typ)
+ indirect_result = tp.result
+ if isinstance(indirect_result, model.StructOrUnion):
+ if indirect_result.fldtypes is None:
+ raise TypeError("'%s' is used as result type, "
+ "but is opaque" % (
+ indirect_result._get_c_name(),))
+ indirect_result = model.PointerType(indirect_result)
+ indirect_args.insert(0, indirect_result)
+ indirections.insert(0, ("result", indirect_result))
+ indirect_result = model.void_type
+ tp = model.FunctionPtrType(tuple(indirect_args),
+ indirect_result, tp.ellipsis)
+ BFunc = self.ffi._get_cached_btype(tp)
+ wrappername = '_cffi_f_%s' % name
+ newfunction = module.load_function(BFunc, wrappername)
+ for i, typ in indirections:
+ newfunction = self._make_struct_wrapper(newfunction, i, typ,
+ base_tp)
+ setattr(library, name, newfunction)
+ type(library)._cffi_dir.append(name)
+
+ def _make_struct_wrapper(self, oldfunc, i, tp, base_tp):
+ backend = self.ffi._backend
+ BType = self.ffi._get_cached_btype(tp)
+ if i == "result":
+ ffi = self.ffi
+ def newfunc(*args):
+ res = ffi.new(BType)
+ oldfunc(res, *args)
+ return res[0]
+ else:
+ def newfunc(*args):
+ args = args[:i] + (backend.newp(BType, args[i]),) + args[i+1:]
+ return oldfunc(*args)
+ newfunc._cffi_base_type = base_tp
+ return newfunc
+
+ # ----------
+ # named structs
+
+ def _generate_gen_struct_decl(self, tp, name):
+ assert name == tp.name
+ self._generate_struct_or_union_decl(tp, 'struct', name)
+
+ def _loading_gen_struct(self, tp, name, module):
+ self._loading_struct_or_union(tp, 'struct', name, module)
+
+ def _loaded_gen_struct(self, tp, name, module, **kwds):
+ self._loaded_struct_or_union(tp)
+
+ def _generate_gen_union_decl(self, tp, name):
+ assert name == tp.name
+ self._generate_struct_or_union_decl(tp, 'union', name)
+
+ def _loading_gen_union(self, tp, name, module):
+ self._loading_struct_or_union(tp, 'union', name, module)
+
+ def _loaded_gen_union(self, tp, name, module, **kwds):
+ self._loaded_struct_or_union(tp)
+
+ def _generate_struct_or_union_decl(self, tp, prefix, name):
+ if tp.fldnames is None:
+ return # nothing to do with opaque structs
+ checkfuncname = '_cffi_check_%s_%s' % (prefix, name)
+ layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name)
+ cname = ('%s %s' % (prefix, name)).strip()
+ #
+ prnt = self._prnt
+ prnt('static void %s(%s *p)' % (checkfuncname, cname))
+ prnt('{')
+ prnt(' /* only to generate compile-time warnings or errors */')
+ prnt(' (void)p;')
+ for fname, ftype, fbitsize, fqual in tp.enumfields():
+ if (isinstance(ftype, model.PrimitiveType)
+ and ftype.is_integer_type()) or fbitsize >= 0:
+ # accept all integers, but complain on float or double
+ prnt(' (void)((p->%s) << 1);' % fname)
+ else:
+ # only accept exactly the type declared.
+ try:
+ prnt(' { %s = &p->%s; (void)tmp; }' % (
+ ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual),
+ fname))
+ except VerificationError as e:
+ prnt(' /* %s */' % str(e)) # cannot verify it, ignore
+ prnt('}')
+ self.export_symbols.append(layoutfuncname)
+ prnt('intptr_t %s(intptr_t i)' % (layoutfuncname,))
+ prnt('{')
+ prnt(' struct _cffi_aligncheck { char x; %s y; };' % cname)
+ prnt(' static intptr_t nums[] = {')
+ prnt(' sizeof(%s),' % cname)
+ prnt(' offsetof(struct _cffi_aligncheck, y),')
+ for fname, ftype, fbitsize, fqual in tp.enumfields():
+ if fbitsize >= 0:
+ continue # xxx ignore fbitsize for now
+ prnt(' offsetof(%s, %s),' % (cname, fname))
+ if isinstance(ftype, model.ArrayType) and ftype.length is None:
+ prnt(' 0, /* %s */' % ftype._get_c_name())
+ else:
+ prnt(' sizeof(((%s *)0)->%s),' % (cname, fname))
+ prnt(' -1')
+ prnt(' };')
+ prnt(' return nums[i];')
+ prnt(' /* the next line is not executed, but compiled */')
+ prnt(' %s(0);' % (checkfuncname,))
+ prnt('}')
+ prnt()
+
+ def _loading_struct_or_union(self, tp, prefix, name, module):
+ if tp.fldnames is None:
+ return # nothing to do with opaque structs
+ layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name)
+ #
+ BFunc = self.ffi._typeof_locked("intptr_t(*)(intptr_t)")[0]
+ function = module.load_function(BFunc, layoutfuncname)
+ layout = []
+ num = 0
+ while True:
+ x = function(num)
+ if x < 0: break
+ layout.append(x)
+ num += 1
+ if isinstance(tp, model.StructOrUnion) and tp.partial:
+ # use the function()'s sizes and offsets to guide the
+ # layout of the struct
+ totalsize = layout[0]
+ totalalignment = layout[1]
+ fieldofs = layout[2::2]
+ fieldsize = layout[3::2]
+ tp.force_flatten()
+ assert len(fieldofs) == len(fieldsize) == len(tp.fldnames)
+ tp.fixedlayout = fieldofs, fieldsize, totalsize, totalalignment
+ else:
+ cname = ('%s %s' % (prefix, name)).strip()
+ self._struct_pending_verification[tp] = layout, cname
+
+ def _loaded_struct_or_union(self, tp):
+ if tp.fldnames is None:
+ return # nothing to do with opaque structs
+ self.ffi._get_cached_btype(tp) # force 'fixedlayout' to be considered
+
+ if tp in self._struct_pending_verification:
+ # check that the layout sizes and offsets match the real ones
+ def check(realvalue, expectedvalue, msg):
+ if realvalue != expectedvalue:
+ raise VerificationError(
+ "%s (we have %d, but C compiler says %d)"
+ % (msg, expectedvalue, realvalue))
+ ffi = self.ffi
+ BStruct = ffi._get_cached_btype(tp)
+ layout, cname = self._struct_pending_verification.pop(tp)
+ check(layout[0], ffi.sizeof(BStruct), "wrong total size")
+ check(layout[1], ffi.alignof(BStruct), "wrong total alignment")
+ i = 2
+ for fname, ftype, fbitsize, fqual in tp.enumfields():
+ if fbitsize >= 0:
+ continue # xxx ignore fbitsize for now
+ check(layout[i], ffi.offsetof(BStruct, fname),
+ "wrong offset for field %r" % (fname,))
+ if layout[i+1] != 0:
+ BField = ffi._get_cached_btype(ftype)
+ check(layout[i+1], ffi.sizeof(BField),
+ "wrong size for field %r" % (fname,))
+ i += 2
+ assert i == len(layout)
+
+ # ----------
+ # 'anonymous' declarations. These are produced for anonymous structs
+ # or unions; the 'name' is obtained by a typedef.
+
+ def _generate_gen_anonymous_decl(self, tp, name):
+ if isinstance(tp, model.EnumType):
+ self._generate_gen_enum_decl(tp, name, '')
+ else:
+ self._generate_struct_or_union_decl(tp, '', name)
+
+ def _loading_gen_anonymous(self, tp, name, module):
+ if isinstance(tp, model.EnumType):
+ self._loading_gen_enum(tp, name, module, '')
+ else:
+ self._loading_struct_or_union(tp, '', name, module)
+
+ def _loaded_gen_anonymous(self, tp, name, module, **kwds):
+ if isinstance(tp, model.EnumType):
+ self._loaded_gen_enum(tp, name, module, **kwds)
+ else:
+ self._loaded_struct_or_union(tp)
+
+ # ----------
+ # constants, likely declared with '#define'
+
+ def _generate_gen_const(self, is_int, name, tp=None, category='const',
+ check_value=None):
+ prnt = self._prnt
+ funcname = '_cffi_%s_%s' % (category, name)
+ self.export_symbols.append(funcname)
+ if check_value is not None:
+ assert is_int
+ assert category == 'const'
+ prnt('int %s(char *out_error)' % funcname)
+ prnt('{')
+ self._check_int_constant_value(name, check_value)
+ prnt(' return 0;')
+ prnt('}')
+ elif is_int:
+ assert category == 'const'
+ prnt('int %s(long long *out_value)' % funcname)
+ prnt('{')
+ prnt(' *out_value = (long long)(%s);' % (name,))
+ prnt(' return (%s) <= 0;' % (name,))
+ prnt('}')
+ else:
+ assert tp is not None
+ assert check_value is None
+ if category == 'var':
+ ampersand = '&'
+ else:
+ ampersand = ''
+ extra = ''
+ if category == 'const' and isinstance(tp, model.StructOrUnion):
+ extra = 'const *'
+ ampersand = '&'
+ prnt(tp.get_c_name(' %s%s(void)' % (extra, funcname), name))
+ prnt('{')
+ prnt(' return (%s%s);' % (ampersand, name))
+ prnt('}')
+ prnt()
+
+ def _generate_gen_constant_decl(self, tp, name):
+ is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type()
+ self._generate_gen_const(is_int, name, tp)
+
+ _loading_gen_constant = _loaded_noop
+
+ def _load_constant(self, is_int, tp, name, module, check_value=None):
+ funcname = '_cffi_const_%s' % name
+ if check_value is not None:
+ assert is_int
+ self._load_known_int_constant(module, funcname)
+ value = check_value
+ elif is_int:
+ BType = self.ffi._typeof_locked("long long*")[0]
+ BFunc = self.ffi._typeof_locked("int(*)(long long*)")[0]
+ function = module.load_function(BFunc, funcname)
+ p = self.ffi.new(BType)
+ negative = function(p)
+ value = int(p[0])
+ if value < 0 and not negative:
+ BLongLong = self.ffi._typeof_locked("long long")[0]
+ value += (1 << (8*self.ffi.sizeof(BLongLong)))
+ else:
+ assert check_value is None
+ fntypeextra = '(*)(void)'
+ if isinstance(tp, model.StructOrUnion):
+ fntypeextra = '*' + fntypeextra
+ BFunc = self.ffi._typeof_locked(tp.get_c_name(fntypeextra, name))[0]
+ function = module.load_function(BFunc, funcname)
+ value = function()
+ if isinstance(tp, model.StructOrUnion):
+ value = value[0]
+ return value
+
+ def _loaded_gen_constant(self, tp, name, module, library):
+ is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type()
+ value = self._load_constant(is_int, tp, name, module)
+ setattr(library, name, value)
+ type(library)._cffi_dir.append(name)
+
+ # ----------
+ # enums
+
+ def _check_int_constant_value(self, name, value):
+ prnt = self._prnt
+ if value <= 0:
+ prnt(' if ((%s) > 0 || (long)(%s) != %dL) {' % (
+ name, name, value))
+ else:
+ prnt(' if ((%s) <= 0 || (unsigned long)(%s) != %dUL) {' % (
+ name, name, value))
+ prnt(' char buf[64];')
+ prnt(' if ((%s) <= 0)' % name)
+ prnt(' sprintf(buf, "%%ld", (long)(%s));' % name)
+ prnt(' else')
+ prnt(' sprintf(buf, "%%lu", (unsigned long)(%s));' %
+ name)
+ prnt(' sprintf(out_error, "%s has the real value %s, not %s",')
+ prnt(' "%s", buf, "%d");' % (name[:100], value))
+ prnt(' return -1;')
+ prnt(' }')
+
+ def _load_known_int_constant(self, module, funcname):
+ BType = self.ffi._typeof_locked("char[]")[0]
+ BFunc = self.ffi._typeof_locked("int(*)(char*)")[0]
+ function = module.load_function(BFunc, funcname)
+ p = self.ffi.new(BType, 256)
+ if function(p) < 0:
+ error = self.ffi.string(p)
+ if sys.version_info >= (3,):
+ error = str(error, 'utf-8')
+ raise VerificationError(error)
+
+ def _enum_funcname(self, prefix, name):
+ # "$enum_$1" => "___D_enum____D_1"
+ name = name.replace('$', '___D_')
+ return '_cffi_e_%s_%s' % (prefix, name)
+
+ def _generate_gen_enum_decl(self, tp, name, prefix='enum'):
+ if tp.partial:
+ for enumerator in tp.enumerators:
+ self._generate_gen_const(True, enumerator)
+ return
+ #
+ funcname = self._enum_funcname(prefix, name)
+ self.export_symbols.append(funcname)
+ prnt = self._prnt
+ prnt('int %s(char *out_error)' % funcname)
+ prnt('{')
+ for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues):
+ self._check_int_constant_value(enumerator, enumvalue)
+ prnt(' return 0;')
+ prnt('}')
+ prnt()
+
+ def _loading_gen_enum(self, tp, name, module, prefix='enum'):
+ if tp.partial:
+ enumvalues = [self._load_constant(True, tp, enumerator, module)
+ for enumerator in tp.enumerators]
+ tp.enumvalues = tuple(enumvalues)
+ tp.partial_resolved = True
+ else:
+ funcname = self._enum_funcname(prefix, name)
+ self._load_known_int_constant(module, funcname)
+
+ def _loaded_gen_enum(self, tp, name, module, library):
+ for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues):
+ setattr(library, enumerator, enumvalue)
+ type(library)._cffi_dir.append(enumerator)
+
+ # ----------
+ # macros: for now only for integers
+
+ def _generate_gen_macro_decl(self, tp, name):
+ if tp == '...':
+ check_value = None
+ else:
+ check_value = tp # an integer
+ self._generate_gen_const(True, name, check_value=check_value)
+
+ _loading_gen_macro = _loaded_noop
+
+ def _loaded_gen_macro(self, tp, name, module, library):
+ if tp == '...':
+ check_value = None
+ else:
+ check_value = tp # an integer
+ value = self._load_constant(True, tp, name, module,
+ check_value=check_value)
+ setattr(library, name, value)
+ type(library)._cffi_dir.append(name)
+
+ # ----------
+ # global variables
+
+ def _generate_gen_variable_decl(self, tp, name):
+ if isinstance(tp, model.ArrayType):
+ if tp.length_is_unknown():
+ prnt = self._prnt
+ funcname = '_cffi_sizeof_%s' % (name,)
+ self.export_symbols.append(funcname)
+ prnt("size_t %s(void)" % funcname)
+ prnt("{")
+ prnt(" return sizeof(%s);" % (name,))
+ prnt("}")
+ tp_ptr = model.PointerType(tp.item)
+ self._generate_gen_const(False, name, tp_ptr)
+ else:
+ tp_ptr = model.PointerType(tp)
+ self._generate_gen_const(False, name, tp_ptr, category='var')
+
+ _loading_gen_variable = _loaded_noop
+
+ def _loaded_gen_variable(self, tp, name, module, library):
+ if isinstance(tp, model.ArrayType): # int a[5] is "constant" in the
+ # sense that "a=..." is forbidden
+ if tp.length_is_unknown():
+ funcname = '_cffi_sizeof_%s' % (name,)
+ BFunc = self.ffi._typeof_locked('size_t(*)(void)')[0]
+ function = module.load_function(BFunc, funcname)
+ size = function()
+ BItemType = self.ffi._get_cached_btype(tp.item)
+ length, rest = divmod(size, self.ffi.sizeof(BItemType))
+ if rest != 0:
+ raise VerificationError(
+ "bad size: %r does not seem to be an array of %s" %
+ (name, tp.item))
+ tp = tp.resolve_length(length)
+ tp_ptr = model.PointerType(tp.item)
+ value = self._load_constant(False, tp_ptr, name, module)
+ # 'value' is a <cdata 'type *'> which we have to replace with
+ # a <cdata 'type[N]'> if the N is actually known
+ if tp.length is not None:
+ BArray = self.ffi._get_cached_btype(tp)
+ value = self.ffi.cast(BArray, value)
+ setattr(library, name, value)
+ type(library)._cffi_dir.append(name)
+ return
+ # remove ptr=<cdata 'type *'> from the library instance, and replace
+ # it by a property on the class, which reads/writes into ptr[0].
+ funcname = '_cffi_var_%s' % name
+ BFunc = self.ffi._typeof_locked(tp.get_c_name('*(*)(void)', name))[0]
+ function = module.load_function(BFunc, funcname)
+ ptr = function()
+ def getter(library):
+ return ptr[0]
+ def setter(library, value):
+ ptr[0] = value
+ setattr(type(library), name, property(getter, setter))
+ type(library)._cffi_dir.append(name)
+
+cffimod_header = r'''
+#include <stdio.h>
+#include <stddef.h>
+#include <stdarg.h>
+#include <errno.h>
+#include <sys/types.h> /* XXX for ssize_t on some platforms */
+
+/* this block of #ifs should be kept exactly identical between
+ c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py
+ and cffi/_cffi_include.h */
+#if defined(_MSC_VER)
+# include <malloc.h> /* for alloca() */
+# if _MSC_VER < 1600 /* MSVC < 2010 */
+ typedef __int8 int8_t;
+ typedef __int16 int16_t;
+ typedef __int32 int32_t;
+ typedef __int64 int64_t;
+ typedef unsigned __int8 uint8_t;
+ typedef unsigned __int16 uint16_t;
+ typedef unsigned __int32 uint32_t;
+ typedef unsigned __int64 uint64_t;
+ typedef __int8 int_least8_t;
+ typedef __int16 int_least16_t;
+ typedef __int32 int_least32_t;
+ typedef __int64 int_least64_t;
+ typedef unsigned __int8 uint_least8_t;
+ typedef unsigned __int16 uint_least16_t;
+ typedef unsigned __int32 uint_least32_t;
+ typedef unsigned __int64 uint_least64_t;
+ typedef __int8 int_fast8_t;
+ typedef __int16 int_fast16_t;
+ typedef __int32 int_fast32_t;
+ typedef __int64 int_fast64_t;
+ typedef unsigned __int8 uint_fast8_t;
+ typedef unsigned __int16 uint_fast16_t;
+ typedef unsigned __int32 uint_fast32_t;
+ typedef unsigned __int64 uint_fast64_t;
+ typedef __int64 intmax_t;
+ typedef unsigned __int64 uintmax_t;
+# else
+# include <stdint.h>
+# endif
+# if _MSC_VER < 1800 /* MSVC < 2013 */
+# ifndef __cplusplus
+ typedef unsigned char _Bool;
+# endif
+# endif
+# define _cffi_float_complex_t _Fcomplex /* include <complex.h> for it */
+# define _cffi_double_complex_t _Dcomplex /* include <complex.h> for it */
+#else
+# include <stdint.h>
+# if (defined (__SVR4) && defined (__sun)) || defined(_AIX) || defined(__hpux)
+# include <alloca.h>
+# endif
+# define _cffi_float_complex_t float _Complex
+# define _cffi_double_complex_t double _Complex
+#endif
+'''
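The generic engine above is selected automatically on PyPy, or whenever `ffi` is not backed by `_cffi_backend`; the test suite forces it via the `force_generic_engine` keyword, which `Verifier.__init__` accepts and the deprecated `ffi.verify()` forwards. A hedged sketch:

```python
# Sketch: forcing the dlopen-based generic engine instead of the
# CPython-specific one (force_generic_engine is a real Verifier kwarg).
import cffi

ffi = cffi.FFI()
ffi.cdef("double square(double x);")
lib = ffi.verify("double square(double x) { return x * x; }",
                 force_generic_engine=True)
assert lib.square(3.0) == 9.0
```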
diff --git a/Backend/venv/lib/python3.12/site-packages/cffi/verifier.py b/Backend/venv/lib/python3.12/site-packages/cffi/verifier.py
new file mode 100644
index 00000000..e392a2b7
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cffi/verifier.py
@@ -0,0 +1,306 @@
+#
+# DEPRECATED: implementation for ffi.verify()
+#
+import sys, os, binascii, shutil, io
+from . import __version_verifier_modules__
+from . import ffiplatform
+from .error import VerificationError
+
+if sys.version_info >= (3, 3):
+ import importlib.machinery
+ def _extension_suffixes():
+ return importlib.machinery.EXTENSION_SUFFIXES[:]
+else:
+ import imp
+ def _extension_suffixes():
+ return [suffix for suffix, _, type in imp.get_suffixes()
+ if type == imp.C_EXTENSION]
+
+
+if sys.version_info >= (3,):
+ NativeIO = io.StringIO
+else:
+ class NativeIO(io.BytesIO):
+ def write(self, s):
+ if isinstance(s, unicode):
+ s = s.encode('ascii')
+ super(NativeIO, self).write(s)
+
+
+class Verifier(object):
+
+ def __init__(self, ffi, preamble, tmpdir=None, modulename=None,
+ ext_package=None, tag='', force_generic_engine=False,
+ source_extension='.c', flags=None, relative_to=None, **kwds):
+ if ffi._parser._uses_new_feature:
+ raise VerificationError(
+ "feature not supported with ffi.verify(), but only "
+ "with ffi.set_source(): %s" % (ffi._parser._uses_new_feature,))
+ self.ffi = ffi
+ self.preamble = preamble
+ if not modulename:
+ flattened_kwds = ffiplatform.flatten(kwds)
+ vengine_class = _locate_engine_class(ffi, force_generic_engine)
+ self._vengine = vengine_class(self)
+ self._vengine.patch_extension_kwds(kwds)
+ self.flags = flags
+ self.kwds = self.make_relative_to(kwds, relative_to)
+ #
+ if modulename:
+ if tag:
+ raise TypeError("can't specify both 'modulename' and 'tag'")
+ else:
+ key = '\x00'.join(['%d.%d' % sys.version_info[:2],
+ __version_verifier_modules__,
+ preamble, flattened_kwds] +
+ ffi._cdefsources)
+ if sys.version_info >= (3,):
+ key = key.encode('utf-8')
+ k1 = hex(binascii.crc32(key[0::2]) & 0xffffffff)
+ k1 = k1.lstrip('0x').rstrip('L')
+ k2 = hex(binascii.crc32(key[1::2]) & 0xffffffff)
+ k2 = k2.lstrip('0').rstrip('L')
+ modulename = '_cffi_%s_%s%s%s' % (tag, self._vengine._class_key,
+ k1, k2)
+ suffix = _get_so_suffixes()[0]
+ self.tmpdir = tmpdir or _caller_dir_pycache()
+ self.sourcefilename = os.path.join(self.tmpdir, modulename + source_extension)
+ self.modulefilename = os.path.join(self.tmpdir, modulename + suffix)
+ self.ext_package = ext_package
+ self._has_source = False
+ self._has_module = False
+
+ def write_source(self, file=None):
+ """Write the C source code. It is produced in 'self.sourcefilename',
+ which can be tweaked beforehand."""
+ with self.ffi._lock:
+ if self._has_source and file is None:
+ raise VerificationError(
+ "source code already written")
+ self._write_source(file)
+
+ def compile_module(self):
+ """Write the C source code (if not done already) and compile it.
+ This produces a dynamic link library in 'self.modulefilename'."""
+ with self.ffi._lock:
+ if self._has_module:
+ raise VerificationError("module already compiled")
+ if not self._has_source:
+ self._write_source()
+ self._compile_module()
+
+ def load_library(self):
+ """Get a C module from this Verifier instance.
+ Returns an instance of a FFILibrary class that behaves like the
+ objects returned by ffi.dlopen(), but that delegates all
+ operations to the C module. If necessary, the C code is written
+ and compiled first.
+ """
+ with self.ffi._lock:
+ if not self._has_module:
+ self._locate_module()
+ if not self._has_module:
+ if not self._has_source:
+ self._write_source()
+ self._compile_module()
+ return self._load_library()
+
+ def get_module_name(self):
+ basename = os.path.basename(self.modulefilename)
+ # kill both the .so extension and the other .'s, as introduced
+ # by Python 3: 'basename.cpython-33m.so'
+ basename = basename.split('.', 1)[0]
+ # and the _d added in Python 2 debug builds --- but try to be
+ # conservative and not kill a legitimate _d
+ if basename.endswith('_d') and hasattr(sys, 'gettotalrefcount'):
+ basename = basename[:-2]
+ return basename
+
+ def get_extension(self):
+ if not self._has_source:
+ with self.ffi._lock:
+ if not self._has_source:
+ self._write_source()
+ sourcename = ffiplatform.maybe_relative_path(self.sourcefilename)
+ modname = self.get_module_name()
+ return ffiplatform.get_extension(sourcename, modname, **self.kwds)
+
+ def generates_python_module(self):
+ return self._vengine._gen_python_module
+
+ def make_relative_to(self, kwds, relative_to):
+ if relative_to and os.path.dirname(relative_to):
+ dirname = os.path.dirname(relative_to)
+ kwds = kwds.copy()
+ for key in ffiplatform.LIST_OF_FILE_NAMES:
+ if key in kwds:
+ lst = kwds[key]
+ if not isinstance(lst, (list, tuple)):
+ raise TypeError("keyword '%s' should be a list or tuple"
+ % (key,))
+ lst = [os.path.join(dirname, fn) for fn in lst]
+ kwds[key] = lst
+ return kwds
+
+ # ----------
+
+ def _locate_module(self):
+ if not os.path.isfile(self.modulefilename):
+ if self.ext_package:
+ try:
+ pkg = __import__(self.ext_package, None, None, ['__doc__'])
+ except ImportError:
+ return # cannot import the package itself, give up
+ # (e.g. it might be called differently before installation)
+ path = pkg.__path__
+ else:
+ path = None
+ filename = self._vengine.find_module(self.get_module_name(), path,
+ _get_so_suffixes())
+ if filename is None:
+ return
+ self.modulefilename = filename
+ self._vengine.collect_types()
+ self._has_module = True
+
+ def _write_source_to(self, file):
+ self._vengine._f = file
+ try:
+ self._vengine.write_source_to_f()
+ finally:
+ del self._vengine._f
+
+ def _write_source(self, file=None):
+ if file is not None:
+ self._write_source_to(file)
+ else:
+ # Write our source file to an in memory file.
+ f = NativeIO()
+ self._write_source_to(f)
+ source_data = f.getvalue()
+
+ # Determine if this matches the current file
+ if os.path.exists(self.sourcefilename):
+ with open(self.sourcefilename, "r") as fp:
+ needs_written = not (fp.read() == source_data)
+ else:
+ needs_written = True
+
+ # Actually write the file out if it doesn't match
+ if needs_written:
+ _ensure_dir(self.sourcefilename)
+ with open(self.sourcefilename, "w") as fp:
+ fp.write(source_data)
+
+ # Set this flag
+ self._has_source = True
+
+ def _compile_module(self):
+ # compile this C source
+ tmpdir = os.path.dirname(self.sourcefilename)
+ outputfilename = ffiplatform.compile(tmpdir, self.get_extension())
+ try:
+ same = ffiplatform.samefile(outputfilename, self.modulefilename)
+ except OSError:
+ same = False
+ if not same:
+ _ensure_dir(self.modulefilename)
+ shutil.move(outputfilename, self.modulefilename)
+ self._has_module = True
+
+ def _load_library(self):
+ assert self._has_module
+ if self.flags is not None:
+ return self._vengine.load_library(self.flags)
+ else:
+ return self._vengine.load_library()
+
+# ____________________________________________________________
+
+_FORCE_GENERIC_ENGINE = False # for tests
+
+def _locate_engine_class(ffi, force_generic_engine):
+ if _FORCE_GENERIC_ENGINE:
+ force_generic_engine = True
+ if not force_generic_engine:
+ if '__pypy__' in sys.builtin_module_names:
+ force_generic_engine = True
+ else:
+ try:
+ import _cffi_backend
+ except ImportError:
+ _cffi_backend = '?'
+ if ffi._backend is not _cffi_backend:
+ force_generic_engine = True
+ if force_generic_engine:
+ from . import vengine_gen
+ return vengine_gen.VGenericEngine
+ else:
+ from . import vengine_cpy
+ return vengine_cpy.VCPythonEngine
+
+# ____________________________________________________________
+
+_TMPDIR = None
+
+def _caller_dir_pycache():
+ if _TMPDIR:
+ return _TMPDIR
+ result = os.environ.get('CFFI_TMPDIR')
+ if result:
+ return result
+ filename = sys._getframe(2).f_code.co_filename
+ return os.path.abspath(os.path.join(os.path.dirname(filename),
+ '__pycache__'))
+
+def set_tmpdir(dirname):
+ """Set the temporary directory to use instead of __pycache__."""
+ global _TMPDIR
+ _TMPDIR = dirname
+
+def cleanup_tmpdir(tmpdir=None, keep_so=False):
+ """Clean up the temporary directory by removing all files in it
+ called `_cffi_*.{c,so}` as well as the `build` subdirectory."""
+ tmpdir = tmpdir or _caller_dir_pycache()
+ try:
+ filelist = os.listdir(tmpdir)
+ except OSError:
+ return
+ if keep_so:
+ suffix = '.c' # only remove .c files
+ else:
+ suffix = _get_so_suffixes()[0].lower()
+ for fn in filelist:
+ if fn.lower().startswith('_cffi_') and (
+ fn.lower().endswith(suffix) or fn.lower().endswith('.c')):
+ try:
+ os.unlink(os.path.join(tmpdir, fn))
+ except OSError:
+ pass
+ clean_dir = [os.path.join(tmpdir, 'build')]
+ for dir in clean_dir:
+ try:
+ for fn in os.listdir(dir):
+ fn = os.path.join(dir, fn)
+ if os.path.isdir(fn):
+ clean_dir.append(fn)
+ else:
+ os.unlink(fn)
+ except OSError:
+ pass
+
+def _get_so_suffixes():
+ suffixes = _extension_suffixes()
+ if not suffixes:
+ # bah, no C_EXTENSION available. Occurs on pypy without cpyext
+ if sys.platform == 'win32':
+ suffixes = [".pyd"]
+ else:
+ suffixes = [".so"]
+
+ return suffixes
+
+def _ensure_dir(filename):
+ dirname = os.path.dirname(filename)
+ if dirname and not os.path.isdir(dirname):
+ os.makedirs(dirname)
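`Verifier` can also be driven directly, which makes the write/compile/load steps above explicit; `set_tmpdir()` and `cleanup_tmpdir()` are the module-level helpers defined at the bottom of the file. A sketch under the assumption that a C compiler is available and that `/tmp/cffi_demo` is a writable scratch directory:

```python
# Sketch: using Verifier and the tmpdir helpers directly rather than
# through ffi.verify() (all names come from the file above).
import cffi
from cffi.verifier import Verifier, set_tmpdir, cleanup_tmpdir

ffi = cffi.FFI()
ffi.cdef("int answer(void);")
set_tmpdir("/tmp/cffi_demo")        # override the __pycache__ default
v = Verifier(ffi, "int answer(void) { return 42; }")
lib = v.load_library()              # writes the .c, compiles, then loads
assert lib.answer() == 42
cleanup_tmpdir()                    # removes the generated _cffi_*.c/.so
```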
diff --git a/Backend/venv/lib/python3.12/site-packages/click-8.3.1.dist-info/INSTALLER b/Backend/venv/lib/python3.12/site-packages/click-8.3.1.dist-info/INSTALLER
new file mode 100644
index 00000000..a1b589e3
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/click-8.3.1.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/Backend/venv/lib/python3.12/site-packages/click-8.3.1.dist-info/METADATA b/Backend/venv/lib/python3.12/site-packages/click-8.3.1.dist-info/METADATA
new file mode 100644
index 00000000..3f433afb
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/click-8.3.1.dist-info/METADATA
@@ -0,0 +1,84 @@
+Metadata-Version: 2.4
+Name: click
+Version: 8.3.1
+Summary: Composable command line interface toolkit
+Maintainer-email: Pallets
+Requires-Python: >=3.10
+Description-Content-Type: text/markdown
+License-Expression: BSD-3-Clause
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Typing :: Typed
+License-File: LICENSE.txt
+Requires-Dist: colorama; platform_system == 'Windows'
+Project-URL: Changes, https://click.palletsprojects.com/page/changes/
+Project-URL: Chat, https://discord.gg/pallets
+Project-URL: Documentation, https://click.palletsprojects.com/
+Project-URL: Donate, https://palletsprojects.com/donate
+Project-URL: Source, https://github.com/pallets/click/
+
+
+
+# Click
+
+Click is a Python package for creating beautiful command line interfaces
+in a composable way with as little code as necessary. It's the "Command
+Line Interface Creation Kit". It's highly configurable but comes with
+sensible defaults out of the box.
+
+It aims to make the process of writing command line tools quick and fun
+while also preventing any frustration caused by the inability to
+implement an intended CLI API.
+
+Click in three points:
+
+- Arbitrary nesting of commands
+- Automatic help page generation
+- Supports lazy loading of subcommands at runtime
+
+
+## A Simple Example
+
+```python
+import click
+
+@click.command()
+@click.option("--count", default=1, help="Number of greetings.")
+@click.option("--name", prompt="Your name", help="The person to greet.")
+def hello(count, name):
+ """Simple program that greets NAME for a total of COUNT times."""
+ for _ in range(count):
+ click.echo(f"Hello, {name}!")
+
+if __name__ == '__main__':
+ hello()
+```
+
+```
+$ python hello.py --count=3
+Your name: Click
+Hello, Click!
+Hello, Click!
+Hello, Click!
+```
+
+
+## Donate
+
+The Pallets organization develops and supports Click and other popular
+packages. In order to grow the community of contributors and users, and
+allow the maintainers to devote more time to the projects, [please
+donate today][].
+
+[please donate today]: https://palletsprojects.com/donate
+
+## Contributing
+
+See our [detailed contributing documentation][contrib] for many ways to
+contribute, including reporting issues, requesting features, asking or answering
+questions, and making PRs.
+
+[contrib]: https://palletsprojects.com/contributing/
+
diff --git a/Backend/venv/lib/python3.12/site-packages/click-8.3.1.dist-info/RECORD b/Backend/venv/lib/python3.12/site-packages/click-8.3.1.dist-info/RECORD
new file mode 100644
index 00000000..77e5c989
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/click-8.3.1.dist-info/RECORD
@@ -0,0 +1,40 @@
+click-8.3.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+click-8.3.1.dist-info/METADATA,sha256=XZeBrMAE0ghTE88SjfrSDuSyNCpBPplxJR1tbwD9oZg,2621
+click-8.3.1.dist-info/RECORD,,
+click-8.3.1.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
+click-8.3.1.dist-info/licenses/LICENSE.txt,sha256=morRBqOU6FO_4h9C9OctWSgZoigF2ZG18ydQKSkrZY0,1475
+click/__init__.py,sha256=6YyS1aeyknZ0LYweWozNZy0A9nZ_11wmYIhv3cbQrYo,4473
+click/__pycache__/__init__.cpython-312.pyc,,
+click/__pycache__/_compat.cpython-312.pyc,,
+click/__pycache__/_termui_impl.cpython-312.pyc,,
+click/__pycache__/_textwrap.cpython-312.pyc,,
+click/__pycache__/_utils.cpython-312.pyc,,
+click/__pycache__/_winconsole.cpython-312.pyc,,
+click/__pycache__/core.cpython-312.pyc,,
+click/__pycache__/decorators.cpython-312.pyc,,
+click/__pycache__/exceptions.cpython-312.pyc,,
+click/__pycache__/formatting.cpython-312.pyc,,
+click/__pycache__/globals.cpython-312.pyc,,
+click/__pycache__/parser.cpython-312.pyc,,
+click/__pycache__/shell_completion.cpython-312.pyc,,
+click/__pycache__/termui.cpython-312.pyc,,
+click/__pycache__/testing.cpython-312.pyc,,
+click/__pycache__/types.cpython-312.pyc,,
+click/__pycache__/utils.cpython-312.pyc,,
+click/_compat.py,sha256=v3xBZkFbvA1BXPRkFfBJc6-pIwPI7345m-kQEnpVAs4,18693
+click/_termui_impl.py,sha256=rgCb3On8X5A4200rA5L6i13u5iapmFer7sru57Jy6zA,27093
+click/_textwrap.py,sha256=BOae0RQ6vg3FkNgSJyOoGzG1meGMxJ_ukWVZKx_v-0o,1400
+click/_utils.py,sha256=kZwtTf5gMuCilJJceS2iTCvRvCY-0aN5rJq8gKw7p8g,943
+click/_winconsole.py,sha256=_vxUuUaxwBhoR0vUWCNuHY8VUefiMdCIyU2SXPqoF-A,8465
+click/core.py,sha256=U6Bfxt8GkjNDqyJ0HqXvluJHtyZ4sY5USAvM1Cdq7mQ,132105
+click/decorators.py,sha256=5P7abhJtAQYp_KHgjUvhMv464ERwOzrv2enNknlwHyQ,18461
+click/exceptions.py,sha256=8utf8w6V5hJXMnO_ic1FNrtbwuEn1NUu1aDwV8UqnG4,9954
+click/formatting.py,sha256=RVfwwr0rwWNpgGr8NaHodPzkIr7_tUyVh_nDdanLMNc,9730
+click/globals.py,sha256=gM-Nh6A4M0HB_SgkaF5M4ncGGMDHc_flHXu9_oh4GEU,1923
+click/parser.py,sha256=Q31pH0FlQZEq-UXE_ABRzlygEfvxPTuZbWNh4xfXmzw,19010
+click/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+click/shell_completion.py,sha256=Cc4GQUFuWpfQBa9sF5qXeeYI7n3tI_1k6ZdSn4BZbT0,20994
+click/termui.py,sha256=hqCEjNndU-nzW08nRAkBaVgfZp_FdCA9KxfIWlKYaMc,31037
+click/testing.py,sha256=EERbzcl1br0mW0qBS9EqkknfNfXB9WQEW0ELIpkvuSs,19102
+click/types.py,sha256=ek54BNSFwPKsqtfT7jsqcc4WHui8AIFVMKM4oVZIXhc,39927
+click/utils.py,sha256=gCUoewdAhA-QLBUUHxrLh4uj6m7T1WjZZMNPvR0I7YA,20257
diff --git a/Backend/venv/lib/python3.12/site-packages/click-8.3.1.dist-info/WHEEL b/Backend/venv/lib/python3.12/site-packages/click-8.3.1.dist-info/WHEEL
new file mode 100644
index 00000000..d8b9936d
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/click-8.3.1.dist-info/WHEEL
@@ -0,0 +1,4 @@
+Wheel-Version: 1.0
+Generator: flit 3.12.0
+Root-Is-Purelib: true
+Tag: py3-none-any
diff --git a/Backend/venv/lib/python3.12/site-packages/click-8.3.1.dist-info/licenses/LICENSE.txt b/Backend/venv/lib/python3.12/site-packages/click-8.3.1.dist-info/licenses/LICENSE.txt
new file mode 100644
index 00000000..d12a8491
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/click-8.3.1.dist-info/licenses/LICENSE.txt
@@ -0,0 +1,28 @@
+Copyright 2014 Pallets
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+1. Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+
+3. Neither the name of the copyright holder nor the names of its
+ contributors may be used to endorse or promote products derived from
+ this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
+PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
+TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/Backend/venv/lib/python3.12/site-packages/click/__init__.py b/Backend/venv/lib/python3.12/site-packages/click/__init__.py
new file mode 100644
index 00000000..1aa547c5
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/click/__init__.py
@@ -0,0 +1,123 @@
+"""
+Click is a simple Python module inspired by the stdlib optparse to make
+writing command line scripts fun. Unlike other modules, it's based
+around a simple API that does not come with too much magic and is
+composable.
+"""
+
+from __future__ import annotations
+
+from .core import Argument as Argument
+from .core import Command as Command
+from .core import CommandCollection as CommandCollection
+from .core import Context as Context
+from .core import Group as Group
+from .core import Option as Option
+from .core import Parameter as Parameter
+from .decorators import argument as argument
+from .decorators import command as command
+from .decorators import confirmation_option as confirmation_option
+from .decorators import group as group
+from .decorators import help_option as help_option
+from .decorators import make_pass_decorator as make_pass_decorator
+from .decorators import option as option
+from .decorators import pass_context as pass_context
+from .decorators import pass_obj as pass_obj
+from .decorators import password_option as password_option
+from .decorators import version_option as version_option
+from .exceptions import Abort as Abort
+from .exceptions import BadArgumentUsage as BadArgumentUsage
+from .exceptions import BadOptionUsage as BadOptionUsage
+from .exceptions import BadParameter as BadParameter
+from .exceptions import ClickException as ClickException
+from .exceptions import FileError as FileError
+from .exceptions import MissingParameter as MissingParameter
+from .exceptions import NoSuchOption as NoSuchOption
+from .exceptions import UsageError as UsageError
+from .formatting import HelpFormatter as HelpFormatter
+from .formatting import wrap_text as wrap_text
+from .globals import get_current_context as get_current_context
+from .termui import clear as clear
+from .termui import confirm as confirm
+from .termui import echo_via_pager as echo_via_pager
+from .termui import edit as edit
+from .termui import getchar as getchar
+from .termui import launch as launch
+from .termui import pause as pause
+from .termui import progressbar as progressbar
+from .termui import prompt as prompt
+from .termui import secho as secho
+from .termui import style as style
+from .termui import unstyle as unstyle
+from .types import BOOL as BOOL
+from .types import Choice as Choice
+from .types import DateTime as DateTime
+from .types import File as File
+from .types import FLOAT as FLOAT
+from .types import FloatRange as FloatRange
+from .types import INT as INT
+from .types import IntRange as IntRange
+from .types import ParamType as ParamType
+from .types import Path as Path
+from .types import STRING as STRING
+from .types import Tuple as Tuple
+from .types import UNPROCESSED as UNPROCESSED
+from .types import UUID as UUID
+from .utils import echo as echo
+from .utils import format_filename as format_filename
+from .utils import get_app_dir as get_app_dir
+from .utils import get_binary_stream as get_binary_stream
+from .utils import get_text_stream as get_text_stream
+from .utils import open_file as open_file
+
+
+def __getattr__(name: str) -> object:
+ import warnings
+
+ if name == "BaseCommand":
+ from .core import _BaseCommand
+
+ warnings.warn(
+ "'BaseCommand' is deprecated and will be removed in Click 9.0. Use"
+ " 'Command' instead.",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ return _BaseCommand
+
+ if name == "MultiCommand":
+ from .core import _MultiCommand
+
+ warnings.warn(
+ "'MultiCommand' is deprecated and will be removed in Click 9.0. Use"
+ " 'Group' instead.",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ return _MultiCommand
+
+ if name == "OptionParser":
+ from .parser import _OptionParser
+
+ warnings.warn(
+ "'OptionParser' is deprecated and will be removed in Click 9.0. The"
+ " old parser is available in 'optparse'.",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ return _OptionParser
+
+ if name == "__version__":
+ import importlib.metadata
+ import warnings
+
+ warnings.warn(
+ "The '__version__' attribute is deprecated and will be removed in"
+ " Click 9.1. Use feature detection or"
+ " 'importlib.metadata.version(\"click\")' instead.",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ return importlib.metadata.version("click")
+
+ raise AttributeError(name)
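+
+
+# Editor's note (illustrative, not part of click): the module-level
+# __getattr__ above (PEP 562) only runs for names that fail normal lookup,
+# so the deprecated aliases cost nothing unless actually touched:
+#
+#     import warnings
+#     import click
+#
+#     with warnings.catch_warnings(record=True) as caught:
+#         warnings.simplefilter("always")
+#         click.BaseCommand          # resolved via __getattr__
+#     assert caught[0].category is DeprecationWarning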
diff --git a/Backend/venv/lib/python3.12/site-packages/click/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/click/__pycache__/__init__.cpython-312.pyc
new file mode 100644
index 00000000..a39f5956
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/click/__pycache__/__init__.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/click/__pycache__/_compat.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/click/__pycache__/_compat.cpython-312.pyc
new file mode 100644
index 00000000..a9877932
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/click/__pycache__/_compat.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/click/__pycache__/_termui_impl.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/click/__pycache__/_termui_impl.cpython-312.pyc
new file mode 100644
index 00000000..5acce5c6
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/click/__pycache__/_termui_impl.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/click/__pycache__/_textwrap.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/click/__pycache__/_textwrap.cpython-312.pyc
new file mode 100644
index 00000000..607b1b6f
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/click/__pycache__/_textwrap.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/click/__pycache__/_utils.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/click/__pycache__/_utils.cpython-312.pyc
new file mode 100644
index 00000000..facb09f4
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/click/__pycache__/_utils.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/click/__pycache__/_winconsole.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/click/__pycache__/_winconsole.cpython-312.pyc
new file mode 100644
index 00000000..3e386996
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/click/__pycache__/_winconsole.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/click/__pycache__/core.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/click/__pycache__/core.cpython-312.pyc
new file mode 100644
index 00000000..91fee54f
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/click/__pycache__/core.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/click/__pycache__/decorators.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/click/__pycache__/decorators.cpython-312.pyc
new file mode 100644
index 00000000..bcfce62d
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/click/__pycache__/decorators.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/click/__pycache__/exceptions.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/click/__pycache__/exceptions.cpython-312.pyc
new file mode 100644
index 00000000..a261c107
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/click/__pycache__/exceptions.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/click/__pycache__/formatting.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/click/__pycache__/formatting.cpython-312.pyc
new file mode 100644
index 00000000..c2943963
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/click/__pycache__/formatting.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/click/__pycache__/globals.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/click/__pycache__/globals.cpython-312.pyc
new file mode 100644
index 00000000..eed6b381
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/click/__pycache__/globals.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/click/__pycache__/parser.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/click/__pycache__/parser.cpython-312.pyc
new file mode 100644
index 00000000..fe551f48
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/click/__pycache__/parser.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/click/__pycache__/shell_completion.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/click/__pycache__/shell_completion.cpython-312.pyc
new file mode 100644
index 00000000..540816e3
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/click/__pycache__/shell_completion.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/click/__pycache__/termui.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/click/__pycache__/termui.cpython-312.pyc
new file mode 100644
index 00000000..6398c5bc
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/click/__pycache__/termui.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/click/__pycache__/testing.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/click/__pycache__/testing.cpython-312.pyc
new file mode 100644
index 00000000..e294081a
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/click/__pycache__/testing.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/click/__pycache__/types.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/click/__pycache__/types.cpython-312.pyc
new file mode 100644
index 00000000..f7ae641f
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/click/__pycache__/types.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/click/__pycache__/utils.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/click/__pycache__/utils.cpython-312.pyc
new file mode 100644
index 00000000..107b79fe
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/click/__pycache__/utils.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/click/_compat.py b/Backend/venv/lib/python3.12/site-packages/click/_compat.py
new file mode 100644
index 00000000..f2726b93
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/click/_compat.py
@@ -0,0 +1,622 @@
+from __future__ import annotations
+
+import codecs
+import collections.abc as cabc
+import io
+import os
+import re
+import sys
+import typing as t
+from types import TracebackType
+from weakref import WeakKeyDictionary
+
+CYGWIN = sys.platform.startswith("cygwin")
+WIN = sys.platform.startswith("win")
+auto_wrap_for_ansi: t.Callable[[t.TextIO], t.TextIO] | None = None
+_ansi_re = re.compile(r"\033\[[;?0-9]*[a-zA-Z]")
+
+
+def _make_text_stream(
+ stream: t.BinaryIO,
+ encoding: str | None,
+ errors: str | None,
+ force_readable: bool = False,
+ force_writable: bool = False,
+) -> t.TextIO:
+ if encoding is None:
+ encoding = get_best_encoding(stream)
+ if errors is None:
+ errors = "replace"
+ return _NonClosingTextIOWrapper(
+ stream,
+ encoding,
+ errors,
+ line_buffering=True,
+ force_readable=force_readable,
+ force_writable=force_writable,
+ )
+
+
+def is_ascii_encoding(encoding: str) -> bool:
+ """Checks if a given encoding is ascii."""
+ try:
+ return codecs.lookup(encoding).name == "ascii"
+ except LookupError:
+ return False
+
+
+def get_best_encoding(stream: t.IO[t.Any]) -> str:
+ """Returns the default stream encoding if not found."""
+ rv = getattr(stream, "encoding", None) or sys.getdefaultencoding()
+ if is_ascii_encoding(rv):
+ return "utf-8"
+ return rv
+
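+# Editor's example (not part of click): a stream that declares ASCII is
+# upgraded, since ASCII would mangle most non-English output:
+#
+#     import io
+#     s = io.TextIOWrapper(io.BytesIO(), encoding="ascii")
+#     assert get_best_encoding(s) == "utf-8"
+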
+
+class _NonClosingTextIOWrapper(io.TextIOWrapper):
+ def __init__(
+ self,
+ stream: t.BinaryIO,
+ encoding: str | None,
+ errors: str | None,
+ force_readable: bool = False,
+ force_writable: bool = False,
+ **extra: t.Any,
+ ) -> None:
+ self._stream = stream = t.cast(
+ t.BinaryIO, _FixupStream(stream, force_readable, force_writable)
+ )
+ super().__init__(stream, encoding, errors, **extra)
+
+ def __del__(self) -> None:
+ try:
+ self.detach()
+ except Exception:
+ pass
+
+ def isatty(self) -> bool:
+ # https://bitbucket.org/pypy/pypy/issue/1803
+ return self._stream.isatty()
+
+
+class _FixupStream:
+ """The new io interface needs more from streams than streams
+ traditionally implement. As such, this fix-up code is necessary in
+ some circumstances.
+
+ The forcing of the readable and writable flags is there because some
+ tools put badly patched objects on sys (one such offender being certain
+ versions of jupyter notebook).
+ """
+
+ def __init__(
+ self,
+ stream: t.BinaryIO,
+ force_readable: bool = False,
+ force_writable: bool = False,
+ ):
+ self._stream = stream
+ self._force_readable = force_readable
+ self._force_writable = force_writable
+
+ def __getattr__(self, name: str) -> t.Any:
+ return getattr(self._stream, name)
+
+ def read1(self, size: int) -> bytes:
+ f = getattr(self._stream, "read1", None)
+
+ if f is not None:
+ return t.cast(bytes, f(size))
+
+ return self._stream.read(size)
+
+ def readable(self) -> bool:
+ if self._force_readable:
+ return True
+ x = getattr(self._stream, "readable", None)
+ if x is not None:
+ return t.cast(bool, x())
+ try:
+ self._stream.read(0)
+ except Exception:
+ return False
+ return True
+
+ def writable(self) -> bool:
+ if self._force_writable:
+ return True
+ x = getattr(self._stream, "writable", None)
+ if x is not None:
+ return t.cast(bool, x())
+ try:
+ self._stream.write(b"")
+ except Exception:
+ try:
+ self._stream.write(b"")
+ except Exception:
+ return False
+ return True
+
+ def seekable(self) -> bool:
+ x = getattr(self._stream, "seekable", None)
+ if x is not None:
+ return t.cast(bool, x())
+ try:
+ self._stream.seek(self._stream.tell())
+ except Exception:
+ return False
+ return True
+
+
+def _is_binary_reader(stream: t.IO[t.Any], default: bool = False) -> bool:
+ try:
+ return isinstance(stream.read(0), bytes)
+ except Exception:
+ # This happens in some cases where the stream was already
+ # closed. In this case, we assume the default.
+ return default
+
+
+def _is_binary_writer(stream: t.IO[t.Any], default: bool = False) -> bool:
+ try:
+ stream.write(b"")
+ except Exception:
+ try:
+ stream.write("")
+ return False
+ except Exception:
+ pass
+ return default
+ return True
+
+
+def _find_binary_reader(stream: t.IO[t.Any]) -> t.BinaryIO | None:
+ # We need to figure out if the given stream is already binary.
+ # This can happen because the official docs recommend detaching
+ # the streams to get binary streams. Some code might do this, so
+ # we need to deal with this case explicitly.
+ if _is_binary_reader(stream, False):
+ return t.cast(t.BinaryIO, stream)
+
+ buf = getattr(stream, "buffer", None)
+
+ # Same situation here; this time we assume that the buffer is
+ # actually binary in case it's closed.
+ if buf is not None and _is_binary_reader(buf, True):
+ return t.cast(t.BinaryIO, buf)
+
+ return None
+
+
+def _find_binary_writer(stream: t.IO[t.Any]) -> t.BinaryIO | None:
+ # We need to figure out if the given stream is already binary.
+ # This can happen because the official docs recommend detaching
+ # the streams to get binary streams. Some code might do this, so
+ # we need to deal with this case explicitly.
+ if _is_binary_writer(stream, False):
+ return t.cast(t.BinaryIO, stream)
+
+ buf = getattr(stream, "buffer", None)
+
+ # Same situation here; this time we assume that the buffer is
+ # actually binary in case it's closed.
+ if buf is not None and _is_binary_writer(buf, True):
+ return t.cast(t.BinaryIO, buf)
+
+ return None
+
+
+def _stream_is_misconfigured(stream: t.TextIO) -> bool:
+ """A stream is misconfigured if its encoding is ASCII."""
+ # If the stream does not have an encoding set, we assume it's set
+ # to ASCII. This appears to happen in certain unittest
+ # environments. It's not quite clear what the correct behavior is
+ # but this at least will force Click to recover somehow.
+ return is_ascii_encoding(getattr(stream, "encoding", None) or "ascii")
+
+
+def _is_compat_stream_attr(stream: t.TextIO, attr: str, value: str | None) -> bool:
+ """A stream attribute is compatible if it is equal to the
+ desired value or the desired value is unset and the attribute
+ has a value.
+ """
+ stream_value = getattr(stream, attr, None)
+ return stream_value == value or (value is None and stream_value is not None)
+
+
+def _is_compatible_text_stream(
+ stream: t.TextIO, encoding: str | None, errors: str | None
+) -> bool:
+ """Check if a stream's encoding and errors attributes are
+ compatible with the desired values.
+ """
+ return _is_compat_stream_attr(
+ stream, "encoding", encoding
+ ) and _is_compat_stream_attr(stream, "errors", errors)
+
+
+def _force_correct_text_stream(
+ text_stream: t.IO[t.Any],
+ encoding: str | None,
+ errors: str | None,
+ is_binary: t.Callable[[t.IO[t.Any], bool], bool],
+ find_binary: t.Callable[[t.IO[t.Any]], t.BinaryIO | None],
+ force_readable: bool = False,
+ force_writable: bool = False,
+) -> t.TextIO:
+ if is_binary(text_stream, False):
+ binary_reader = t.cast(t.BinaryIO, text_stream)
+ else:
+ text_stream = t.cast(t.TextIO, text_stream)
+ # If the stream looks compatible, and won't default to a
+ # misconfigured ascii encoding, return it as-is.
+ if _is_compatible_text_stream(text_stream, encoding, errors) and not (
+ encoding is None and _stream_is_misconfigured(text_stream)
+ ):
+ return text_stream
+
+ # Otherwise, get the underlying binary reader.
+ possible_binary_reader = find_binary(text_stream)
+
+ # If that's not possible, silently use the original reader
+ # and get mojibake instead of exceptions.
+ if possible_binary_reader is None:
+ return text_stream
+
+ binary_reader = possible_binary_reader
+
+ # Default errors to replace instead of strict in order to get
+ # something that works.
+ if errors is None:
+ errors = "replace"
+
+ # Wrap the binary stream in a text stream with the correct
+ # encoding parameters.
+ return _make_text_stream(
+ binary_reader,
+ encoding,
+ errors,
+ force_readable=force_readable,
+ force_writable=force_writable,
+ )
+
+
+def _force_correct_text_reader(
+ text_reader: t.IO[t.Any],
+ encoding: str | None,
+ errors: str | None,
+ force_readable: bool = False,
+) -> t.TextIO:
+ return _force_correct_text_stream(
+ text_reader,
+ encoding,
+ errors,
+ _is_binary_reader,
+ _find_binary_reader,
+ force_readable=force_readable,
+ )
+
+
+def _force_correct_text_writer(
+ text_writer: t.IO[t.Any],
+ encoding: str | None,
+ errors: str | None,
+ force_writable: bool = False,
+) -> t.TextIO:
+ return _force_correct_text_stream(
+ text_writer,
+ encoding,
+ errors,
+ _is_binary_writer,
+ _find_binary_writer,
+ force_writable=force_writable,
+ )
+
+
+def get_binary_stdin() -> t.BinaryIO:
+ reader = _find_binary_reader(sys.stdin)
+ if reader is None:
+ raise RuntimeError("Was not able to determine binary stream for sys.stdin.")
+ return reader
+
+
+def get_binary_stdout() -> t.BinaryIO:
+ writer = _find_binary_writer(sys.stdout)
+ if writer is None:
+ raise RuntimeError("Was not able to determine binary stream for sys.stdout.")
+ return writer
+
+
+def get_binary_stderr() -> t.BinaryIO:
+ writer = _find_binary_writer(sys.stderr)
+ if writer is None:
+ raise RuntimeError("Was not able to determine binary stream for sys.stderr.")
+ return writer
+
+
+def get_text_stdin(encoding: str | None = None, errors: str | None = None) -> t.TextIO:
+ rv = _get_windows_console_stream(sys.stdin, encoding, errors)
+ if rv is not None:
+ return rv
+ return _force_correct_text_reader(sys.stdin, encoding, errors, force_readable=True)
+
+
+def get_text_stdout(encoding: str | None = None, errors: str | None = None) -> t.TextIO:
+ rv = _get_windows_console_stream(sys.stdout, encoding, errors)
+ if rv is not None:
+ return rv
+ return _force_correct_text_writer(sys.stdout, encoding, errors, force_writable=True)
+
+
+def get_text_stderr(encoding: str | None = None, errors: str | None = None) -> t.TextIO:
+ rv = _get_windows_console_stream(sys.stderr, encoding, errors)
+ if rv is not None:
+ return rv
+ return _force_correct_text_writer(sys.stderr, encoding, errors, force_writable=True)
+
+
+def _wrap_io_open(
+ file: str | os.PathLike[str] | int,
+ mode: str,
+ encoding: str | None,
+ errors: str | None,
+) -> t.IO[t.Any]:
+ """Handles not passing ``encoding`` and ``errors`` in binary mode."""
+ if "b" in mode:
+ return open(file, mode)
+
+ return open(file, mode, encoding=encoding, errors=errors)
+
+
+def open_stream(
+ filename: str | os.PathLike[str],
+ mode: str = "r",
+ encoding: str | None = None,
+ errors: str | None = "strict",
+ atomic: bool = False,
+) -> tuple[t.IO[t.Any], bool]:
+ binary = "b" in mode
+ filename = os.fspath(filename)
+
+ # Standard streams first. These are simple because they ignore the
+ # atomic flag. Use fsdecode to handle Path("-").
+ if os.fsdecode(filename) == "-":
+ if any(m in mode for m in ["w", "a", "x"]):
+ if binary:
+ return get_binary_stdout(), False
+ return get_text_stdout(encoding=encoding, errors=errors), False
+ if binary:
+ return get_binary_stdin(), False
+ return get_text_stdin(encoding=encoding, errors=errors), False
+
+ # Non-atomic writes directly go out through the regular open functions.
+ if not atomic:
+ return _wrap_io_open(filename, mode, encoding, errors), True
+
+ # Some usability stuff for atomic writes
+ if "a" in mode:
+ raise ValueError(
+ "Appending to an existing file is not supported, because that"
+ " would involve an expensive `copy`-operation to a temporary"
+ " file. Open the file in normal `w`-mode and copy explicitly"
+ " if that's what you're after."
+ )
+ if "x" in mode:
+ raise ValueError("Use the `overwrite`-parameter instead.")
+ if "w" not in mode:
+ raise ValueError("Atomic writes only make sense with `w`-mode.")
+
+ # Atomic writes are more complicated. They work by opening a file
+ # as a proxy in the same folder and then using the fdopen
+ # functionality to wrap it in a Python file. Then we wrap it in an
+ # atomic file that moves the file over on close.
+ import errno
+ import random
+
+ try:
+ perm: int | None = os.stat(filename).st_mode
+ except OSError:
+ perm = None
+
+ flags = os.O_RDWR | os.O_CREAT | os.O_EXCL
+
+ if binary:
+ flags |= getattr(os, "O_BINARY", 0)
+
+ while True:
+ tmp_filename = os.path.join(
+ os.path.dirname(filename),
+ f".__atomic-write{random.randrange(1 << 32):08x}",
+ )
+ try:
+ fd = os.open(tmp_filename, flags, 0o666 if perm is None else perm)
+ break
+ except OSError as e:
+ if e.errno == errno.EEXIST or (
+ os.name == "nt"
+ and e.errno == errno.EACCES
+ and os.path.isdir(e.filename)
+ and os.access(e.filename, os.W_OK)
+ ):
+ continue
+ raise
+
+ if perm is not None:
+ os.chmod(tmp_filename, perm) # in case perm includes bits in umask
+
+ f = _wrap_io_open(fd, mode, encoding, errors)
+ af = _AtomicFile(f, tmp_filename, os.path.realpath(filename))
+ return t.cast(t.IO[t.Any], af), True
+
+
+class _AtomicFile:
+ def __init__(self, f: t.IO[t.Any], tmp_filename: str, real_filename: str) -> None:
+ self._f = f
+ self._tmp_filename = tmp_filename
+ self._real_filename = real_filename
+ self.closed = False
+
+ @property
+ def name(self) -> str:
+ return self._real_filename
+
+ def close(self, delete: bool = False) -> None:
+ if self.closed:
+ return
+ self._f.close()
+ os.replace(self._tmp_filename, self._real_filename)
+ self.closed = True
+
+ def __getattr__(self, name: str) -> t.Any:
+ return getattr(self._f, name)
+
+ def __enter__(self) -> _AtomicFile:
+ return self
+
+ def __exit__(
+ self,
+ exc_type: type[BaseException] | None,
+ exc_value: BaseException | None,
+ tb: TracebackType | None,
+ ) -> None:
+ self.close(delete=exc_type is not None)
+
+ def __repr__(self) -> str:
+ return repr(self._f)
+
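+# Editor's sketch (not part of click): the atomic path through
+# open_stream(). The data lands in a hidden temp file that os.replace()
+# moves over the target on close, so readers never observe a half-written
+# file; passing "-" as the filename would select stdin/stdout instead:
+#
+#     f, _should_close = open_stream("out.txt", "w", atomic=True)
+#     f.write("all or nothing\n")
+#     f.close()
+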
+
+def strip_ansi(value: str) -> str:
+ return _ansi_re.sub("", value)
+
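+# Editor's example: strip_ansi("\x1b[1;31mError\x1b[0m") == "Error"
+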
+
+def _is_jupyter_kernel_output(stream: t.IO[t.Any]) -> bool:
+ while isinstance(stream, (_FixupStream, _NonClosingTextIOWrapper)):
+ stream = stream._stream
+
+ return stream.__class__.__module__.startswith("ipykernel.")
+
+
+def should_strip_ansi(
+ stream: t.IO[t.Any] | None = None, color: bool | None = None
+) -> bool:
+ if color is None:
+ if stream is None:
+ stream = sys.stdin
+ return not isatty(stream) and not _is_jupyter_kernel_output(stream)
+ return not color
+
+
+# On Windows, wrap the output streams with colorama to support ANSI
+# color codes.
+# NOTE: double check is needed so mypy does not analyze this on Linux
+if sys.platform.startswith("win") and WIN:
+ from ._winconsole import _get_windows_console_stream
+
+ def _get_argv_encoding() -> str:
+ import locale
+
+ return locale.getpreferredencoding()
+
+ _ansi_stream_wrappers: cabc.MutableMapping[t.TextIO, t.TextIO] = WeakKeyDictionary()
+
+ def auto_wrap_for_ansi(stream: t.TextIO, color: bool | None = None) -> t.TextIO:
+ """Support ANSI color and style codes on Windows by wrapping a
+ stream with colorama.
+ """
+ try:
+ cached = _ansi_stream_wrappers.get(stream)
+ except Exception:
+ cached = None
+
+ if cached is not None:
+ return cached
+
+ import colorama
+
+ strip = should_strip_ansi(stream, color)
+ ansi_wrapper = colorama.AnsiToWin32(stream, strip=strip)
+ rv = t.cast(t.TextIO, ansi_wrapper.stream)
+ _write = rv.write
+
+ def _safe_write(s: str) -> int:
+ try:
+ return _write(s)
+ except BaseException:
+ ansi_wrapper.reset_all()
+ raise
+
+ rv.write = _safe_write # type: ignore[method-assign]
+
+ try:
+ _ansi_stream_wrappers[stream] = rv
+ except Exception:
+ pass
+
+ return rv
+
+else:
+
+ def _get_argv_encoding() -> str:
+ return getattr(sys.stdin, "encoding", None) or sys.getfilesystemencoding()
+
+ def _get_windows_console_stream(
+ f: t.TextIO, encoding: str | None, errors: str | None
+ ) -> t.TextIO | None:
+ return None
+
+
+def term_len(x: str) -> int:
+ return len(strip_ansi(x))
+
+
+def isatty(stream: t.IO[t.Any]) -> bool:
+ try:
+ return stream.isatty()
+ except Exception:
+ return False
+
+
+def _make_cached_stream_func(
+ src_func: t.Callable[[], t.TextIO | None],
+ wrapper_func: t.Callable[[], t.TextIO],
+) -> t.Callable[[], t.TextIO | None]:
+ cache: cabc.MutableMapping[t.TextIO, t.TextIO] = WeakKeyDictionary()
+
+ def func() -> t.TextIO | None:
+ stream = src_func()
+
+ if stream is None:
+ return None
+
+ try:
+ rv = cache.get(stream)
+ except Exception:
+ rv = None
+ if rv is not None:
+ return rv
+ rv = wrapper_func()
+ try:
+ cache[stream] = rv
+ except Exception:
+ pass
+ return rv
+
+ return func
+
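+# Editor's note: the WeakKeyDictionary cache above is keyed on the current
+# stream object, so code that swaps sys.stdout (e.g. pytest's capsys or
+# contextlib.redirect_stdout) gets a fresh wrapper on the next call, while
+# repeated calls with an unchanged stream reuse the cached one.
+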
+
+_default_text_stdin = _make_cached_stream_func(lambda: sys.stdin, get_text_stdin)
+_default_text_stdout = _make_cached_stream_func(lambda: sys.stdout, get_text_stdout)
+_default_text_stderr = _make_cached_stream_func(lambda: sys.stderr, get_text_stderr)
+
+
+binary_streams: cabc.Mapping[str, t.Callable[[], t.BinaryIO]] = {
+ "stdin": get_binary_stdin,
+ "stdout": get_binary_stdout,
+ "stderr": get_binary_stderr,
+}
+
+text_streams: cabc.Mapping[str, t.Callable[[str | None, str | None], t.TextIO]] = {
+ "stdin": get_text_stdin,
+ "stdout": get_text_stdout,
+ "stderr": get_text_stderr,
+}
diff --git a/Backend/venv/lib/python3.12/site-packages/click/_termui_impl.py b/Backend/venv/lib/python3.12/site-packages/click/_termui_impl.py
new file mode 100644
index 00000000..ee8225c4
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/click/_termui_impl.py
@@ -0,0 +1,852 @@
+"""
+This module contains implementations for the termui module. To keep the
+import time of Click down, some infrequently used functionality is
+placed in this module and only imported as needed.
+"""
+
+from __future__ import annotations
+
+import collections.abc as cabc
+import contextlib
+import math
+import os
+import shlex
+import sys
+import time
+import typing as t
+from gettext import gettext as _
+from io import StringIO
+from pathlib import Path
+from types import TracebackType
+
+from ._compat import _default_text_stdout
+from ._compat import CYGWIN
+from ._compat import get_best_encoding
+from ._compat import isatty
+from ._compat import open_stream
+from ._compat import strip_ansi
+from ._compat import term_len
+from ._compat import WIN
+from .exceptions import ClickException
+from .utils import echo
+
+V = t.TypeVar("V")
+
+if os.name == "nt":
+ BEFORE_BAR = "\r"
+ AFTER_BAR = "\n"
+else:
+ BEFORE_BAR = "\r\033[?25l"
+ AFTER_BAR = "\033[?25h\n"
+
+
+class ProgressBar(t.Generic[V]):
+ def __init__(
+ self,
+ iterable: cabc.Iterable[V] | None,
+ length: int | None = None,
+ fill_char: str = "#",
+ empty_char: str = " ",
+ bar_template: str = "%(bar)s",
+ info_sep: str = " ",
+ hidden: bool = False,
+ show_eta: bool = True,
+ show_percent: bool | None = None,
+ show_pos: bool = False,
+ item_show_func: t.Callable[[V | None], str | None] | None = None,
+ label: str | None = None,
+ file: t.TextIO | None = None,
+ color: bool | None = None,
+ update_min_steps: int = 1,
+ width: int = 30,
+ ) -> None:
+ self.fill_char = fill_char
+ self.empty_char = empty_char
+ self.bar_template = bar_template
+ self.info_sep = info_sep
+ self.hidden = hidden
+ self.show_eta = show_eta
+ self.show_percent = show_percent
+ self.show_pos = show_pos
+ self.item_show_func = item_show_func
+ self.label: str = label or ""
+
+ if file is None:
+ file = _default_text_stdout()
+
+ # There are no standard streams attached to write to. For example,
+ # pythonw on Windows.
+ if file is None:
+ file = StringIO()
+
+ self.file = file
+ self.color = color
+ self.update_min_steps = update_min_steps
+ self._completed_intervals = 0
+ self.width: int = width
+ self.autowidth: bool = width == 0
+
+ if length is None:
+ from operator import length_hint
+
+ length = length_hint(iterable, -1)
+
+ if length == -1:
+ length = None
+ if iterable is None:
+ if length is None:
+ raise TypeError("iterable or length is required")
+ iterable = t.cast("cabc.Iterable[V]", range(length))
+ self.iter: cabc.Iterable[V] = iter(iterable)
+ self.length = length
+ self.pos: int = 0
+ self.avg: list[float] = []
+ self.last_eta: float
+ self.start: float
+ self.start = self.last_eta = time.time()
+ self.eta_known: bool = False
+ self.finished: bool = False
+ self.max_width: int | None = None
+ self.entered: bool = False
+ self.current_item: V | None = None
+ self._is_atty = isatty(self.file)
+ self._last_line: str | None = None
+
+ def __enter__(self) -> ProgressBar[V]:
+ self.entered = True
+ self.render_progress()
+ return self
+
+ def __exit__(
+ self,
+ exc_type: type[BaseException] | None,
+ exc_value: BaseException | None,
+ tb: TracebackType | None,
+ ) -> None:
+ self.render_finish()
+
+ def __iter__(self) -> cabc.Iterator[V]:
+ if not self.entered:
+ raise RuntimeError("You need to use progress bars in a with block.")
+ self.render_progress()
+ return self.generator()
+
+ def __next__(self) -> V:
+ # Iteration is defined in terms of a generator function,
+ # returned by iter(self); use that to define next(). This works
+ # because `self.iter` is an iterable consumed by that generator,
+ # so it is re-entry safe. Calling `next(self.generator())`
+ # twice works and does "what you want".
+ return next(iter(self))
+
+ def render_finish(self) -> None:
+ if self.hidden or not self._is_atty:
+ return
+ self.file.write(AFTER_BAR)
+ self.file.flush()
+
+ @property
+ def pct(self) -> float:
+ if self.finished:
+ return 1.0
+ return min(self.pos / (float(self.length or 1) or 1), 1.0)
+
+ @property
+ def time_per_iteration(self) -> float:
+ if not self.avg:
+ return 0.0
+ return sum(self.avg) / float(len(self.avg))
+
+ @property
+ def eta(self) -> float:
+ if self.length is not None and not self.finished:
+ return self.time_per_iteration * (self.length - self.pos)
+ return 0.0
+
+ def format_eta(self) -> str:
+ if self.eta_known:
+ t = int(self.eta)
+ seconds = t % 60
+ t //= 60
+ minutes = t % 60
+ t //= 60
+ hours = t % 24
+ t //= 24
+ if t > 0:
+ return f"{t}d {hours:02}:{minutes:02}:{seconds:02}"
+ else:
+ return f"{hours:02}:{minutes:02}:{seconds:02}"
+ return ""
+
+ def format_pos(self) -> str:
+ pos = str(self.pos)
+ if self.length is not None:
+ pos += f"/{self.length}"
+ return pos
+
+ def format_pct(self) -> str:
+ return f"{int(self.pct * 100): 4}%"[1:]
+
+ def format_bar(self) -> str:
+ if self.length is not None:
+ bar_length = int(self.pct * self.width)
+ bar = self.fill_char * bar_length
+ bar += self.empty_char * (self.width - bar_length)
+ elif self.finished:
+ bar = self.fill_char * self.width
+ else:
+ chars = list(self.empty_char * (self.width or 1))
+ if self.time_per_iteration != 0:
+ chars[
+ int(
+ (math.cos(self.pos * self.time_per_iteration) / 2.0 + 0.5)
+ * self.width
+ )
+ ] = self.fill_char
+ bar = "".join(chars)
+ return bar
+
+ def format_progress_line(self) -> str:
+ show_percent = self.show_percent
+
+ info_bits = []
+ if self.length is not None and show_percent is None:
+ show_percent = not self.show_pos
+
+ if self.show_pos:
+ info_bits.append(self.format_pos())
+ if show_percent:
+ info_bits.append(self.format_pct())
+ if self.show_eta and self.eta_known and not self.finished:
+ info_bits.append(self.format_eta())
+ if self.item_show_func is not None:
+ item_info = self.item_show_func(self.current_item)
+ if item_info is not None:
+ info_bits.append(item_info)
+
+ return (
+ self.bar_template
+ % {
+ "label": self.label,
+ "bar": self.format_bar(),
+ "info": self.info_sep.join(info_bits),
+ }
+ ).rstrip()
+
+ def render_progress(self) -> None:
+ if self.hidden:
+ return
+
+ if not self._is_atty:
+ # Only output the label once if the output is not a TTY.
+ if self._last_line != self.label:
+ self._last_line = self.label
+ echo(self.label, file=self.file, color=self.color)
+ return
+
+ buf = []
+ # Update width in case the terminal has been resized
+ if self.autowidth:
+ import shutil
+
+ old_width = self.width
+ self.width = 0
+ clutter_length = term_len(self.format_progress_line())
+ new_width = max(0, shutil.get_terminal_size().columns - clutter_length)
+ if new_width < old_width and self.max_width is not None:
+ buf.append(BEFORE_BAR)
+ buf.append(" " * self.max_width)
+ self.max_width = new_width
+ self.width = new_width
+
+ clear_width = self.width
+ if self.max_width is not None:
+ clear_width = self.max_width
+
+ buf.append(BEFORE_BAR)
+ line = self.format_progress_line()
+ line_len = term_len(line)
+ if self.max_width is None or self.max_width < line_len:
+ self.max_width = line_len
+
+ buf.append(line)
+ buf.append(" " * (clear_width - line_len))
+ line = "".join(buf)
+ # Render the line only if it changed.
+
+ if line != self._last_line:
+ self._last_line = line
+ echo(line, file=self.file, color=self.color, nl=False)
+ self.file.flush()
+
+ def make_step(self, n_steps: int) -> None:
+ self.pos += n_steps
+ if self.length is not None and self.pos >= self.length:
+ self.finished = True
+
+ if (time.time() - self.last_eta) < 1.0:
+ return
+
+ self.last_eta = time.time()
+
+ # self.avg is a rolling list of length <= 7 of steps where steps are
+ # defined as time elapsed divided by the total progress through
+ # self.length.
+ if self.pos:
+ step = (time.time() - self.start) / self.pos
+ else:
+ step = time.time() - self.start
+
+ self.avg = self.avg[-6:] + [step]
+
+ self.eta_known = self.length is not None
+
+ def update(self, n_steps: int, current_item: V | None = None) -> None:
+ """Update the progress bar by advancing a specified number of
+ steps, and optionally set the ``current_item`` for this new
+ position.
+
+ :param n_steps: Number of steps to advance.
+ :param current_item: Optional item to set as ``current_item``
+ for the updated position.
+
+ .. versionchanged:: 8.0
+ Added the ``current_item`` optional parameter.
+
+ .. versionchanged:: 8.0
+ Only render when the number of steps meets the
+ ``update_min_steps`` threshold.
+ """
+ if current_item is not None:
+ self.current_item = current_item
+
+ self._completed_intervals += n_steps
+
+ if self._completed_intervals >= self.update_min_steps:
+ self.make_step(self._completed_intervals)
+ self.render_progress()
+ self._completed_intervals = 0
+
+ def finish(self) -> None:
+ self.eta_known = False
+ self.current_item = None
+ self.finished = True
+
+ def generator(self) -> cabc.Iterator[V]:
+ """Return a generator which yields the items added to the bar
+ during construction, and updates the progress bar *after* the
+ yielded block returns.
+ """
+ # WARNING: the iterator interface for `ProgressBar` relies on
+ # this and only works because this is a simple generator which
+ # doesn't create or manage additional state. If this function
+ # changes, the impact should be evaluated both against
+ # `iter(bar)` and `next(bar)`. `next()` in particular may call
+ # `self.generator()` repeatedly, and this must remain safe in
+ # order for that interface to work.
+ if not self.entered:
+ raise RuntimeError("You need to use progress bars in a with block.")
+
+ if not self._is_atty:
+ yield from self.iter
+ else:
+ for rv in self.iter:
+ self.current_item = rv
+
+ # This allows item_show_func to be updated before the
+ # item is processed. Only trigger at the beginning of
+ # the update interval.
+ if self._completed_intervals == 0:
+ self.render_progress()
+
+ yield rv
+ self.update(1)
+
+ self.finish()
+ self.render_progress()
+
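+# Editor's usage sketch: this class backs the documented public API
+# click.progressbar(); the `with` block is required (see __iter__), and
+# __exit__ restores the cursor on a TTY:
+#
+#     import time
+#     import click
+#
+#     with click.progressbar(range(50), label="Processing") as bar:
+#         for _ in bar:
+#             time.sleep(0.01)
+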
+
+def pager(generator: cabc.Iterable[str], color: bool | None = None) -> None:
+ """Decide what method to use for paging through text."""
+ stdout = _default_text_stdout()
+
+ # There are no standard streams attached to write to. For example,
+ # pythonw on Windows.
+ if stdout is None:
+ stdout = StringIO()
+
+ if not isatty(sys.stdin) or not isatty(stdout):
+ return _nullpager(stdout, generator, color)
+
+ # Split and normalize the pager command into parts.
+ pager_cmd_parts = shlex.split(os.environ.get("PAGER", ""), posix=False)
+ if pager_cmd_parts:
+ if WIN:
+ if _tempfilepager(generator, pager_cmd_parts, color):
+ return
+ elif _pipepager(generator, pager_cmd_parts, color):
+ return
+
+ if os.environ.get("TERM") in ("dumb", "emacs"):
+ return _nullpager(stdout, generator, color)
+ if (WIN or sys.platform.startswith("os2")) and _tempfilepager(
+ generator, ["more"], color
+ ):
+ return
+ if _pipepager(generator, ["less"], color):
+ return
+
+ import tempfile
+
+ fd, filename = tempfile.mkstemp()
+ os.close(fd)
+ try:
+ if _pipepager(generator, ["more"], color):
+ return
+ return _nullpager(stdout, generator, color)
+ finally:
+ os.unlink(filename)
+
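+# Editor's usage sketch: callers normally reach pager() through the public
+# click.echo_via_pager(). The chain above prefers $PAGER, then platform
+# defaults ("more" on Windows, "less" then "more" elsewhere), and finally
+# falls back to plain output:
+#
+#     import click
+#     click.echo_via_pager("\n".join(f"line {n}" for n in range(500)))
+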
+
+def _pipepager(
+ generator: cabc.Iterable[str], cmd_parts: list[str], color: bool | None
+) -> bool:
+ """Page through text by feeding it to another program. Invoking a
+ pager through this might support colors.
+
+ Returns `True` if the command was found, `False` otherwise and thus another
+ pager should be attempted.
+ """
+ # Split the command into the invoked CLI and its parameters.
+ if not cmd_parts:
+ return False
+
+ import shutil
+
+ cmd = cmd_parts[0]
+ cmd_params = cmd_parts[1:]
+
+ cmd_filepath = shutil.which(cmd)
+ if not cmd_filepath:
+ return False
+
+ # Produces a normalized absolute path string. Multi-call binaries such as
+ # busybox derive their identity from the symlink (less -> busybox);
+ # resolve() would follow it and make them misbehave (e.g. less becomes
+ # busybox), so absolute() is used instead.
+ cmd_path = Path(cmd_filepath).absolute()
+ cmd_name = cmd_path.name
+
+ import subprocess
+
+ # Make a local copy of the environment to not affect the global one.
+ env = dict(os.environ)
+
+ # If we're piping to less and the user hasn't decided on colors, enable
+ # them by default when no less flags are set (passing -R via $LESS), or
+ # when an -r/-R flag is already present in the command line arguments.
+ if color is None and cmd_name == "less":
+ less_flags = f"{os.environ.get('LESS', '')}{' '.join(cmd_params)}"
+ if not less_flags:
+ env["LESS"] = "-R"
+ color = True
+ elif "r" in less_flags or "R" in less_flags:
+ color = True
+
+ c = subprocess.Popen(
+ [str(cmd_path)] + cmd_params,
+ shell=False,
+ stdin=subprocess.PIPE,
+ env=env,
+ errors="replace",
+ text=True,
+ )
+ assert c.stdin is not None
+ try:
+ for text in generator:
+ if not color:
+ text = strip_ansi(text)
+
+ c.stdin.write(text)
+ except BrokenPipeError:
+ # In case the pager exited unexpectedly, ignore the broken pipe error.
+ pass
+ except Exception as e:
+ # In case there is an exception we want to close the pager immediately
+ # and let the caller handle it.
+ # Otherwise the pager will keep running, and the user may not notice
+ # the error message, or worse yet it may leave the terminal in a broken state.
+ c.terminate()
+ raise e
+ finally:
+ # We must close stdin and wait for the pager to exit before we continue
+ try:
+ c.stdin.close()
+ # Close implies flush, so it might throw a BrokenPipeError if the pager
+ # process exited already.
+ except BrokenPipeError:
+ pass
+
+ # Less doesn't respect ^C, but catches it for its own UI purposes (aborting
+ # search or other commands inside less).
+ #
+ # That means when the user hits ^C, the parent process (click) terminates,
+ # but less is still alive, paging the output and messing up the terminal.
+ #
+ # If the user wants to make the pager exit on ^C, they should set
+ # `LESS='-K'`. It's not our decision to make.
+ while True:
+ try:
+ c.wait()
+ except KeyboardInterrupt:
+ pass
+ else:
+ break
+
+ return True
+
+
+def _tempfilepager(
+ generator: cabc.Iterable[str], cmd_parts: list[str], color: bool | None
+) -> bool:
+ """Page through text by invoking a program on a temporary file.
+
+ Returns `True` if the command was found, `False` otherwise and thus another
+ pager should be attempted.
+ """
+ # Split the command into the invoked CLI and its parameters.
+ if not cmd_parts:
+ return False
+
+ import shutil
+
+ cmd = cmd_parts[0]
+
+ cmd_filepath = shutil.which(cmd)
+ if not cmd_filepath:
+ return False
+ # Produces a normalized absolute path string. Multi-call binaries such as
+ # busybox derive their identity from the symlink (less -> busybox);
+ # resolve() would follow it and make them misbehave (e.g. less becomes
+ # busybox), so absolute() is used instead.
+ cmd_path = Path(cmd_filepath).absolute()
+
+ import subprocess
+ import tempfile
+
+ fd, filename = tempfile.mkstemp()
+ # TODO: This never terminates if the passed generator never terminates.
+ text = "".join(generator)
+ if not color:
+ text = strip_ansi(text)
+ encoding = get_best_encoding(sys.stdout)
+ with open_stream(filename, "wb")[0] as f:
+ f.write(text.encode(encoding))
+ try:
+ subprocess.call([str(cmd_path), filename])
+ except OSError:
+ # Command not found
+ pass
+ finally:
+ os.close(fd)
+ os.unlink(filename)
+
+ return True
+
+
+def _nullpager(
+ stream: t.TextIO, generator: cabc.Iterable[str], color: bool | None
+) -> None:
+ """Simply print unformatted text. This is the ultimate fallback."""
+ for text in generator:
+ if not color:
+ text = strip_ansi(text)
+ stream.write(text)
+
+
+class Editor:
+ def __init__(
+ self,
+ editor: str | None = None,
+ env: cabc.Mapping[str, str] | None = None,
+ require_save: bool = True,
+ extension: str = ".txt",
+ ) -> None:
+ self.editor = editor
+ self.env = env
+ self.require_save = require_save
+ self.extension = extension
+
+ def get_editor(self) -> str:
+ if self.editor is not None:
+ return self.editor
+ for key in "VISUAL", "EDITOR":
+ rv = os.environ.get(key)
+ if rv:
+ return rv
+ if WIN:
+ return "notepad"
+
+ from shutil import which
+
+ for editor in "sensible-editor", "vim", "nano":
+ if which(editor) is not None:
+ return editor
+ return "vi"
+
+ def edit_files(self, filenames: cabc.Iterable[str]) -> None:
+ import subprocess
+
+ editor = self.get_editor()
+ environ: dict[str, str] | None = None
+
+ if self.env:
+ environ = os.environ.copy()
+ environ.update(self.env)
+
+ exc_filename = " ".join(f'"{filename}"' for filename in filenames)
+
+ try:
+ c = subprocess.Popen(
+ args=f"{editor} {exc_filename}", env=environ, shell=True
+ )
+ exit_code = c.wait()
+ if exit_code != 0:
+ raise ClickException(
+ _("{editor}: Editing failed").format(editor=editor)
+ )
+ except OSError as e:
+ raise ClickException(
+ _("{editor}: Editing failed: {e}").format(editor=editor, e=e)
+ ) from e
+
+ @t.overload
+ def edit(self, text: bytes | bytearray) -> bytes | None: ...
+
+ # We cannot know whether or not the type expected is str or bytes when None
+ # is passed, so str is returned as that was what was done before.
+ @t.overload
+ def edit(self, text: str | None) -> str | None: ...
+
+ def edit(self, text: str | bytes | bytearray | None) -> str | bytes | None:
+ import tempfile
+
+ if text is None:
+ data: bytes | bytearray = b""
+ elif isinstance(text, (bytes, bytearray)):
+ data = text
+ else:
+ if text and not text.endswith("\n"):
+ text += "\n"
+
+ if WIN:
+ data = text.replace("\n", "\r\n").encode("utf-8-sig")
+ else:
+ data = text.encode("utf-8")
+
+ fd, name = tempfile.mkstemp(prefix="editor-", suffix=self.extension)
+ f: t.BinaryIO
+
+ try:
+ with os.fdopen(fd, "wb") as f:
+ f.write(data)
+
+ # If the filesystem resolution is 1 second, like Mac OS
+ # 10.12 Extended, or 2 seconds, like FAT32, and the editor
+ # closes very fast, require_save can fail. Set the modified
+ # time to be 2 seconds in the past to work around this.
+ os.utime(name, (os.path.getatime(name), os.path.getmtime(name) - 2))
+ # Depending on the resolution, the exact value might not be
+ # recorded, so get the new recorded value.
+ timestamp = os.path.getmtime(name)
+
+ self.edit_files((name,))
+
+ if self.require_save and os.path.getmtime(name) == timestamp:
+ return None
+
+ with open(name, "rb") as f:
+ rv = f.read()
+
+ if isinstance(text, (bytes, bytearray)):
+ return rv
+
+ return rv.decode("utf-8-sig").replace("\r\n", "\n")
+ finally:
+ os.unlink(name)
+
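+# Editor's usage sketch: the public click.edit() constructs the Editor
+# class above. With require_save=True (the default), an unchanged buffer
+# returns None:
+#
+#     import click
+#
+#     message = click.edit("# Describe your change\n")
+#     if message is None:
+#         click.echo("Aborted: nothing was saved.")
+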
+
+def open_url(url: str, wait: bool = False, locate: bool = False) -> int:
+ import subprocess
+
+ def _unquote_file(url: str) -> str:
+ from urllib.parse import unquote
+
+ if url.startswith("file://"):
+ url = unquote(url[7:])
+
+ return url
+
+ if sys.platform == "darwin":
+ args = ["open"]
+ if wait:
+ args.append("-W")
+ if locate:
+ args.append("-R")
+ args.append(_unquote_file(url))
+ null = open("/dev/null", "w")
+ try:
+ return subprocess.Popen(args, stderr=null).wait()
+ finally:
+ null.close()
+ elif WIN:
+ if locate:
+ url = _unquote_file(url)
+ args = ["explorer", f"/select,{url}"]
+ else:
+ args = ["start"]
+ if wait:
+ args.append("/WAIT")
+ args.append("")
+ args.append(url)
+ try:
+ return subprocess.call(args)
+ except OSError:
+ # Command not found
+ return 127
+ elif CYGWIN:
+ if locate:
+ url = _unquote_file(url)
+ args = ["cygstart", os.path.dirname(url)]
+ else:
+ args = ["cygstart"]
+ if wait:
+ args.append("-w")
+ args.append(url)
+ try:
+ return subprocess.call(args)
+ except OSError:
+ # Command not found
+ return 127
+
+ try:
+ if locate:
+ url = os.path.dirname(_unquote_file(url)) or "."
+ else:
+ url = _unquote_file(url)
+ c = subprocess.Popen(["xdg-open", url])
+ if wait:
+ return c.wait()
+ return 0
+ except OSError:
+ if url.startswith(("http://", "https://")) and not locate and not wait:
+ import webbrowser
+
+ webbrowser.open(url)
+ return 0
+ return 1
+
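+# Editor's usage sketch: open_url() backs the public click.launch() and
+# returns an exit code (non-zero when no handler could be found):
+#
+#     import click
+#
+#     click.launch("https://palletsprojects.com/")
+#     click.launch("/tmp/report.txt", locate=True)  # reveal in file manager
+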
+
+def _translate_ch_to_exc(ch: str) -> None:
+ if ch == "\x03":
+ raise KeyboardInterrupt()
+
+ if ch == "\x04" and not WIN: # Unix-like, Ctrl+D
+ raise EOFError()
+
+ if ch == "\x1a" and WIN: # Windows, Ctrl+Z
+ raise EOFError()
+
+ return None
+
+
+if sys.platform == "win32":
+ import msvcrt
+
+ @contextlib.contextmanager
+ def raw_terminal() -> cabc.Iterator[int]:
+ yield -1
+
+ def getchar(echo: bool) -> str:
+ # The function `getch` will return a bytes object corresponding to
+ # the pressed character. Since Windows 10 build 1803, it will also
+ # return \x00 when called a second time after pressing a regular key.
+ #
+ # `getwch` does not share this probably-bugged behavior. Moreover, it
+ # returns a Unicode object by default, which is what we want.
+ #
+ # Either of these functions will return \x00 or \xe0 to indicate
+ # a special key, and you need to call the same function again to get
+ # the "rest" of the code. The fun part is that \u00e0 is
+ # "latin small letter a with grave", so if you type that on a French
+ # keyboard, you _also_ get a \xe0.
+ # E.g., consider the Up arrow. This returns \xe0 and then \x48. The
+ # resulting Unicode string reads as "a with grave" + "capital H".
+ # This is indistinguishable from when the user actually types
+ # "a with grave" and then "capital H".
+ #
+ # When \xe0 is returned, we assume it's part of a special-key sequence
+ # and call `getwch` again, but that means that when the user types
+ # the \u00e0 character, `getchar` doesn't return until a second
+ # character is typed.
+ # The alternative is returning immediately, but that would mess up
+ # cross-platform handling of arrow keys and others that start with
+ # \xe0. Another option is using `getch`, but then we can't reliably
+ # read non-ASCII characters, because return values of `getch` are
+ # limited to the current 8-bit codepage.
+ #
+ # Anyway, Click doesn't claim to do this Right(tm), and using `getwch`
+ # is doing the right thing in more situations than with `getch`.
+
+ if echo:
+ func = t.cast(t.Callable[[], str], msvcrt.getwche)
+ else:
+ func = t.cast(t.Callable[[], str], msvcrt.getwch)
+
+ rv = func()
+
+ if rv in ("\x00", "\xe0"):
+ # \x00 and \xe0 are control characters that indicate special key,
+ # see above.
+ rv += func()
+
+ _translate_ch_to_exc(rv)
+ return rv
+
+else:
+ import termios
+ import tty
+
+ @contextlib.contextmanager
+ def raw_terminal() -> cabc.Iterator[int]:
+ f: t.TextIO | None
+ fd: int
+
+ if not isatty(sys.stdin):
+ f = open("/dev/tty")
+ fd = f.fileno()
+ else:
+ fd = sys.stdin.fileno()
+ f = None
+
+ try:
+ old_settings = termios.tcgetattr(fd)
+
+ try:
+ tty.setraw(fd)
+ yield fd
+ finally:
+ termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
+ sys.stdout.flush()
+
+ if f is not None:
+ f.close()
+ except termios.error:
+ pass
+
+ def getchar(echo: bool) -> str:
+ with raw_terminal() as fd:
+ ch = os.read(fd, 32).decode(get_best_encoding(sys.stdin), "replace")
+
+ if echo and isatty(sys.stdout):
+ sys.stdout.write(ch)
+
+ _translate_ch_to_exc(ch)
+ return ch
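+
+
+# Editor's usage sketch: both platform branches above back the public
+# click.getchar(), which reads a single keypress without waiting for
+# Enter; Ctrl+C surfaces as KeyboardInterrupt via _translate_ch_to_exc():
+#
+#     import click
+#
+#     click.echo("Continue? [y/n] ", nl=False)
+#     ch = click.getchar()
+#     click.echo(ch)
+#     if ch.lower() != "y":
+#         raise SystemExit(1)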
diff --git a/Backend/venv/lib/python3.12/site-packages/click/_textwrap.py b/Backend/venv/lib/python3.12/site-packages/click/_textwrap.py
new file mode 100644
index 00000000..97fbee3d
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/click/_textwrap.py
@@ -0,0 +1,51 @@
+from __future__ import annotations
+
+import collections.abc as cabc
+import textwrap
+from contextlib import contextmanager
+
+
+class TextWrapper(textwrap.TextWrapper):
+ def _handle_long_word(
+ self,
+ reversed_chunks: list[str],
+ cur_line: list[str],
+ cur_len: int,
+ width: int,
+ ) -> None:
+ space_left = max(width - cur_len, 1)
+
+ if self.break_long_words:
+ last = reversed_chunks[-1]
+ cut = last[:space_left]
+ res = last[space_left:]
+ cur_line.append(cut)
+ reversed_chunks[-1] = res
+ elif not cur_line:
+ cur_line.append(reversed_chunks.pop())
+
+ @contextmanager
+ def extra_indent(self, indent: str) -> cabc.Iterator[None]:
+ old_initial_indent = self.initial_indent
+ old_subsequent_indent = self.subsequent_indent
+ self.initial_indent += indent
+ self.subsequent_indent += indent
+
+ try:
+ yield
+ finally:
+ self.initial_indent = old_initial_indent
+ self.subsequent_indent = old_subsequent_indent
+
+ def indent_only(self, text: str) -> str:
+ rv = []
+
+ for idx, line in enumerate(text.splitlines()):
+ indent = self.initial_indent
+
+ if idx > 0:
+ indent = self.subsequent_indent
+
+ rv.append(f"{indent}{line}")
+
+ return "\n".join(rv)
diff --git a/Backend/venv/lib/python3.12/site-packages/click/_utils.py b/Backend/venv/lib/python3.12/site-packages/click/_utils.py
new file mode 100644
index 00000000..09fb0085
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/click/_utils.py
@@ -0,0 +1,36 @@
+from __future__ import annotations
+
+import enum
+import typing as t
+
+
+class Sentinel(enum.Enum):
+ """Enum used to define sentinel values.
+
+ .. seealso::
+
+        `PEP 661 - Sentinel Values <https://peps.python.org/pep-0661/>`_.
+ """
+
+ UNSET = object()
+ FLAG_NEEDS_VALUE = object()
+
+ def __repr__(self) -> str:
+ return f"{self.__class__.__name__}.{self.name}"
+
+
+UNSET = Sentinel.UNSET
+"""Sentinel used to indicate that a value is not set."""
+
+FLAG_NEEDS_VALUE = Sentinel.FLAG_NEEDS_VALUE
+"""Sentinel used to indicate an option was passed as a flag without a
+value but is not a flag option.
+
+``Option.consume_value`` uses this to prompt or use the ``flag_value``.
+"""
+
+T_UNSET = t.Literal[UNSET] # type: ignore[valid-type]
+"""Type hint for the :data:`UNSET` sentinel value."""
+
+T_FLAG_NEEDS_VALUE = t.Literal[FLAG_NEEDS_VALUE] # type: ignore[valid-type]
+"""Type hint for the :data:`FLAG_NEEDS_VALUE` sentinel value."""
diff --git a/Backend/venv/lib/python3.12/site-packages/click/_winconsole.py b/Backend/venv/lib/python3.12/site-packages/click/_winconsole.py
new file mode 100644
index 00000000..e56c7c6a
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/click/_winconsole.py
@@ -0,0 +1,296 @@
+# This module is based on the excellent work by Adam Bartoš who
+# provided a lot of what went into the implementation here in
+# the discussion to issue1602 in the Python bug tracker.
+#
+# There are some general differences in regards to how this works
+# compared to the original patches as we do not need to patch
+# the entire interpreter but just work in our little world of
+# echo and prompt.
+from __future__ import annotations
+
+import collections.abc as cabc
+import io
+import sys
+import time
+import typing as t
+from ctypes import Array
+from ctypes import byref
+from ctypes import c_char
+from ctypes import c_char_p
+from ctypes import c_int
+from ctypes import c_ssize_t
+from ctypes import c_ulong
+from ctypes import c_void_p
+from ctypes import POINTER
+from ctypes import py_object
+from ctypes import Structure
+from ctypes.wintypes import DWORD
+from ctypes.wintypes import HANDLE
+from ctypes.wintypes import LPCWSTR
+from ctypes.wintypes import LPWSTR
+
+from ._compat import _NonClosingTextIOWrapper
+
+assert sys.platform == "win32"
+import msvcrt # noqa: E402
+from ctypes import windll # noqa: E402
+from ctypes import WINFUNCTYPE # noqa: E402
+
+c_ssize_p = POINTER(c_ssize_t)
+
+kernel32 = windll.kernel32
+GetStdHandle = kernel32.GetStdHandle
+ReadConsoleW = kernel32.ReadConsoleW
+WriteConsoleW = kernel32.WriteConsoleW
+GetConsoleMode = kernel32.GetConsoleMode
+GetLastError = kernel32.GetLastError
+GetCommandLineW = WINFUNCTYPE(LPWSTR)(("GetCommandLineW", windll.kernel32))
+CommandLineToArgvW = WINFUNCTYPE(POINTER(LPWSTR), LPCWSTR, POINTER(c_int))(
+ ("CommandLineToArgvW", windll.shell32)
+)
+LocalFree = WINFUNCTYPE(c_void_p, c_void_p)(("LocalFree", windll.kernel32))
+
+STDIN_HANDLE = GetStdHandle(-10)
+STDOUT_HANDLE = GetStdHandle(-11)
+STDERR_HANDLE = GetStdHandle(-12)
+
+PyBUF_SIMPLE = 0
+PyBUF_WRITABLE = 1
+
+ERROR_SUCCESS = 0
+ERROR_NOT_ENOUGH_MEMORY = 8
+ERROR_OPERATION_ABORTED = 995
+
+STDIN_FILENO = 0
+STDOUT_FILENO = 1
+STDERR_FILENO = 2
+
+EOF = b"\x1a"
+MAX_BYTES_WRITTEN = 32767
+
+if t.TYPE_CHECKING:
+ try:
+ # Using `typing_extensions.Buffer` instead of `collections.abc`
+ # on Windows for some reason does not have `Sized` implemented.
+ from collections.abc import Buffer # type: ignore
+ except ImportError:
+ from typing_extensions import Buffer
+
+try:
+ from ctypes import pythonapi
+except ImportError:
+ # On PyPy we cannot get buffers so our ability to operate here is
+ # severely limited.
+ get_buffer = None
+else:
+
+ class Py_buffer(Structure):
+ _fields_ = [ # noqa: RUF012
+ ("buf", c_void_p),
+ ("obj", py_object),
+ ("len", c_ssize_t),
+ ("itemsize", c_ssize_t),
+ ("readonly", c_int),
+ ("ndim", c_int),
+ ("format", c_char_p),
+ ("shape", c_ssize_p),
+ ("strides", c_ssize_p),
+ ("suboffsets", c_ssize_p),
+ ("internal", c_void_p),
+ ]
+
+ PyObject_GetBuffer = pythonapi.PyObject_GetBuffer
+ PyBuffer_Release = pythonapi.PyBuffer_Release
+
+ def get_buffer(obj: Buffer, writable: bool = False) -> Array[c_char]:
+ buf = Py_buffer()
+ flags: int = PyBUF_WRITABLE if writable else PyBUF_SIMPLE
+ PyObject_GetBuffer(py_object(obj), byref(buf), flags)
+
+ try:
+ buffer_type = c_char * buf.len
+ out: Array[c_char] = buffer_type.from_address(buf.buf)
+ return out
+ finally:
+ PyBuffer_Release(byref(buf))
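+
+    # A minimal sketch (editorial, not part of Click): `get_buffer` returns a
+    # ctypes array that shares memory with the given bytes-like object, e.g.
+    #
+    #     data = bytearray(4)
+    #     arr = get_buffer(data, writable=True)
+    #     arr[0] = b"x"   # writes through to `data`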
+
+
+class _WindowsConsoleRawIOBase(io.RawIOBase):
+ def __init__(self, handle: int | None) -> None:
+ self.handle = handle
+
+ def isatty(self) -> t.Literal[True]:
+ super().isatty()
+ return True
+
+
+class _WindowsConsoleReader(_WindowsConsoleRawIOBase):
+ def readable(self) -> t.Literal[True]:
+ return True
+
+ def readinto(self, b: Buffer) -> int:
+ bytes_to_be_read = len(b)
+ if not bytes_to_be_read:
+ return 0
+ elif bytes_to_be_read % 2:
+ raise ValueError(
+ "cannot read odd number of bytes from UTF-16-LE encoded console"
+ )
+
+ buffer = get_buffer(b, writable=True)
+ code_units_to_be_read = bytes_to_be_read // 2
+ code_units_read = c_ulong()
+
+ rv = ReadConsoleW(
+ HANDLE(self.handle),
+ buffer,
+ code_units_to_be_read,
+ byref(code_units_read),
+ None,
+ )
+ if GetLastError() == ERROR_OPERATION_ABORTED:
+ # wait for KeyboardInterrupt
+ time.sleep(0.1)
+ if not rv:
+ raise OSError(f"Windows error: {GetLastError()}")
+
+ if buffer[0] == EOF:
+ return 0
+ return 2 * code_units_read.value
+
+
+class _WindowsConsoleWriter(_WindowsConsoleRawIOBase):
+ def writable(self) -> t.Literal[True]:
+ return True
+
+ @staticmethod
+ def _get_error_message(errno: int) -> str:
+ if errno == ERROR_SUCCESS:
+ return "ERROR_SUCCESS"
+ elif errno == ERROR_NOT_ENOUGH_MEMORY:
+ return "ERROR_NOT_ENOUGH_MEMORY"
+ return f"Windows error {errno}"
+
+ def write(self, b: Buffer) -> int:
+ bytes_to_be_written = len(b)
+ buf = get_buffer(b)
+ code_units_to_be_written = min(bytes_to_be_written, MAX_BYTES_WRITTEN) // 2
+ code_units_written = c_ulong()
+
+ WriteConsoleW(
+ HANDLE(self.handle),
+ buf,
+ code_units_to_be_written,
+ byref(code_units_written),
+ None,
+ )
+ bytes_written = 2 * code_units_written.value
+
+ if bytes_written == 0 and bytes_to_be_written > 0:
+ raise OSError(self._get_error_message(GetLastError()))
+ return bytes_written
+
+
+class ConsoleStream:
+ def __init__(self, text_stream: t.TextIO, byte_stream: t.BinaryIO) -> None:
+ self._text_stream = text_stream
+ self.buffer = byte_stream
+
+ @property
+ def name(self) -> str:
+ return self.buffer.name
+
+ def write(self, x: t.AnyStr) -> int:
+ if isinstance(x, str):
+ return self._text_stream.write(x)
+ try:
+ self.flush()
+ except Exception:
+ pass
+ return self.buffer.write(x)
+
+ def writelines(self, lines: cabc.Iterable[t.AnyStr]) -> None:
+ for line in lines:
+ self.write(line)
+
+ def __getattr__(self, name: str) -> t.Any:
+ return getattr(self._text_stream, name)
+
+ def isatty(self) -> bool:
+ return self.buffer.isatty()
+
+ def __repr__(self) -> str:
+ return f""
+
+
+def _get_text_stdin(buffer_stream: t.BinaryIO) -> t.TextIO:
+ text_stream = _NonClosingTextIOWrapper(
+ io.BufferedReader(_WindowsConsoleReader(STDIN_HANDLE)),
+ "utf-16-le",
+ "strict",
+ line_buffering=True,
+ )
+ return t.cast(t.TextIO, ConsoleStream(text_stream, buffer_stream))
+
+
+def _get_text_stdout(buffer_stream: t.BinaryIO) -> t.TextIO:
+ text_stream = _NonClosingTextIOWrapper(
+ io.BufferedWriter(_WindowsConsoleWriter(STDOUT_HANDLE)),
+ "utf-16-le",
+ "strict",
+ line_buffering=True,
+ )
+ return t.cast(t.TextIO, ConsoleStream(text_stream, buffer_stream))
+
+
+def _get_text_stderr(buffer_stream: t.BinaryIO) -> t.TextIO:
+ text_stream = _NonClosingTextIOWrapper(
+ io.BufferedWriter(_WindowsConsoleWriter(STDERR_HANDLE)),
+ "utf-16-le",
+ "strict",
+ line_buffering=True,
+ )
+ return t.cast(t.TextIO, ConsoleStream(text_stream, buffer_stream))
+
+
+_stream_factories: cabc.Mapping[int, t.Callable[[t.BinaryIO], t.TextIO]] = {
+ 0: _get_text_stdin,
+ 1: _get_text_stdout,
+ 2: _get_text_stderr,
+}
+
+
+def _is_console(f: t.TextIO) -> bool:
+ if not hasattr(f, "fileno"):
+ return False
+
+ try:
+ fileno = f.fileno()
+ except (OSError, io.UnsupportedOperation):
+ return False
+
+ handle = msvcrt.get_osfhandle(fileno)
+ return bool(GetConsoleMode(handle, byref(DWORD())))
+
+
+def _get_windows_console_stream(
+ f: t.TextIO, encoding: str | None, errors: str | None
+) -> t.TextIO | None:
+ if (
+ get_buffer is None
+ or encoding not in {"utf-16-le", None}
+ or errors not in {"strict", None}
+ or not _is_console(f)
+ ):
+ return None
+
+ func = _stream_factories.get(f.fileno())
+ if func is None:
+ return None
+
+ b = getattr(f, "buffer", None)
+
+ if b is None:
+ return None
+
+ return func(b)
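+
+
+# A minimal sketch (editorial, not part of Click): how the factory above is
+# typically consumed. If `f` is backed by a real console, the UTF-16-LE
+# wrapper replaces it; otherwise the caller keeps the original stream.
+# `_example_wrap_stdout` is a hypothetical helper.
+def _example_wrap_stdout() -> t.TextIO:
+    wrapped = _get_windows_console_stream(sys.stdout, None, None)
+    return wrapped if wrapped is not None else sys.stdout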
diff --git a/Backend/venv/lib/python3.12/site-packages/click/core.py b/Backend/venv/lib/python3.12/site-packages/click/core.py
new file mode 100644
index 00000000..57f549c7
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/click/core.py
@@ -0,0 +1,3415 @@
+from __future__ import annotations
+
+import collections.abc as cabc
+import enum
+import errno
+import inspect
+import os
+import sys
+import typing as t
+from collections import abc
+from collections import Counter
+from contextlib import AbstractContextManager
+from contextlib import contextmanager
+from contextlib import ExitStack
+from functools import update_wrapper
+from gettext import gettext as _
+from gettext import ngettext
+from itertools import repeat
+from types import TracebackType
+
+from . import types
+from ._utils import FLAG_NEEDS_VALUE
+from ._utils import UNSET
+from .exceptions import Abort
+from .exceptions import BadParameter
+from .exceptions import ClickException
+from .exceptions import Exit
+from .exceptions import MissingParameter
+from .exceptions import NoArgsIsHelpError
+from .exceptions import UsageError
+from .formatting import HelpFormatter
+from .formatting import join_options
+from .globals import pop_context
+from .globals import push_context
+from .parser import _OptionParser
+from .parser import _split_opt
+from .termui import confirm
+from .termui import prompt
+from .termui import style
+from .utils import _detect_program_name
+from .utils import _expand_args
+from .utils import echo
+from .utils import make_default_short_help
+from .utils import make_str
+from .utils import PacifyFlushWrapper
+
+if t.TYPE_CHECKING:
+ from .shell_completion import CompletionItem
+
+F = t.TypeVar("F", bound="t.Callable[..., t.Any]")
+V = t.TypeVar("V")
+
+
+def _complete_visible_commands(
+ ctx: Context, incomplete: str
+) -> cabc.Iterator[tuple[str, Command]]:
+ """List all the subcommands of a group that start with the
+ incomplete value and aren't hidden.
+
+ :param ctx: Invocation context for the group.
+ :param incomplete: Value being completed. May be empty.
+ """
+ multi = t.cast(Group, ctx.command)
+
+ for name in multi.list_commands(ctx):
+ if name.startswith(incomplete):
+ command = multi.get_command(ctx, name)
+
+ if command is not None and not command.hidden:
+ yield name, command
+
+
+def _check_nested_chain(
+ base_command: Group, cmd_name: str, cmd: Command, register: bool = False
+) -> None:
+ if not base_command.chain or not isinstance(cmd, Group):
+ return
+
+ if register:
+ message = (
+ f"It is not possible to add the group {cmd_name!r} to another"
+ f" group {base_command.name!r} that is in chain mode."
+ )
+ else:
+ message = (
+ f"Found the group {cmd_name!r} as subcommand to another group "
+ f" {base_command.name!r} that is in chain mode. This is not supported."
+ )
+
+ raise RuntimeError(message)
+
+
+def batch(iterable: cabc.Iterable[V], batch_size: int) -> list[tuple[V, ...]]:
+ return list(zip(*repeat(iter(iterable), batch_size), strict=False))
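+# For example, batch("abcdef", 2) yields [("a", "b"), ("c", "d"), ("e", "f")]:
+# reusing one iterator across every zip position pairs consecutive items, and
+# a trailing remainder shorter than batch_size is dropped.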
+
+
+@contextmanager
+def augment_usage_errors(
+ ctx: Context, param: Parameter | None = None
+) -> cabc.Iterator[None]:
+ """Context manager that attaches extra information to exceptions."""
+ try:
+ yield
+ except BadParameter as e:
+ if e.ctx is None:
+ e.ctx = ctx
+ if param is not None and e.param is None:
+ e.param = param
+ raise
+ except UsageError as e:
+ if e.ctx is None:
+ e.ctx = ctx
+ raise
+
+
+def iter_params_for_processing(
+ invocation_order: cabc.Sequence[Parameter],
+ declaration_order: cabc.Sequence[Parameter],
+) -> list[Parameter]:
+ """Returns all declared parameters in the order they should be processed.
+
+ The declared parameters are re-shuffled depending on the order in which
+    they were invoked, as well as the eagerness of each parameter.
+
+ The invocation order takes precedence over the declaration order. I.e. the
+ order in which the user provided them to the CLI is respected.
+
+ This behavior and its effect on callback evaluation is detailed at:
+ https://click.palletsprojects.com/en/stable/advanced/#callback-evaluation-order
+ """
+
+ def sort_key(item: Parameter) -> tuple[bool, float]:
+ try:
+ idx: float = invocation_order.index(item)
+ except ValueError:
+ idx = float("inf")
+
+ return not item.is_eager, idx
+
+ return sorted(declaration_order, key=sort_key)
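+
+
+# A minimal sketch (editorial, not part of Click) of the ordering rule, using
+# stand-in objects: eager parameters sort first, then invocation order, and
+# parameters never invoked sort last via float("inf").
+def _example_processing_order() -> None:
+    class _P:
+        def __init__(self, name: str, eager: bool) -> None:
+            self.name, self.is_eager = name, eager
+
+    version = _P("version", True)
+    name, count = _P("name", False), _P("count", False)
+    # Only `name` was seen on the command line.
+    order = iter_params_for_processing([name], [count, name, version])  # type: ignore
+    assert [p.name for p in order] == ["version", "name", "count"]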
+
+
+class ParameterSource(enum.Enum):
+ """This is an :class:`~enum.Enum` that indicates the source of a
+ parameter's value.
+
+ Use :meth:`click.Context.get_parameter_source` to get the
+ source for a parameter by name.
+
+ .. versionchanged:: 8.0
+ Use :class:`~enum.Enum` and drop the ``validate`` method.
+
+ .. versionchanged:: 8.0
+ Added the ``PROMPT`` value.
+ """
+
+ COMMANDLINE = enum.auto()
+ """The value was provided by the command line args."""
+ ENVIRONMENT = enum.auto()
+ """The value was provided with an environment variable."""
+ DEFAULT = enum.auto()
+ """Used the default specified by the parameter."""
+ DEFAULT_MAP = enum.auto()
+ """Used a default provided by :attr:`Context.default_map`."""
+ PROMPT = enum.auto()
+ """Used a prompt to confirm a default or provide a value."""
+
+
+class Context:
+ """The context is a special internal object that holds state relevant
+ for the script execution at every single level. It's normally invisible
+ to commands unless they opt-in to getting access to it.
+
+ The context is useful as it can pass internal objects around and can
+ control special execution features such as reading data from
+ environment variables.
+
+ A context can be used as context manager in which case it will call
+ :meth:`close` on teardown.
+
+ :param command: the command class for this context.
+ :param parent: the parent context.
+ :param info_name: the info name for this invocation. Generally this
+ is the most descriptive name for the script or
+ command. For the toplevel script it is usually
+ the name of the script, for commands below it it's
+                      the name of the command.
+ :param obj: an arbitrary object of user data.
+ :param auto_envvar_prefix: the prefix to use for automatic environment
+ variables. If this is `None` then reading
+ from environment variables is disabled. This
+ does not affect manually set environment
+ variables which are always read.
+ :param default_map: a dictionary (like object) with default values
+ for parameters.
+ :param terminal_width: the width of the terminal. The default is
+ inherit from parent context. If no context
+ defines the terminal width then auto
+ detection will be applied.
+ :param max_content_width: the maximum width for content rendered by
+ Click (this currently only affects help
+ pages). This defaults to 80 characters if
+ not overridden. In other words: even if the
+ terminal is larger than that, Click will not
+ format things wider than 80 characters by
+ default. In addition to that, formatters might
+ add some safety mapping on the right.
+ :param resilient_parsing: if this flag is enabled then Click will
+ parse without any interactivity or callback
+ invocation. Default values will also be
+ ignored. This is useful for implementing
+ things such as completion support.
+ :param allow_extra_args: if this is set to `True` then extra arguments
+ at the end will not raise an error and will be
+ kept on the context. The default is to inherit
+ from the command.
+ :param allow_interspersed_args: if this is set to `False` then options
+ and arguments cannot be mixed. The
+ default is to inherit from the command.
+ :param ignore_unknown_options: instructs click to ignore options it does
+ not know and keeps them for later
+ processing.
+ :param help_option_names: optionally a list of strings that define how
+ the default help parameter is named. The
+ default is ``['--help']``.
+ :param token_normalize_func: an optional function that is used to
+ normalize tokens (options, choices,
+ etc.). This for instance can be used to
+ implement case insensitive behavior.
+ :param color: controls if the terminal supports ANSI colors or not. The
+ default is autodetection. This is only needed if ANSI
+ codes are used in texts that Click prints which is by
+ default not the case. This for instance would affect
+ help output.
+ :param show_default: Show the default value for commands. If this
+ value is not set, it defaults to the value from the parent
+ context. ``Command.show_default`` overrides this default for the
+ specific command.
+
+ .. versionchanged:: 8.2
+ The ``protected_args`` attribute is deprecated and will be removed in
+ Click 9.0. ``args`` will contain remaining unparsed tokens.
+
+ .. versionchanged:: 8.1
+ The ``show_default`` parameter is overridden by
+ ``Command.show_default``, instead of the other way around.
+
+ .. versionchanged:: 8.0
+ The ``show_default`` parameter defaults to the value from the
+ parent context.
+
+ .. versionchanged:: 7.1
+ Added the ``show_default`` parameter.
+
+ .. versionchanged:: 4.0
+ Added the ``color``, ``ignore_unknown_options``, and
+ ``max_content_width`` parameters.
+
+ .. versionchanged:: 3.0
+ Added the ``allow_extra_args`` and ``allow_interspersed_args``
+ parameters.
+
+ .. versionchanged:: 2.0
+ Added the ``resilient_parsing``, ``help_option_names``, and
+ ``token_normalize_func`` parameters.
+ """
+
+ #: The formatter class to create with :meth:`make_formatter`.
+ #:
+ #: .. versionadded:: 8.0
+ formatter_class: type[HelpFormatter] = HelpFormatter
+
+ def __init__(
+ self,
+ command: Command,
+ parent: Context | None = None,
+ info_name: str | None = None,
+ obj: t.Any | None = None,
+ auto_envvar_prefix: str | None = None,
+ default_map: cabc.MutableMapping[str, t.Any] | None = None,
+ terminal_width: int | None = None,
+ max_content_width: int | None = None,
+ resilient_parsing: bool = False,
+ allow_extra_args: bool | None = None,
+ allow_interspersed_args: bool | None = None,
+ ignore_unknown_options: bool | None = None,
+ help_option_names: list[str] | None = None,
+ token_normalize_func: t.Callable[[str], str] | None = None,
+ color: bool | None = None,
+ show_default: bool | None = None,
+ ) -> None:
+ #: the parent context or `None` if none exists.
+ self.parent = parent
+ #: the :class:`Command` for this context.
+ self.command = command
+ #: the descriptive information name
+ self.info_name = info_name
+ #: Map of parameter names to their parsed values. Parameters
+ #: with ``expose_value=False`` are not stored.
+ self.params: dict[str, t.Any] = {}
+ #: the leftover arguments.
+ self.args: list[str] = []
+ #: protected arguments. These are arguments that are prepended
+ #: to `args` when certain parsing scenarios are encountered but
+        #: must never be propagated to other arguments. This is used
+ #: to implement nested parsing.
+ self._protected_args: list[str] = []
+ #: the collected prefixes of the command's options.
+ self._opt_prefixes: set[str] = set(parent._opt_prefixes) if parent else set()
+
+ if obj is None and parent is not None:
+ obj = parent.obj
+
+ #: the user object stored.
+ self.obj: t.Any = obj
+ self._meta: dict[str, t.Any] = getattr(parent, "meta", {})
+
+ #: A dictionary (-like object) with defaults for parameters.
+ if (
+ default_map is None
+ and info_name is not None
+ and parent is not None
+ and parent.default_map is not None
+ ):
+ default_map = parent.default_map.get(info_name)
+
+ self.default_map: cabc.MutableMapping[str, t.Any] | None = default_map
+
+ #: This flag indicates if a subcommand is going to be executed. A
+ #: group callback can use this information to figure out if it's
+ #: being executed directly or because the execution flow passes
+ #: onwards to a subcommand. By default it's None, but it can be
+ #: the name of the subcommand to execute.
+ #:
+ #: If chaining is enabled this will be set to ``'*'`` in case
+ #: any commands are executed. It is however not possible to
+ #: figure out which ones. If you require this knowledge you
+ #: should use a :func:`result_callback`.
+ self.invoked_subcommand: str | None = None
+
+ if terminal_width is None and parent is not None:
+ terminal_width = parent.terminal_width
+
+ #: The width of the terminal (None is autodetection).
+ self.terminal_width: int | None = terminal_width
+
+ if max_content_width is None and parent is not None:
+ max_content_width = parent.max_content_width
+
+ #: The maximum width of formatted content (None implies a sensible
+ #: default which is 80 for most things).
+ self.max_content_width: int | None = max_content_width
+
+ if allow_extra_args is None:
+ allow_extra_args = command.allow_extra_args
+
+ #: Indicates if the context allows extra args or if it should
+ #: fail on parsing.
+ #:
+ #: .. versionadded:: 3.0
+ self.allow_extra_args = allow_extra_args
+
+ if allow_interspersed_args is None:
+ allow_interspersed_args = command.allow_interspersed_args
+
+ #: Indicates if the context allows mixing of arguments and
+ #: options or not.
+ #:
+ #: .. versionadded:: 3.0
+ self.allow_interspersed_args: bool = allow_interspersed_args
+
+ if ignore_unknown_options is None:
+ ignore_unknown_options = command.ignore_unknown_options
+
+ #: Instructs click to ignore options that a command does not
+ #: understand and will store it on the context for later
+ #: processing. This is primarily useful for situations where you
+ #: want to call into external programs. Generally this pattern is
+        #: strongly discouraged because it's not possible to losslessly
+ #: forward all arguments.
+ #:
+ #: .. versionadded:: 4.0
+ self.ignore_unknown_options: bool = ignore_unknown_options
+
+ if help_option_names is None:
+ if parent is not None:
+ help_option_names = parent.help_option_names
+ else:
+ help_option_names = ["--help"]
+
+ #: The names for the help options.
+ self.help_option_names: list[str] = help_option_names
+
+ if token_normalize_func is None and parent is not None:
+ token_normalize_func = parent.token_normalize_func
+
+ #: An optional normalization function for tokens. This is
+ #: options, choices, commands etc.
+ self.token_normalize_func: t.Callable[[str], str] | None = token_normalize_func
+
+ #: Indicates if resilient parsing is enabled. In that case Click
+ #: will do its best to not cause any failures and default values
+ #: will be ignored. Useful for completion.
+ self.resilient_parsing: bool = resilient_parsing
+
+ # If there is no envvar prefix yet, but the parent has one and
+ # the command on this level has a name, we can expand the envvar
+ # prefix automatically.
+ if auto_envvar_prefix is None:
+ if (
+ parent is not None
+ and parent.auto_envvar_prefix is not None
+ and self.info_name is not None
+ ):
+ auto_envvar_prefix = (
+ f"{parent.auto_envvar_prefix}_{self.info_name.upper()}"
+ )
+ else:
+ auto_envvar_prefix = auto_envvar_prefix.upper()
+
+ if auto_envvar_prefix is not None:
+ auto_envvar_prefix = auto_envvar_prefix.replace("-", "_")
+
+ self.auto_envvar_prefix: str | None = auto_envvar_prefix
+
+ if color is None and parent is not None:
+ color = parent.color
+
+ #: Controls if styling output is wanted or not.
+ self.color: bool | None = color
+
+ if show_default is None and parent is not None:
+ show_default = parent.show_default
+
+ #: Show option default values when formatting help text.
+ self.show_default: bool | None = show_default
+
+ self._close_callbacks: list[t.Callable[[], t.Any]] = []
+ self._depth = 0
+ self._parameter_source: dict[str, ParameterSource] = {}
+ self._exit_stack = ExitStack()
+
+ @property
+ def protected_args(self) -> list[str]:
+ import warnings
+
+ warnings.warn(
+ "'protected_args' is deprecated and will be removed in Click 9.0."
+ " 'args' will contain remaining unparsed tokens.",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ return self._protected_args
+
+ def to_info_dict(self) -> dict[str, t.Any]:
+ """Gather information that could be useful for a tool generating
+ user-facing documentation. This traverses the entire CLI
+ structure.
+
+ .. code-block:: python
+
+ with Context(cli) as ctx:
+ info = ctx.to_info_dict()
+
+ .. versionadded:: 8.0
+ """
+ return {
+ "command": self.command.to_info_dict(self),
+ "info_name": self.info_name,
+ "allow_extra_args": self.allow_extra_args,
+ "allow_interspersed_args": self.allow_interspersed_args,
+ "ignore_unknown_options": self.ignore_unknown_options,
+ "auto_envvar_prefix": self.auto_envvar_prefix,
+ }
+
+ def __enter__(self) -> Context:
+ self._depth += 1
+ push_context(self)
+ return self
+
+ def __exit__(
+ self,
+ exc_type: type[BaseException] | None,
+ exc_value: BaseException | None,
+ tb: TracebackType | None,
+ ) -> bool | None:
+ self._depth -= 1
+ exit_result: bool | None = None
+ if self._depth == 0:
+ exit_result = self._close_with_exception_info(exc_type, exc_value, tb)
+ pop_context()
+
+ return exit_result
+
+ @contextmanager
+ def scope(self, cleanup: bool = True) -> cabc.Iterator[Context]:
+ """This helper method can be used with the context object to promote
+ it to the current thread local (see :func:`get_current_context`).
+ The default behavior of this is to invoke the cleanup functions which
+ can be disabled by setting `cleanup` to `False`. The cleanup
+ functions are typically used for things such as closing file handles.
+
+ If the cleanup is intended the context object can also be directly
+ used as a context manager.
+
+ Example usage::
+
+ with ctx.scope():
+ assert get_current_context() is ctx
+
+ This is equivalent::
+
+ with ctx:
+ assert get_current_context() is ctx
+
+ .. versionadded:: 5.0
+
+ :param cleanup: controls if the cleanup functions should be run or
+ not. The default is to run these functions. In
+ some situations the context only wants to be
+ temporarily pushed in which case this can be disabled.
+ Nested pushes automatically defer the cleanup.
+ """
+ if not cleanup:
+ self._depth += 1
+ try:
+ with self as rv:
+ yield rv
+ finally:
+ if not cleanup:
+ self._depth -= 1
+
+ @property
+ def meta(self) -> dict[str, t.Any]:
+ """This is a dictionary which is shared with all the contexts
+ that are nested. It exists so that click utilities can store some
+ state here if they need to. It is however the responsibility of
+ that code to manage this dictionary well.
+
+ The keys are supposed to be unique dotted strings. For instance
+ module paths are a good choice for it. What is stored in there is
+ irrelevant for the operation of click. However what is important is
+ that code that places data here adheres to the general semantics of
+ the system.
+
+ Example usage::
+
+ LANG_KEY = f'{__name__}.lang'
+
+ def set_language(value):
+ ctx = get_current_context()
+ ctx.meta[LANG_KEY] = value
+
+ def get_language():
+ return get_current_context().meta.get(LANG_KEY, 'en_US')
+
+ .. versionadded:: 5.0
+ """
+ return self._meta
+
+ def make_formatter(self) -> HelpFormatter:
+ """Creates the :class:`~click.HelpFormatter` for the help and
+ usage output.
+
+ To quickly customize the formatter class used without overriding
+ this method, set the :attr:`formatter_class` attribute.
+
+ .. versionchanged:: 8.0
+ Added the :attr:`formatter_class` attribute.
+ """
+ return self.formatter_class(
+ width=self.terminal_width, max_width=self.max_content_width
+ )
+
+ def with_resource(self, context_manager: AbstractContextManager[V]) -> V:
+ """Register a resource as if it were used in a ``with``
+ statement. The resource will be cleaned up when the context is
+ popped.
+
+ Uses :meth:`contextlib.ExitStack.enter_context`. It calls the
+ resource's ``__enter__()`` method and returns the result. When
+ the context is popped, it closes the stack, which calls the
+ resource's ``__exit__()`` method.
+
+ To register a cleanup function for something that isn't a
+ context manager, use :meth:`call_on_close`. Or use something
+ from :mod:`contextlib` to turn it into a context manager first.
+
+ .. code-block:: python
+
+ @click.group()
+ @click.option("--name")
+ @click.pass_context
+ def cli(ctx):
+ ctx.obj = ctx.with_resource(connect_db(name))
+
+ :param context_manager: The context manager to enter.
+ :return: Whatever ``context_manager.__enter__()`` returns.
+
+ .. versionadded:: 8.0
+ """
+ return self._exit_stack.enter_context(context_manager)
+
+ def call_on_close(self, f: t.Callable[..., t.Any]) -> t.Callable[..., t.Any]:
+ """Register a function to be called when the context tears down.
+
+ This can be used to close resources opened during the script
+ execution. Resources that support Python's context manager
+ protocol which would be used in a ``with`` statement should be
+ registered with :meth:`with_resource` instead.
+
+ :param f: The function to execute on teardown.
+ """
+ return self._exit_stack.callback(f)
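+
+    # A minimal usage sketch (editorial, not part of Click):
+    #
+    #     @click.pass_context
+    #     def cmd(ctx):
+    #         f = open("log.txt", "w")
+    #         ctx.call_on_close(f.close)   # closed when the context tears down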
+
+ def close(self) -> None:
+ """Invoke all close callbacks registered with
+ :meth:`call_on_close`, and exit all context managers entered
+ with :meth:`with_resource`.
+ """
+ self._close_with_exception_info(None, None, None)
+
+ def _close_with_exception_info(
+ self,
+ exc_type: type[BaseException] | None,
+ exc_value: BaseException | None,
+ tb: TracebackType | None,
+ ) -> bool | None:
+ """Unwind the exit stack by calling its :meth:`__exit__` providing the exception
+ information to allow for exception handling by the various resources registered
+ using :meth;`with_resource`
+
+ :return: Whatever ``exit_stack.__exit__()`` returns.
+ """
+ exit_result = self._exit_stack.__exit__(exc_type, exc_value, tb)
+ # In case the context is reused, create a new exit stack.
+ self._exit_stack = ExitStack()
+
+ return exit_result
+
+ @property
+ def command_path(self) -> str:
+ """The computed command path. This is used for the ``usage``
+ information on the help page. It's automatically created by
+ combining the info names of the chain of contexts to the root.
+ """
+ rv = ""
+ if self.info_name is not None:
+ rv = self.info_name
+ if self.parent is not None:
+ parent_command_path = [self.parent.command_path]
+
+ if isinstance(self.parent.command, Command):
+ for param in self.parent.command.get_params(self):
+ parent_command_path.extend(param.get_usage_pieces(self))
+
+ rv = f"{' '.join(parent_command_path)} {rv}"
+ return rv.lstrip()
+
+ def find_root(self) -> Context:
+ """Finds the outermost context."""
+ node = self
+ while node.parent is not None:
+ node = node.parent
+ return node
+
+ def find_object(self, object_type: type[V]) -> V | None:
+ """Finds the closest object of a given type."""
+ node: Context | None = self
+
+ while node is not None:
+ if isinstance(node.obj, object_type):
+ return node.obj
+
+ node = node.parent
+
+ return None
+
+ def ensure_object(self, object_type: type[V]) -> V:
+ """Like :meth:`find_object` but sets the innermost object to a
+ new instance of `object_type` if it does not exist.
+ """
+ rv = self.find_object(object_type)
+ if rv is None:
+ self.obj = rv = object_type()
+ return rv
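+
+    # A minimal usage sketch (editorial, not part of Click):
+    #
+    #     @click.group()
+    #     @click.pass_context
+    #     def cli(ctx):
+    #         ctx.ensure_object(dict)   # ctx.obj becomes {} if nothing is set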
+
+ @t.overload
+ def lookup_default(
+ self, name: str, call: t.Literal[True] = True
+ ) -> t.Any | None: ...
+
+ @t.overload
+ def lookup_default(
+ self, name: str, call: t.Literal[False] = ...
+ ) -> t.Any | t.Callable[[], t.Any] | None: ...
+
+ def lookup_default(self, name: str, call: bool = True) -> t.Any | None:
+ """Get the default for a parameter from :attr:`default_map`.
+
+ :param name: Name of the parameter.
+ :param call: If the default is a callable, call it. Disable to
+ return the callable instead.
+
+ .. versionchanged:: 8.0
+ Added the ``call`` parameter.
+ """
+ if self.default_map is not None:
+ value = self.default_map.get(name, UNSET)
+
+ if call and callable(value):
+ return value()
+
+ return value
+
+ return UNSET
+
+ def fail(self, message: str) -> t.NoReturn:
+ """Aborts the execution of the program with a specific error
+ message.
+
+ :param message: the error message to fail with.
+ """
+ raise UsageError(message, self)
+
+ def abort(self) -> t.NoReturn:
+ """Aborts the script."""
+ raise Abort()
+
+ def exit(self, code: int = 0) -> t.NoReturn:
+ """Exits the application with a given exit code.
+
+ .. versionchanged:: 8.2
+ Callbacks and context managers registered with :meth:`call_on_close`
+ and :meth:`with_resource` are closed before exiting.
+ """
+ self.close()
+ raise Exit(code)
+
+ def get_usage(self) -> str:
+ """Helper method to get formatted usage string for the current
+ context and command.
+ """
+ return self.command.get_usage(self)
+
+ def get_help(self) -> str:
+ """Helper method to get formatted help page for the current
+ context and command.
+ """
+ return self.command.get_help(self)
+
+ def _make_sub_context(self, command: Command) -> Context:
+ """Create a new context of the same type as this context, but
+ for a new command.
+
+ :meta private:
+ """
+ return type(self)(command, info_name=command.name, parent=self)
+
+ @t.overload
+ def invoke(
+ self, callback: t.Callable[..., V], /, *args: t.Any, **kwargs: t.Any
+ ) -> V: ...
+
+ @t.overload
+ def invoke(self, callback: Command, /, *args: t.Any, **kwargs: t.Any) -> t.Any: ...
+
+ def invoke(
+ self, callback: Command | t.Callable[..., V], /, *args: t.Any, **kwargs: t.Any
+ ) -> t.Any | V:
+ """Invokes a command callback in exactly the way it expects. There
+ are two ways to invoke this method:
+
+ 1. the first argument can be a callback and all other arguments and
+ keyword arguments are forwarded directly to the function.
+ 2. the first argument is a click command object. In that case all
+ arguments are forwarded as well but proper click parameters
+ (options and click arguments) must be keyword arguments and Click
+ will fill in defaults.
+
+ .. versionchanged:: 8.0
+ All ``kwargs`` are tracked in :attr:`params` so they will be
+ passed if :meth:`forward` is called at multiple levels.
+
+ .. versionchanged:: 3.2
+ A new context is created, and missing arguments use default values.
+ """
+ if isinstance(callback, Command):
+ other_cmd = callback
+
+ if other_cmd.callback is None:
+ raise TypeError(
+ "The given command does not have a callback that can be invoked."
+ )
+ else:
+ callback = t.cast("t.Callable[..., V]", other_cmd.callback)
+
+ ctx = self._make_sub_context(other_cmd)
+
+ for param in other_cmd.params:
+ if param.name not in kwargs and param.expose_value:
+ default_value = param.get_default(ctx)
+                    # We explicitly hide the :attr:`UNSET` value from the user, as we
+                    # choose to make it an implementation detail. And because ``invoke``
+                    # is part of Click's public API, we return ``None``
+ # instead. Refs:
+ # https://github.com/pallets/click/issues/3066
+ # https://github.com/pallets/click/issues/3065
+ # https://github.com/pallets/click/pull/3068
+ if default_value is UNSET:
+ default_value = None
+ kwargs[param.name] = param.type_cast_value( # type: ignore
+ ctx, default_value
+ )
+
+ # Track all kwargs as params, so that forward() will pass
+ # them on in subsequent calls.
+ ctx.params.update(kwargs)
+ else:
+ ctx = self
+
+ with augment_usage_errors(self):
+ with ctx:
+ return callback(*args, **kwargs)
+
+ def forward(self, cmd: Command, /, *args: t.Any, **kwargs: t.Any) -> t.Any:
+ """Similar to :meth:`invoke` but fills in default keyword
+ arguments from the current context if the other command expects
+ it. This cannot invoke callbacks directly, only other commands.
+
+ .. versionchanged:: 8.0
+ All ``kwargs`` are tracked in :attr:`params` so they will be
+ passed if ``forward`` is called at multiple levels.
+ """
+ # Can only forward to other commands, not direct callbacks.
+ if not isinstance(cmd, Command):
+ raise TypeError("Callback is not a command.")
+
+ for param in self.params:
+ if param not in kwargs:
+ kwargs[param] = self.params[param]
+
+ return self.invoke(cmd, *args, **kwargs)
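+
+    # A minimal usage sketch (editorial, not part of Click), where `original`
+    # is a hypothetical sibling command:
+    #
+    #     @cli.command()
+    #     @click.pass_context
+    #     def copy(ctx, name):
+    #         ctx.forward(original)   # re-sends ctx.params to `original`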
+
+ def set_parameter_source(self, name: str, source: ParameterSource) -> None:
+ """Set the source of a parameter. This indicates the location
+ from which the value of the parameter was obtained.
+
+ :param name: The name of the parameter.
+ :param source: A member of :class:`~click.core.ParameterSource`.
+ """
+ self._parameter_source[name] = source
+
+ def get_parameter_source(self, name: str) -> ParameterSource | None:
+ """Get the source of a parameter. This indicates the location
+ from which the value of the parameter was obtained.
+
+ This can be useful for determining when a user specified a value
+ on the command line that is the same as the default value. It
+ will be :attr:`~click.core.ParameterSource.DEFAULT` only if the
+ value was actually taken from the default.
+
+ :param name: The name of the parameter.
+ :rtype: ParameterSource
+
+ .. versionchanged:: 8.0
+ Returns ``None`` if the parameter was not provided from any
+ source.
+ """
+ return self._parameter_source.get(name)
+
+
+class Command:
+ """Commands are the basic building block of command line interfaces in
+ Click. A basic command handles command line parsing and might dispatch
+ more parsing to commands nested below it.
+
+ :param name: the name of the command to use unless a group overrides it.
+ :param context_settings: an optional dictionary with defaults that are
+ passed to the context object.
+ :param callback: the callback to invoke. This is optional.
+ :param params: the parameters to register with this command. This can
+ be either :class:`Option` or :class:`Argument` objects.
+ :param help: the help string to use for this command.
+ :param epilog: like the help string but it's printed at the end of the
+ help page after everything else.
+ :param short_help: the short help to use for this command. This is
+ shown on the command listing of the parent command.
+ :param add_help_option: by default each command registers a ``--help``
+ option. This can be disabled by this parameter.
+ :param no_args_is_help: this controls what happens if no arguments are
+ provided. This option is disabled by default.
+ If enabled this will add ``--help`` as argument
+ if no arguments are passed
+ :param hidden: hide this command from help outputs.
+ :param deprecated: If ``True`` or non-empty string, issues a message
+ indicating that the command is deprecated and highlights
+ its deprecation in --help. The message can be customized
+ by using a string as the value.
+
+ .. versionchanged:: 8.2
+ This is the base class for all commands, not ``BaseCommand``.
+ ``deprecated`` can be set to a string as well to customize the
+ deprecation message.
+
+ .. versionchanged:: 8.1
+ ``help``, ``epilog``, and ``short_help`` are stored unprocessed,
+ all formatting is done when outputting help text, not at init,
+ and is done even if not using the ``@command`` decorator.
+
+ .. versionchanged:: 8.0
+ Added a ``repr`` showing the command name.
+
+ .. versionchanged:: 7.1
+ Added the ``no_args_is_help`` parameter.
+
+ .. versionchanged:: 2.0
+ Added the ``context_settings`` parameter.
+ """
+
+ #: The context class to create with :meth:`make_context`.
+ #:
+ #: .. versionadded:: 8.0
+ context_class: type[Context] = Context
+
+ #: the default for the :attr:`Context.allow_extra_args` flag.
+ allow_extra_args = False
+
+ #: the default for the :attr:`Context.allow_interspersed_args` flag.
+ allow_interspersed_args = True
+
+ #: the default for the :attr:`Context.ignore_unknown_options` flag.
+ ignore_unknown_options = False
+
+ def __init__(
+ self,
+ name: str | None,
+ context_settings: cabc.MutableMapping[str, t.Any] | None = None,
+ callback: t.Callable[..., t.Any] | None = None,
+ params: list[Parameter] | None = None,
+ help: str | None = None,
+ epilog: str | None = None,
+ short_help: str | None = None,
+ options_metavar: str | None = "[OPTIONS]",
+ add_help_option: bool = True,
+ no_args_is_help: bool = False,
+ hidden: bool = False,
+ deprecated: bool | str = False,
+ ) -> None:
+ #: the name the command thinks it has. Upon registering a command
+ #: on a :class:`Group` the group will default the command name
+ #: with this information. You should instead use the
+ #: :class:`Context`\'s :attr:`~Context.info_name` attribute.
+ self.name = name
+
+ if context_settings is None:
+ context_settings = {}
+
+ #: an optional dictionary with defaults passed to the context.
+ self.context_settings: cabc.MutableMapping[str, t.Any] = context_settings
+
+ #: the callback to execute when the command fires. This might be
+ #: `None` in which case nothing happens.
+ self.callback = callback
+ #: the list of parameters for this command in the order they
+ #: should show up in the help page and execute. Eager parameters
+ #: will automatically be handled before non eager ones.
+ self.params: list[Parameter] = params or []
+ self.help = help
+ self.epilog = epilog
+ self.options_metavar = options_metavar
+ self.short_help = short_help
+ self.add_help_option = add_help_option
+ self._help_option = None
+ self.no_args_is_help = no_args_is_help
+ self.hidden = hidden
+ self.deprecated = deprecated
+
+ def to_info_dict(self, ctx: Context) -> dict[str, t.Any]:
+ return {
+ "name": self.name,
+ "params": [param.to_info_dict() for param in self.get_params(ctx)],
+ "help": self.help,
+ "epilog": self.epilog,
+ "short_help": self.short_help,
+ "hidden": self.hidden,
+ "deprecated": self.deprecated,
+ }
+
+ def __repr__(self) -> str:
+ return f"<{self.__class__.__name__} {self.name}>"
+
+ def get_usage(self, ctx: Context) -> str:
+ """Formats the usage line into a string and returns it.
+
+ Calls :meth:`format_usage` internally.
+ """
+ formatter = ctx.make_formatter()
+ self.format_usage(ctx, formatter)
+ return formatter.getvalue().rstrip("\n")
+
+ def get_params(self, ctx: Context) -> list[Parameter]:
+ params = self.params
+ help_option = self.get_help_option(ctx)
+
+ if help_option is not None:
+ params = [*params, help_option]
+
+ if __debug__:
+ import warnings
+
+ opts = [opt for param in params for opt in param.opts]
+ opts_counter = Counter(opts)
+ duplicate_opts = (opt for opt, count in opts_counter.items() if count > 1)
+
+ for duplicate_opt in duplicate_opts:
+ warnings.warn(
+ (
+ f"The parameter {duplicate_opt} is used more than once. "
+ "Remove its duplicate as parameters should be unique."
+ ),
+ stacklevel=3,
+ )
+
+ return params
+
+ def format_usage(self, ctx: Context, formatter: HelpFormatter) -> None:
+ """Writes the usage line into the formatter.
+
+ This is a low-level method called by :meth:`get_usage`.
+ """
+ pieces = self.collect_usage_pieces(ctx)
+ formatter.write_usage(ctx.command_path, " ".join(pieces))
+
+ def collect_usage_pieces(self, ctx: Context) -> list[str]:
+ """Returns all the pieces that go into the usage line and returns
+ it as a list of strings.
+ """
+ rv = [self.options_metavar] if self.options_metavar else []
+
+ for param in self.get_params(ctx):
+ rv.extend(param.get_usage_pieces(ctx))
+
+ return rv
+
+ def get_help_option_names(self, ctx: Context) -> list[str]:
+ """Returns the names for the help option."""
+ all_names = set(ctx.help_option_names)
+ for param in self.params:
+ all_names.difference_update(param.opts)
+ all_names.difference_update(param.secondary_opts)
+ return list(all_names)
+
+ def get_help_option(self, ctx: Context) -> Option | None:
+ """Returns the help option object.
+
+ Skipped if :attr:`add_help_option` is ``False``.
+
+ .. versionchanged:: 8.1.8
+ The help option is now cached to avoid creating it multiple times.
+ """
+ help_option_names = self.get_help_option_names(ctx)
+
+ if not help_option_names or not self.add_help_option:
+ return None
+
+ # Cache the help option object in private _help_option attribute to
+ # avoid creating it multiple times. Not doing this will break the
+        # callback ordering by iter_params_for_processing(), which relies on
+ # object comparison.
+ if self._help_option is None:
+ # Avoid circular import.
+ from .decorators import help_option
+
+ # Apply help_option decorator and pop resulting option
+ help_option(*help_option_names)(self)
+ self._help_option = self.params.pop() # type: ignore[assignment]
+
+ return self._help_option
+
+ def make_parser(self, ctx: Context) -> _OptionParser:
+ """Creates the underlying option parser for this command."""
+ parser = _OptionParser(ctx)
+ for param in self.get_params(ctx):
+ param.add_to_parser(parser, ctx)
+ return parser
+
+ def get_help(self, ctx: Context) -> str:
+ """Formats the help into a string and returns it.
+
+ Calls :meth:`format_help` internally.
+ """
+ formatter = ctx.make_formatter()
+ self.format_help(ctx, formatter)
+ return formatter.getvalue().rstrip("\n")
+
+ def get_short_help_str(self, limit: int = 45) -> str:
+ """Gets short help for the command or makes it by shortening the
+ long help string.
+ """
+ if self.short_help:
+ text = inspect.cleandoc(self.short_help)
+ elif self.help:
+ text = make_default_short_help(self.help, limit)
+ else:
+ text = ""
+
+ if self.deprecated:
+ deprecated_message = (
+ f"(DEPRECATED: {self.deprecated})"
+ if isinstance(self.deprecated, str)
+ else "(DEPRECATED)"
+ )
+ text = _("{text} {deprecated_message}").format(
+ text=text, deprecated_message=deprecated_message
+ )
+
+ return text.strip()
+
+ def format_help(self, ctx: Context, formatter: HelpFormatter) -> None:
+ """Writes the help into the formatter if it exists.
+
+ This is a low-level method called by :meth:`get_help`.
+
+ This calls the following methods:
+
+ - :meth:`format_usage`
+ - :meth:`format_help_text`
+ - :meth:`format_options`
+ - :meth:`format_epilog`
+ """
+ self.format_usage(ctx, formatter)
+ self.format_help_text(ctx, formatter)
+ self.format_options(ctx, formatter)
+ self.format_epilog(ctx, formatter)
+
+ def format_help_text(self, ctx: Context, formatter: HelpFormatter) -> None:
+ """Writes the help text to the formatter if it exists."""
+ if self.help is not None:
+ # truncate the help text to the first form feed
+ text = inspect.cleandoc(self.help).partition("\f")[0]
+ else:
+ text = ""
+
+ if self.deprecated:
+ deprecated_message = (
+ f"(DEPRECATED: {self.deprecated})"
+ if isinstance(self.deprecated, str)
+ else "(DEPRECATED)"
+ )
+ text = _("{text} {deprecated_message}").format(
+ text=text, deprecated_message=deprecated_message
+ )
+
+ if text:
+ formatter.write_paragraph()
+
+ with formatter.indentation():
+ formatter.write_text(text)
+
+ def format_options(self, ctx: Context, formatter: HelpFormatter) -> None:
+ """Writes all the options into the formatter if they exist."""
+ opts = []
+ for param in self.get_params(ctx):
+ rv = param.get_help_record(ctx)
+ if rv is not None:
+ opts.append(rv)
+
+ if opts:
+ with formatter.section(_("Options")):
+ formatter.write_dl(opts)
+
+ def format_epilog(self, ctx: Context, formatter: HelpFormatter) -> None:
+ """Writes the epilog into the formatter if it exists."""
+ if self.epilog:
+ epilog = inspect.cleandoc(self.epilog)
+ formatter.write_paragraph()
+
+ with formatter.indentation():
+ formatter.write_text(epilog)
+
+ def make_context(
+ self,
+ info_name: str | None,
+ args: list[str],
+ parent: Context | None = None,
+ **extra: t.Any,
+ ) -> Context:
+ """This function when given an info name and arguments will kick
+ off the parsing and create a new :class:`Context`. It does not
+ invoke the actual command callback though.
+
+ To quickly customize the context class used without overriding
+ this method, set the :attr:`context_class` attribute.
+
+ :param info_name: the info name for this invocation. Generally this
+ is the most descriptive name for the script or
+ command. For the toplevel script it's usually
+ the name of the script, for commands below it's
+ the name of the command.
+ :param args: the arguments to parse as list of strings.
+ :param parent: the parent context if available.
+ :param extra: extra keyword arguments forwarded to the context
+ constructor.
+
+ .. versionchanged:: 8.0
+ Added the :attr:`context_class` attribute.
+ """
+ for key, value in self.context_settings.items():
+ if key not in extra:
+ extra[key] = value
+
+ ctx = self.context_class(self, info_name=info_name, parent=parent, **extra)
+
+ with ctx.scope(cleanup=False):
+ self.parse_args(ctx, args)
+ return ctx
+
+ def parse_args(self, ctx: Context, args: list[str]) -> list[str]:
+ if not args and self.no_args_is_help and not ctx.resilient_parsing:
+ raise NoArgsIsHelpError(ctx)
+
+ parser = self.make_parser(ctx)
+ opts, args, param_order = parser.parse_args(args=args)
+
+ for param in iter_params_for_processing(param_order, self.get_params(ctx)):
+ _, args = param.handle_parse_result(ctx, opts, args)
+
+        # We now have all parameters' values in `ctx.params`, but the data may contain
+ # the `UNSET` sentinel.
+ # Convert `UNSET` to `None` to ensure that the user doesn't see `UNSET`.
+ #
+ # Waiting until after the initial parse to convert allows us to treat `UNSET`
+ # more like a missing value when multiple params use the same name.
+ # Refs:
+ # https://github.com/pallets/click/issues/3071
+ # https://github.com/pallets/click/pull/3079
+ for name, value in ctx.params.items():
+ if value is UNSET:
+ ctx.params[name] = None
+
+ if args and not ctx.allow_extra_args and not ctx.resilient_parsing:
+ ctx.fail(
+ ngettext(
+ "Got unexpected extra argument ({args})",
+ "Got unexpected extra arguments ({args})",
+ len(args),
+ ).format(args=" ".join(map(str, args)))
+ )
+
+ ctx.args = args
+ ctx._opt_prefixes.update(parser._opt_prefixes)
+ return args
+
+ def invoke(self, ctx: Context) -> t.Any:
+ """Given a context, this invokes the attached callback (if it exists)
+ in the right way.
+ """
+ if self.deprecated:
+ extra_message = (
+ f" {self.deprecated}" if isinstance(self.deprecated, str) else ""
+ )
+ message = _(
+ "DeprecationWarning: The command {name!r} is deprecated.{extra_message}"
+ ).format(name=self.name, extra_message=extra_message)
+ echo(style(message, fg="red"), err=True)
+
+ if self.callback is not None:
+ return ctx.invoke(self.callback, **ctx.params)
+
+ def shell_complete(self, ctx: Context, incomplete: str) -> list[CompletionItem]:
+ """Return a list of completions for the incomplete value. Looks
+ at the names of options and chained multi-commands.
+
+ Any command could be part of a chained multi-command, so sibling
+ commands are valid at any point during command completion.
+
+ :param ctx: Invocation context for this command.
+ :param incomplete: Value being completed. May be empty.
+
+ .. versionadded:: 8.0
+ """
+ from click.shell_completion import CompletionItem
+
+ results: list[CompletionItem] = []
+
+ if incomplete and not incomplete[0].isalnum():
+ for param in self.get_params(ctx):
+ if (
+ not isinstance(param, Option)
+ or param.hidden
+ or (
+ not param.multiple
+ and ctx.get_parameter_source(param.name) # type: ignore
+ is ParameterSource.COMMANDLINE
+ )
+ ):
+ continue
+
+ results.extend(
+ CompletionItem(name, help=param.help)
+ for name in [*param.opts, *param.secondary_opts]
+ if name.startswith(incomplete)
+ )
+
+ while ctx.parent is not None:
+ ctx = ctx.parent
+
+ if isinstance(ctx.command, Group) and ctx.command.chain:
+ results.extend(
+ CompletionItem(name, help=command.get_short_help_str())
+ for name, command in _complete_visible_commands(ctx, incomplete)
+ if name not in ctx._protected_args
+ )
+
+ return results
+
+ @t.overload
+ def main(
+ self,
+ args: cabc.Sequence[str] | None = None,
+ prog_name: str | None = None,
+ complete_var: str | None = None,
+ standalone_mode: t.Literal[True] = True,
+ **extra: t.Any,
+ ) -> t.NoReturn: ...
+
+ @t.overload
+ def main(
+ self,
+ args: cabc.Sequence[str] | None = None,
+ prog_name: str | None = None,
+ complete_var: str | None = None,
+ standalone_mode: bool = ...,
+ **extra: t.Any,
+ ) -> t.Any: ...
+
+ def main(
+ self,
+ args: cabc.Sequence[str] | None = None,
+ prog_name: str | None = None,
+ complete_var: str | None = None,
+ standalone_mode: bool = True,
+ windows_expand_args: bool = True,
+ **extra: t.Any,
+ ) -> t.Any:
+ """This is the way to invoke a script with all the bells and
+ whistles as a command line application. This will always terminate
+ the application after a call. If this is not wanted, ``SystemExit``
+ needs to be caught.
+
+ This method is also available by directly calling the instance of
+ a :class:`Command`.
+
+ :param args: the arguments that should be used for parsing. If not
+ provided, ``sys.argv[1:]`` is used.
+ :param prog_name: the program name that should be used. By default
+ the program name is constructed by taking the file
+ name from ``sys.argv[0]``.
+ :param complete_var: the environment variable that controls the
+ bash completion support. The default is
+ ``"__COMPLETE"`` with prog_name in
+ uppercase.
+ :param standalone_mode: the default behavior is to invoke the script
+ in standalone mode. Click will then
+ handle exceptions and convert them into
+ error messages and the function will never
+ return but shut down the interpreter. If
+ this is set to `False` they will be
+ propagated to the caller and the return
+ value of this function is the return value
+ of :meth:`invoke`.
+ :param windows_expand_args: Expand glob patterns, user dir, and
+ env vars in command line args on Windows.
+ :param extra: extra keyword arguments are forwarded to the context
+ constructor. See :class:`Context` for more information.
+
+ .. versionchanged:: 8.0.1
+ Added the ``windows_expand_args`` parameter to allow
+ disabling command line arg expansion on Windows.
+
+ .. versionchanged:: 8.0
+ When taking arguments from ``sys.argv`` on Windows, glob
+ patterns, user dir, and env vars are expanded.
+
+ .. versionchanged:: 3.0
+ Added the ``standalone_mode`` parameter.
+ """
+ if args is None:
+ args = sys.argv[1:]
+
+ if os.name == "nt" and windows_expand_args:
+ args = _expand_args(args)
+ else:
+ args = list(args)
+
+ if prog_name is None:
+ prog_name = _detect_program_name()
+
+ # Process shell completion requests and exit early.
+ self._main_shell_completion(extra, prog_name, complete_var)
+
+ try:
+ try:
+ with self.make_context(prog_name, args, **extra) as ctx:
+ rv = self.invoke(ctx)
+ if not standalone_mode:
+ return rv
+ # it's not safe to `ctx.exit(rv)` here!
+ # note that `rv` may actually contain data like "1" which
+ # has obvious effects
+ # more subtle case: `rv=[None, None]` can come out of
+ # chained commands which all returned `None` -- so it's not
+ # even always obvious that `rv` indicates success/failure
+ # by its truthiness/falsiness
+ ctx.exit()
+ except (EOFError, KeyboardInterrupt) as e:
+ echo(file=sys.stderr)
+ raise Abort() from e
+ except ClickException as e:
+ if not standalone_mode:
+ raise
+ e.show()
+ sys.exit(e.exit_code)
+ except OSError as e:
+ if e.errno == errno.EPIPE:
+ sys.stdout = t.cast(t.TextIO, PacifyFlushWrapper(sys.stdout))
+ sys.stderr = t.cast(t.TextIO, PacifyFlushWrapper(sys.stderr))
+ sys.exit(1)
+ else:
+ raise
+ except Exit as e:
+ if standalone_mode:
+ sys.exit(e.exit_code)
+ else:
+ # in non-standalone mode, return the exit code
+ # note that this is only reached if `self.invoke` above raises
+ # an Exit explicitly -- thus bypassing the check there which
+ # would return its result
+ # the results of non-standalone execution may therefore be
+ # somewhat ambiguous: if there are codepaths which lead to
+ # `ctx.exit(1)` and to `return 1`, the caller won't be able to
+ # tell the difference between the two
+ return e.exit_code
+ except Abort:
+ if not standalone_mode:
+ raise
+ echo(_("Aborted!"), file=sys.stderr)
+ sys.exit(1)
+
+ def _main_shell_completion(
+ self,
+ ctx_args: cabc.MutableMapping[str, t.Any],
+ prog_name: str,
+ complete_var: str | None = None,
+ ) -> None:
+ """Check if the shell is asking for tab completion, process
+ that, then exit early. Called from :meth:`main` before the
+ program is invoked.
+
+ :param prog_name: Name of the executable in the shell.
+ :param complete_var: Name of the environment variable that holds
+ the completion instruction. Defaults to
+ ``_{PROG_NAME}_COMPLETE``.
+
+ .. versionchanged:: 8.2.0
+ Dots (``.``) in ``prog_name`` are replaced with underscores (``_``).
+ """
+ if complete_var is None:
+ complete_name = prog_name.replace("-", "_").replace(".", "_")
+ complete_var = f"_{complete_name}_COMPLETE".upper()
+
+ instruction = os.environ.get(complete_var)
+
+ if not instruction:
+ return
+
+ from .shell_completion import shell_complete
+
+ rv = shell_complete(self, ctx_args, prog_name, complete_var, instruction)
+ sys.exit(rv)
+
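+ # Naming sketch: for a prog_name of "my-tool.py", the derived variable is
+ # _MY_TOOL_PY_COMPLETE (dashes and dots become underscores, then the name
+ # is uppercased), so completion can be requested by invoking the program
+ # with that variable set (hypothetical program name):
+ #
+ #     _MY_TOOL_PY_COMPLETE=bash_source my-tool.py
+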
+ def __call__(self, *args: t.Any, **kwargs: t.Any) -> t.Any:
+ """Alias for :meth:`main`."""
+ return self.main(*args, **kwargs)
+
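+# A minimal invocation sketch (hypothetical command name). With
+# standalone_mode=False, main() returns the callback's return value
+# instead of handling errors and shutting down the interpreter:
+#
+#     @click.command()
+#     def hello():
+#         return 42
+#
+#     rv = hello.main([], standalone_mode=False)  # rv == 42
+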
+
+class _FakeSubclassCheck(type):
+ def __subclasscheck__(cls, subclass: type) -> bool:
+ return issubclass(subclass, cls.__bases__[0])
+
+ def __instancecheck__(cls, instance: t.Any) -> bool:
+ return isinstance(instance, cls.__bases__[0])
+
+
+class _BaseCommand(Command, metaclass=_FakeSubclassCheck):
+ """
+ .. deprecated:: 8.2
+ Will be removed in Click 9.0. Use ``Command`` instead.
+ """
+
+
+class Group(Command):
+ """A group is a command that nests other commands (or more groups).
+
+ :param name: The name of the group command.
+ :param commands: Map names to :class:`Command` objects. Can be a list, which
+ will use :attr:`Command.name` as the keys.
+ :param invoke_without_command: Invoke the group's callback even if a
+ subcommand is not given.
+ :param no_args_is_help: If no arguments are given, show the group's help and
+ exit. Defaults to the opposite of ``invoke_without_command``.
+ :param subcommand_metavar: How to represent the subcommand argument in help.
+ The default will represent whether ``chain`` is set or not.
+ :param chain: Allow passing more than one subcommand argument. After parsing
+ a command's arguments, if any arguments remain another command will be
+ matched, and so on.
+ :param result_callback: A function to call after the group's and
+ subcommand's callbacks. The value returned by the subcommand is passed.
+ If ``chain`` is enabled, the value will be a list of values returned by
+ all the commands. If ``invoke_without_command`` is enabled, the value
+ will be the value returned by the group's callback, or an empty list if
+ ``chain`` is enabled.
+ :param kwargs: Other arguments passed to :class:`Command`.
+
+ .. versionchanged:: 8.0
+ The ``commands`` argument can be a list of command objects.
+
+ .. versionchanged:: 8.2
+ Merged with and replaces the ``MultiCommand`` base class.
+ """
+
+ allow_extra_args = True
+ allow_interspersed_args = False
+
+ #: If set, this is used by the group's :meth:`command` decorator
+ #: as the default :class:`Command` class. This is useful to make all
+ #: subcommands use a custom command class.
+ #:
+ #: .. versionadded:: 8.0
+ command_class: type[Command] | None = None
+
+ #: If set, this is used by the group's :meth:`group` decorator
+ #: as the default :class:`Group` class. This is useful to make all
+ #: subgroups use a custom group class.
+ #:
+ #: If set to the special value :class:`type` (literally
+ #: ``group_class = type``), this group's class will be used as the
+ #: default class. This makes a custom group class continue to make
+ #: custom groups.
+ #:
+ #: .. versionadded:: 8.0
+ group_class: type[Group] | type[type] | None = None
+ # Literal[type] isn't valid, so use Type[type]
+
+ def __init__(
+ self,
+ name: str | None = None,
+ commands: cabc.MutableMapping[str, Command]
+ | cabc.Sequence[Command]
+ | None = None,
+ invoke_without_command: bool = False,
+ no_args_is_help: bool | None = None,
+ subcommand_metavar: str | None = None,
+ chain: bool = False,
+ result_callback: t.Callable[..., t.Any] | None = None,
+ **kwargs: t.Any,
+ ) -> None:
+ super().__init__(name, **kwargs)
+
+ if commands is None:
+ commands = {}
+ elif isinstance(commands, abc.Sequence):
+ commands = {c.name: c for c in commands if c.name is not None}
+
+ #: The registered subcommands by their exported names.
+ self.commands: cabc.MutableMapping[str, Command] = commands
+
+ if no_args_is_help is None:
+ no_args_is_help = not invoke_without_command
+
+ self.no_args_is_help = no_args_is_help
+ self.invoke_without_command = invoke_without_command
+
+ if subcommand_metavar is None:
+ if chain:
+ subcommand_metavar = "COMMAND1 [ARGS]... [COMMAND2 [ARGS]...]..."
+ else:
+ subcommand_metavar = "COMMAND [ARGS]..."
+
+ self.subcommand_metavar = subcommand_metavar
+ self.chain = chain
+ # The result callback that is stored. This can be set or
+ # overridden with the :func:`result_callback` decorator.
+ self._result_callback = result_callback
+
+ if self.chain:
+ for param in self.params:
+ if isinstance(param, Argument) and not param.required:
+ raise RuntimeError(
+ "A group in chain mode cannot have optional arguments."
+ )
+
+ def to_info_dict(self, ctx: Context) -> dict[str, t.Any]:
+ info_dict = super().to_info_dict(ctx)
+ commands = {}
+
+ for name in self.list_commands(ctx):
+ command = self.get_command(ctx, name)
+
+ if command is None:
+ continue
+
+ sub_ctx = ctx._make_sub_context(command)
+
+ with sub_ctx.scope(cleanup=False):
+ commands[name] = command.to_info_dict(sub_ctx)
+
+ info_dict.update(commands=commands, chain=self.chain)
+ return info_dict
+
+ def add_command(self, cmd: Command, name: str | None = None) -> None:
+ """Registers another :class:`Command` with this group. If the name
+ is not provided, the name of the command is used.
+ """
+ name = name or cmd.name
+ if name is None:
+ raise TypeError("Command has no name.")
+ _check_nested_chain(self, name, cmd, register=True)
+ self.commands[name] = cmd
+
+ @t.overload
+ def command(self, __func: t.Callable[..., t.Any]) -> Command: ...
+
+ @t.overload
+ def command(
+ self, *args: t.Any, **kwargs: t.Any
+ ) -> t.Callable[[t.Callable[..., t.Any]], Command]: ...
+
+ def command(
+ self, *args: t.Any, **kwargs: t.Any
+ ) -> t.Callable[[t.Callable[..., t.Any]], Command] | Command:
+ """A shortcut decorator for declaring and attaching a command to
+ the group. This takes the same arguments as :func:`command` and
+ immediately registers the created command with this group by
+ calling :meth:`add_command`.
+
+ To customize the command class used, set the
+ :attr:`command_class` attribute.
+
+ .. versionchanged:: 8.1
+ This decorator can be applied without parentheses.
+
+ .. versionchanged:: 8.0
+ Added the :attr:`command_class` attribute.
+ """
+ from .decorators import command
+
+ func: t.Callable[..., t.Any] | None = None
+
+ if args and callable(args[0]):
+ assert len(args) == 1 and not kwargs, (
+ "Use 'command(**kwargs)(callable)' to provide arguments."
+ )
+ (func,) = args
+ args = ()
+
+ if self.command_class and kwargs.get("cls") is None:
+ kwargs["cls"] = self.command_class
+
+ def decorator(f: t.Callable[..., t.Any]) -> Command:
+ cmd: Command = command(*args, **kwargs)(f)
+ self.add_command(cmd)
+ return cmd
+
+ if func is not None:
+ return decorator(func)
+
+ return decorator
+
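+ # A registration sketch (hypothetical names): the decorator both creates
+ # the command and attaches it to this group.
+ #
+ #     @click.group()
+ #     def cli(): ...
+ #
+ #     @cli.command()
+ #     def sync():
+ #         click.echo("syncing")
+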
+ @t.overload
+ def group(self, __func: t.Callable[..., t.Any]) -> Group: ...
+
+ @t.overload
+ def group(
+ self, *args: t.Any, **kwargs: t.Any
+ ) -> t.Callable[[t.Callable[..., t.Any]], Group]: ...
+
+ def group(
+ self, *args: t.Any, **kwargs: t.Any
+ ) -> t.Callable[[t.Callable[..., t.Any]], Group] | Group:
+ """A shortcut decorator for declaring and attaching a group to
+ the group. This takes the same arguments as :func:`group` and
+ immediately registers the created group with this group by
+ calling :meth:`add_command`.
+
+ To customize the group class used, set the :attr:`group_class`
+ attribute.
+
+ .. versionchanged:: 8.1
+ This decorator can be applied without parentheses.
+
+ .. versionchanged:: 8.0
+ Added the :attr:`group_class` attribute.
+ """
+ from .decorators import group
+
+ func: t.Callable[..., t.Any] | None = None
+
+ if args and callable(args[0]):
+ assert len(args) == 1 and not kwargs, (
+ "Use 'group(**kwargs)(callable)' to provide arguments."
+ )
+ (func,) = args
+ args = ()
+
+ if self.group_class is not None and kwargs.get("cls") is None:
+ if self.group_class is type:
+ kwargs["cls"] = type(self)
+ else:
+ kwargs["cls"] = self.group_class
+
+ def decorator(f: t.Callable[..., t.Any]) -> Group:
+ cmd: Group = group(*args, **kwargs)(f)
+ self.add_command(cmd)
+ return cmd
+
+ if func is not None:
+ return decorator(func)
+
+ return decorator
+
+ def result_callback(self, replace: bool = False) -> t.Callable[[F], F]:
+ """Adds a result callback to the command. By default if a
+ result callback is already registered this will chain them but
+ this can be disabled with the `replace` parameter. The result
+ callback is invoked with the return value of the subcommand
+ (or the list of return values from all subcommands if chaining
+ is enabled) as well as the parameters as they would be passed
+ to the main callback.
+
+ Example::
+
+ @click.group()
+ @click.option('-i', '--input', default=23)
+ def cli(input):
+ return 42
+
+ @cli.result_callback()
+ def process_result(result, input):
+ return result + input
+
+ :param replace: if set to `True` an already existing result
+ callback will be removed.
+
+ .. versionchanged:: 8.0
+ Renamed from ``resultcallback``.
+
+ .. versionadded:: 3.0
+ """
+
+ def decorator(f: F) -> F:
+ old_callback = self._result_callback
+
+ if old_callback is None or replace:
+ self._result_callback = f
+ return f
+
+ def function(value: t.Any, /, *args: t.Any, **kwargs: t.Any) -> t.Any:
+ inner = old_callback(value, *args, **kwargs)
+ return f(inner, *args, **kwargs)
+
+ self._result_callback = rv = update_wrapper(t.cast(F, function), f)
+ return rv # type: ignore[return-value]
+
+ return decorator
+
+ def get_command(self, ctx: Context, cmd_name: str) -> Command | None:
+ """Given a context and a command name, this returns a :class:`Command`
+ object if it exists or returns ``None``.
+ """
+ return self.commands.get(cmd_name)
+
+ def list_commands(self, ctx: Context) -> list[str]:
+ """Returns a list of subcommand names in the order they should appear."""
+ return sorted(self.commands)
+
+ def collect_usage_pieces(self, ctx: Context) -> list[str]:
+ rv = super().collect_usage_pieces(ctx)
+ rv.append(self.subcommand_metavar)
+ return rv
+
+ def format_options(self, ctx: Context, formatter: HelpFormatter) -> None:
+ super().format_options(ctx, formatter)
+ self.format_commands(ctx, formatter)
+
+ def format_commands(self, ctx: Context, formatter: HelpFormatter) -> None:
+ """Extra format methods for multi methods that adds all the commands
+ after the options.
+ """
+ commands = []
+ for subcommand in self.list_commands(ctx):
+ cmd = self.get_command(ctx, subcommand)
+ # The command listing lied about a command that does not resolve. Ignore it
+ if cmd is None:
+ continue
+ if cmd.hidden:
+ continue
+
+ commands.append((subcommand, cmd))
+
+ # allow for 3 times the default spacing
+ if commands:
+ limit = formatter.width - 6 - max(len(cmd[0]) for cmd in commands)
+
+ rows = []
+ for subcommand, cmd in commands:
+ help = cmd.get_short_help_str(limit)
+ rows.append((subcommand, help))
+
+ if rows:
+ with formatter.section(_("Commands")):
+ formatter.write_dl(rows)
+
+ def parse_args(self, ctx: Context, args: list[str]) -> list[str]:
+ if not args and self.no_args_is_help and not ctx.resilient_parsing:
+ raise NoArgsIsHelpError(ctx)
+
+ rest = super().parse_args(ctx, args)
+
+ if self.chain:
+ ctx._protected_args = rest
+ ctx.args = []
+ elif rest:
+ ctx._protected_args, ctx.args = rest[:1], rest[1:]
+
+ return ctx.args
+
+ def invoke(self, ctx: Context) -> t.Any:
+ def _process_result(value: t.Any) -> t.Any:
+ if self._result_callback is not None:
+ value = ctx.invoke(self._result_callback, value, **ctx.params)
+ return value
+
+ if not ctx._protected_args:
+ if self.invoke_without_command:
+ # No subcommand was invoked, so the result callback is
+ # invoked with the group return value for regular
+ # groups, or an empty list for chained groups.
+ with ctx:
+ rv = super().invoke(ctx)
+ return _process_result([] if self.chain else rv)
+ ctx.fail(_("Missing command."))
+
+ # Fetch args back out
+ args = [*ctx._protected_args, *ctx.args]
+ ctx.args = []
+ ctx._protected_args = []
+
+ # If we're not in chain mode, we only allow the invocation of a
+ # single command but we also inform the current context about the
+ # name of the command to invoke.
+ if not self.chain:
+ # Make sure the context is entered so we do not clean up
+ # resources until the result processor has worked.
+ with ctx:
+ cmd_name, cmd, args = self.resolve_command(ctx, args)
+ assert cmd is not None
+ ctx.invoked_subcommand = cmd_name
+ super().invoke(ctx)
+ sub_ctx = cmd.make_context(cmd_name, args, parent=ctx)
+ with sub_ctx:
+ return _process_result(sub_ctx.command.invoke(sub_ctx))
+
+ # In chain mode we create the contexts step by step, but after the
+ # base command has been invoked. Because at that point we do not
+ # know the subcommands yet, the invoked subcommand attribute is
+ # set to ``*`` to inform the command that subcommands are executed
+ # but nothing else.
+ with ctx:
+ ctx.invoked_subcommand = "*" if args else None
+ super().invoke(ctx)
+
+ # Otherwise we make every single context and invoke them in a
+ # chain. In that case the return value to the result processor
+ # is the list of all invoked subcommand's results.
+ contexts = []
+ while args:
+ cmd_name, cmd, args = self.resolve_command(ctx, args)
+ assert cmd is not None
+ sub_ctx = cmd.make_context(
+ cmd_name,
+ args,
+ parent=ctx,
+ allow_extra_args=True,
+ allow_interspersed_args=False,
+ )
+ contexts.append(sub_ctx)
+ args, sub_ctx.args = sub_ctx.args, []
+
+ rv = []
+ for sub_ctx in contexts:
+ with sub_ctx:
+ rv.append(sub_ctx.command.invoke(sub_ctx))
+ return _process_result(rv)
+
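+ # Chain-mode sketch (hypothetical names): with ``chain=True`` an
+ # invocation like ``cli lint fix export`` resolves and invokes each
+ # subcommand in turn, and the result callback receives the list of all
+ # three return values.
+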
+ def resolve_command(
+ self, ctx: Context, args: list[str]
+ ) -> tuple[str | None, Command | None, list[str]]:
+ cmd_name = make_str(args[0])
+ original_cmd_name = cmd_name
+
+ # Get the command
+ cmd = self.get_command(ctx, cmd_name)
+
+ # If we can't find the command but there is a normalization
+ # function available, we try with that one.
+ if cmd is None and ctx.token_normalize_func is not None:
+ cmd_name = ctx.token_normalize_func(cmd_name)
+ cmd = self.get_command(ctx, cmd_name)
+
+ # If we don't find the command we want to show an error message
+ # to the user that it was not provided. However, there is
+ # something else we should do: if the first argument looks like
+ # an option we want to kick off parsing again for arguments to
+ # resolve things like --help which now should go to the main
+ # place.
+ if cmd is None and not ctx.resilient_parsing:
+ if _split_opt(cmd_name)[0]:
+ self.parse_args(ctx, args)
+ ctx.fail(_("No such command {name!r}.").format(name=original_cmd_name))
+ return cmd_name if cmd else None, cmd, args[1:]
+
+ def shell_complete(self, ctx: Context, incomplete: str) -> list[CompletionItem]:
+ """Return a list of completions for the incomplete value. Looks
+ at the names of options, subcommands, and chained
+ multi-commands.
+
+ :param ctx: Invocation context for this command.
+ :param incomplete: Value being completed. May be empty.
+
+ .. versionadded:: 8.0
+ """
+ from click.shell_completion import CompletionItem
+
+ results = [
+ CompletionItem(name, help=command.get_short_help_str())
+ for name, command in _complete_visible_commands(ctx, incomplete)
+ ]
+ results.extend(super().shell_complete(ctx, incomplete))
+ return results
+
+
+class _MultiCommand(Group, metaclass=_FakeSubclassCheck):
+ """
+ .. deprecated:: 8.2
+ Will be removed in Click 9.0. Use ``Group`` instead.
+ """
+
+
+class CommandCollection(Group):
+ """A :class:`Group` that looks up subcommands on other groups. If a command
+ is not found on this group, each registered source is checked in order.
+ Parameters on a source are not added to this group, and a source's callback
+ is not invoked when invoking its commands. In other words, this "flattens"
+ commands in many groups into this one group.
+
+ :param name: The name of the group command.
+ :param sources: A list of :class:`Group` objects to look up commands from.
+ :param kwargs: Other arguments passed to :class:`Group`.
+
+ .. versionchanged:: 8.2
+ This is a subclass of ``Group``. Commands are looked up first on this
+ group, then each of its sources.
+ """
+
+ def __init__(
+ self,
+ name: str | None = None,
+ sources: list[Group] | None = None,
+ **kwargs: t.Any,
+ ) -> None:
+ super().__init__(name, **kwargs)
+ #: The list of registered groups.
+ self.sources: list[Group] = sources or []
+
+ def add_source(self, group: Group) -> None:
+ """Add a group as a source of commands."""
+ self.sources.append(group)
+
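+ # A merging sketch (hypothetical groups): commands registered directly on
+ # the collection win, otherwise each source is consulted in order.
+ #
+ #     merged = CommandCollection(sources=[tools_cli, admin_cli])
+ #     merged.add_source(extras_cli)
+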
+ def get_command(self, ctx: Context, cmd_name: str) -> Command | None:
+ rv = super().get_command(ctx, cmd_name)
+
+ if rv is not None:
+ return rv
+
+ for source in self.sources:
+ rv = source.get_command(ctx, cmd_name)
+
+ if rv is not None:
+ if self.chain:
+ _check_nested_chain(self, cmd_name, rv)
+
+ return rv
+
+ return None
+
+ def list_commands(self, ctx: Context) -> list[str]:
+ rv: set[str] = set(super().list_commands(ctx))
+
+ for source in self.sources:
+ rv.update(source.list_commands(ctx))
+
+ return sorted(rv)
+
+
+def _check_iter(value: t.Any) -> cabc.Iterator[t.Any]:
+ """Check if the value is iterable but not a string. Raises a type
+ error, or return an iterator over the value.
+ """
+ if isinstance(value, str):
+ raise TypeError
+
+ return iter(value)
+
+
+class Parameter:
+ r"""A parameter to a command comes in two versions: they are either
+ :class:`Option`\s or :class:`Argument`\s. Other subclasses are currently
+ not supported by design as some of the internals for parsing are
+ intentionally not finalized.
+
+ Some settings are supported by both options and arguments.
+
+ :param param_decls: the parameter declarations for this option or
+ argument. This is a list of flags or argument
+ names.
+ :param type: the type that should be used. Either a :class:`ParamType`
+ or a Python type. The latter is converted into the former
+ automatically if supported.
+ :param required: controls if this is optional or not.
+ :param default: the default value if omitted. This can also be a callable,
+ in which case it's invoked when the default is needed
+ without any arguments.
+ :param callback: A function to further process or validate the value
+ after type conversion. It is called as ``f(ctx, param, value)``
+ and must return the value. It is called for all sources,
+ including prompts.
+ :param nargs: the number of arguments to match. If not ``1`` the return
+ value is a tuple instead of single value. The default for
+ nargs is ``1`` (except if the type is a tuple, then it's
+ the arity of the tuple). If ``nargs=-1``, all remaining
+ parameters are collected.
+ :param metavar: how the value is represented in the help page.
+ :param expose_value: if this is `True` then the value is passed onwards
+ to the command callback and stored on the context,
+ otherwise it's skipped.
+ :param is_eager: eager values are processed before non-eager ones. This
+ should not be set for arguments or it will reverse the
+ order of processing.
+ :param envvar: environment variable(s) that are used to provide a default value for
+ this parameter. This can be a string or a sequence of strings. If a sequence is
+ given, only the first non-empty environment variable is used for the parameter.
+ :param shell_complete: A function that returns custom shell
+ completions. Used instead of the param's type completion if
+ given. Takes ``ctx, param, incomplete`` and must return a list
+ of :class:`~click.shell_completion.CompletionItem` or a list of
+ strings.
+ :param deprecated: If ``True`` or non-empty string, issues a message
+ indicating that the argument is deprecated and highlights
+ its deprecation in --help. The message can be customized
+ by using a string as the value. A deprecated parameter
+ cannot be required; a ValueError will be raised otherwise.
+
+ .. versionchanged:: 8.2.0
+ Introduction of ``deprecated``.
+
+ .. versionchanged:: 8.2
+ Adding duplicate parameter names to a :class:`~click.core.Command` will
+ result in a ``UserWarning`` being shown.
+
+ .. versionchanged:: 8.0
+ ``process_value`` validates required parameters and bounded
+ ``nargs``, and invokes the parameter callback before returning
+ the value. This allows the callback to validate prompts.
+ ``full_process_value`` is removed.
+
+ .. versionchanged:: 8.0
+ ``autocompletion`` is renamed to ``shell_complete`` and has new
+ semantics described above. The old name is deprecated and will
+ be removed in 8.1, until then it will be wrapped to match the
+ new requirements.
+
+ .. versionchanged:: 8.0
+ For ``multiple=True, nargs>1``, the default must be a list of
+ tuples.
+
+ .. versionchanged:: 8.0
+ Setting a default is no longer required for ``nargs>1``, it will
+ default to ``None``. ``multiple=True`` or ``nargs=-1`` will
+ default to ``()``.
+
+ .. versionchanged:: 7.1
+ Empty environment variables are ignored rather than taking the
+ empty string value. This makes it possible for scripts to clear
+ variables if they can't unset them.
+
+ .. versionchanged:: 2.0
+ Changed signature for parameter callback to also be passed the
+ parameter. The old callback format will still work, but it will
+ raise a warning to give you a chance to migrate the code easier.
+ """
+
+ param_type_name = "parameter"
+
+ def __init__(
+ self,
+ param_decls: cabc.Sequence[str] | None = None,
+ type: types.ParamType | t.Any | None = None,
+ required: bool = False,
+ # XXX The default historically embeds two concepts:
+ # - the declaration of a Parameter object carrying the default (handy to
+ # arbitrate the default value of coupled Parameters sharing the same
+ # self.name, like flag options),
+ # - and the actual value of the default.
+ # It is confusing and is the source of many issues discussed in:
+ # https://github.com/pallets/click/pull/3030
+ # In the future, we might think of splitting it in two, not unlike
+ # Option.is_flag and Option.flag_value: we could have something like
+ # Parameter.is_default and Parameter.default_value.
+ default: t.Any | t.Callable[[], t.Any] | None = UNSET,
+ callback: t.Callable[[Context, Parameter, t.Any], t.Any] | None = None,
+ nargs: int | None = None,
+ multiple: bool = False,
+ metavar: str | None = None,
+ expose_value: bool = True,
+ is_eager: bool = False,
+ envvar: str | cabc.Sequence[str] | None = None,
+ shell_complete: t.Callable[
+ [Context, Parameter, str], list[CompletionItem] | list[str]
+ ]
+ | None = None,
+ deprecated: bool | str = False,
+ ) -> None:
+ self.name: str | None
+ self.opts: list[str]
+ self.secondary_opts: list[str]
+ self.name, self.opts, self.secondary_opts = self._parse_decls(
+ param_decls or (), expose_value
+ )
+ self.type: types.ParamType = types.convert_type(type, default)
+
+ # Default nargs to what the type tells us if we have that
+ # information available.
+ if nargs is None:
+ if self.type.is_composite:
+ nargs = self.type.arity
+ else:
+ nargs = 1
+
+ self.required = required
+ self.callback = callback
+ self.nargs = nargs
+ self.multiple = multiple
+ self.expose_value = expose_value
+ self.default: t.Any | t.Callable[[], t.Any] | None = default
+ self.is_eager = is_eager
+ self.metavar = metavar
+ self.envvar = envvar
+ self._custom_shell_complete = shell_complete
+ self.deprecated = deprecated
+
+ if __debug__:
+ if self.type.is_composite and nargs != self.type.arity:
+ raise ValueError(
+ f"'nargs' must be {self.type.arity} (or None) for"
+ f" type {self.type!r}, but it was {nargs}."
+ )
+
+ if required and deprecated:
+ raise ValueError(
+ f"The {self.param_type_name} '{self.human_readable_name}' "
+ "is deprecated and still required. A deprecated "
+ f"{self.param_type_name} cannot be required."
+ )
+
+ def to_info_dict(self) -> dict[str, t.Any]:
+ """Gather information that could be useful for a tool generating
+ user-facing documentation.
+
+ Use :meth:`click.Context.to_info_dict` to traverse the entire
+ CLI structure.
+
+ .. versionchanged:: 8.3.0
+ Returns ``None`` for the :attr:`default` if it was not set.
+
+ .. versionadded:: 8.0
+ """
+ return {
+ "name": self.name,
+ "param_type_name": self.param_type_name,
+ "opts": self.opts,
+ "secondary_opts": self.secondary_opts,
+ "type": self.type.to_info_dict(),
+ "required": self.required,
+ "nargs": self.nargs,
+ "multiple": self.multiple,
+ # We explicitly hide the :attr:`UNSET` value from the user, as we choose to
+ # make it an implementation detail. And because ``to_info_dict`` has been
+ # designed for documentation purposes, we return ``None`` instead.
+ "default": self.default if self.default is not UNSET else None,
+ "envvar": self.envvar,
+ }
+
+ def __repr__(self) -> str:
+ return f"<{self.__class__.__name__} {self.name}>"
+
+ def _parse_decls(
+ self, decls: cabc.Sequence[str], expose_value: bool
+ ) -> tuple[str | None, list[str], list[str]]:
+ raise NotImplementedError()
+
+ @property
+ def human_readable_name(self) -> str:
+ """Returns the human readable name of this parameter. This is the
+ same as the name for options, but the metavar for arguments.
+ """
+ return self.name # type: ignore
+
+ def make_metavar(self, ctx: Context) -> str:
+ if self.metavar is not None:
+ return self.metavar
+
+ metavar = self.type.get_metavar(param=self, ctx=ctx)
+
+ if metavar is None:
+ metavar = self.type.name.upper()
+
+ if self.nargs != 1:
+ metavar += "..."
+
+ return metavar
+
+ @t.overload
+ def get_default(
+ self, ctx: Context, call: t.Literal[True] = True
+ ) -> t.Any | None: ...
+
+ @t.overload
+ def get_default(
+ self, ctx: Context, call: bool = ...
+ ) -> t.Any | t.Callable[[], t.Any] | None: ...
+
+ def get_default(
+ self, ctx: Context, call: bool = True
+ ) -> t.Any | t.Callable[[], t.Any] | None:
+ """Get the default for the parameter. Tries
+ :meth:`Context.lookup_default` first, then the local default.
+
+ :param ctx: Current context.
+ :param call: If the default is a callable, call it. Disable to
+ return the callable instead.
+
+ .. versionchanged:: 8.0.2
+ Type casting is no longer performed when getting a default.
+
+ .. versionchanged:: 8.0.1
+ Type casting can fail in resilient parsing mode. Invalid
+ defaults will not prevent showing help text.
+
+ .. versionchanged:: 8.0
+ Looks at ``ctx.default_map`` first.
+
+ .. versionchanged:: 8.0
+ Added the ``call`` parameter.
+ """
+ value = ctx.lookup_default(self.name, call=False) # type: ignore
+
+ if value is UNSET:
+ value = self.default
+
+ if call and callable(value):
+ value = value()
+
+ return value
+
+ def add_to_parser(self, parser: _OptionParser, ctx: Context) -> None:
+ raise NotImplementedError()
+
+ def consume_value(
+ self, ctx: Context, opts: cabc.Mapping[str, t.Any]
+ ) -> tuple[t.Any, ParameterSource]:
+ """Returns the parameter value produced by the parser.
+
+ If the parser did not produce a value from user input, the value is either
+ sourced from the environment variable, the default map, or the parameter's
+ default value, in that order of precedence.
+
+ If no value is found, an internal sentinel value is returned.
+
+ :meta private:
+ """
+ # Collect from the parse the value passed by the user to the CLI.
+ value = opts.get(self.name, UNSET) # type: ignore
+ # If the value is set, it means it was sourced from the command line by the
+ # parser; otherwise it was left unset by default.
+ source = (
+ ParameterSource.COMMANDLINE
+ if value is not UNSET
+ else ParameterSource.DEFAULT
+ )
+
+ if value is UNSET:
+ envvar_value = self.value_from_envvar(ctx)
+ if envvar_value is not None:
+ value = envvar_value
+ source = ParameterSource.ENVIRONMENT
+
+ if value is UNSET:
+ default_map_value = ctx.lookup_default(self.name) # type: ignore
+ if default_map_value is not UNSET:
+ value = default_map_value
+ source = ParameterSource.DEFAULT_MAP
+
+ if value is UNSET:
+ default_value = self.get_default(ctx)
+ if default_value is not UNSET:
+ value = default_value
+ source = ParameterSource.DEFAULT
+
+ return value, source
+
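+ # Precedence sketch: for a hypothetical option declared with
+ # ``envvar="APP_PORT"`` and ``default=8000``, a command-line value wins
+ # over APP_PORT, which wins over a ``default_map`` entry, which wins over
+ # the declared default of 8000.
+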
+ def type_cast_value(self, ctx: Context, value: t.Any) -> t.Any:
+ """Convert and validate a value against the parameter's
+ :attr:`type`, :attr:`multiple`, and :attr:`nargs`.
+ """
+ if value is None:
+ if self.multiple or self.nargs == -1:
+ return ()
+ else:
+ return value
+
+ def check_iter(value: t.Any) -> cabc.Iterator[t.Any]:
+ try:
+ return _check_iter(value)
+ except TypeError:
+ # This should only happen when passing in args manually,
+ # the parser should construct an iterable when parsing
+ # the command line.
+ raise BadParameter(
+ _("Value must be an iterable."), ctx=ctx, param=self
+ ) from None
+
+ # Define the conversion function based on nargs and type.
+
+ if self.nargs == 1 or self.type.is_composite:
+
+ def convert(value: t.Any) -> t.Any:
+ return self.type(value, param=self, ctx=ctx)
+
+ elif self.nargs == -1:
+
+ def convert(value: t.Any) -> t.Any: # tuple[t.Any, ...]
+ return tuple(self.type(x, self, ctx) for x in check_iter(value))
+
+ else: # nargs > 1
+
+ def convert(value: t.Any) -> t.Any: # tuple[t.Any, ...]
+ value = tuple(check_iter(value))
+
+ if len(value) != self.nargs:
+ raise BadParameter(
+ ngettext(
+ "Takes {nargs} values but 1 was given.",
+ "Takes {nargs} values but {len} were given.",
+ len(value),
+ ).format(nargs=self.nargs, len=len(value)),
+ ctx=ctx,
+ param=self,
+ )
+
+ return tuple(self.type(x, self, ctx) for x in value)
+
+ if self.multiple:
+ return tuple(convert(x) for x in check_iter(value))
+
+ return convert(value)
+
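+ # Conversion sketch with ``type=int`` (hypothetical values):
+ #
+ #     nargs=1:                 "4"                    -> 4
+ #     nargs=2:                 ("1", "2")             -> (1, 2)
+ #     multiple=True, nargs=2:  [("1", "2"), ("3", "4")] -> ((1, 2), (3, 4))
+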
+ def value_is_missing(self, value: t.Any) -> bool:
+ """A value is considered missing if:
+
+ - it is :attr:`UNSET`,
+ - or if it is an empty sequence while the parameter is supposed to have a
+ non-single value (i.e. :attr:`nargs` is not ``1`` or :attr:`multiple` is
+ set).
+
+ :meta private:
+ """
+ if value is UNSET:
+ return True
+
+ if (self.nargs != 1 or self.multiple) and value == ():
+ return True
+
+ return False
+
+ def process_value(self, ctx: Context, value: t.Any) -> t.Any:
+ """Process the value of this parameter:
+
+ 1. Type cast the value using :meth:`type_cast_value`.
+ 2. Check if the value is missing (see: :meth:`value_is_missing`), and raise
+ :exc:`MissingParameter` if it is required.
+ 3. If a :attr:`callback` is set, call it to have the value replaced by the
+ result of the callback. If the value was not set, the callback receives
+ ``None``. This keeps the legacy behavior as it was before the introduction
+ of the :attr:`UNSET` sentinel.
+
+ :meta private:
+ """
+ # shelter `type_cast_value` from ever seeing an `UNSET` value by handling the
+ # cases in which `UNSET` gets special treatment explicitly at this layer
+ #
+ # Refs:
+ # https://github.com/pallets/click/issues/3069
+ if value is UNSET:
+ if self.multiple or self.nargs == -1:
+ value = ()
+ else:
+ value = self.type_cast_value(ctx, value)
+
+ if self.required and self.value_is_missing(value):
+ raise MissingParameter(ctx=ctx, param=self)
+
+ if self.callback is not None:
+ # Legacy case: UNSET is not exposed directly to the callback, but converted
+ # to None.
+ if value is UNSET:
+ value = None
+
+ # Search for parameters with UNSET values in the context.
+ unset_keys = {k: None for k, v in ctx.params.items() if v is UNSET}
+ # No UNSET values, call the callback as usual.
+ if not unset_keys:
+ value = self.callback(ctx, self, value)
+
+ # Legacy case: provide a temporarily manipulated context to the callback
+ # to hide UNSET values as None.
+ #
+ # Refs:
+ # https://github.com/pallets/click/issues/3136
+ # https://github.com/pallets/click/pull/3137
+ else:
+ # Add another layer to the context stack to clearly hint that the
+ # context is temporarily modified.
+ with ctx:
+ # Update the context parameters to replace UNSET with None.
+ ctx.params.update(unset_keys)
+ # Feed these fake context parameters to the callback.
+ value = self.callback(ctx, self, value)
+ # Restore the UNSET values in the context parameters.
+ ctx.params.update(
+ {
+ k: UNSET
+ for k in unset_keys
+ # Only restore keys that are present and still None, in case
+ # the callback modified other parameters.
+ if k in ctx.params and ctx.params[k] is None
+ }
+ )
+
+ return value
+
+ def resolve_envvar_value(self, ctx: Context) -> str | None:
+ """Returns the value found in the environment variable(s) attached to this
+ parameter.
+
+ Environment variable values are always returned as strings.
+
+ This method returns ``None`` if:
+
+ - the :attr:`envvar` property is not set on the :class:`Parameter`,
+ - the environment variable is not found in the environment,
+ - the variable is found in the environment but its value is empty (i.e. the
+ environment variable is present but has an empty string).
+
+ If :attr:`envvar` is set up with multiple environment variables,
+ then only the first non-empty value is returned.
+
+ .. caution::
+
+ The raw value extracted from the environment is not normalized and is
+ returned as-is. Any normalization or reconciliation is performed later by
+ the :class:`Parameter`'s :attr:`type`.
+
+ :meta private:
+ """
+ if not self.envvar:
+ return None
+
+ if isinstance(self.envvar, str):
+ rv = os.environ.get(self.envvar)
+
+ if rv:
+ return rv
+ else:
+ for envvar in self.envvar:
+ rv = os.environ.get(envvar)
+
+ # Return the first non-empty value of the list of environment variables.
+ if rv:
+ return rv
+ # Else, absence of value is interpreted as an environment variable that
+ # is not set, so proceed to the next one.
+
+ return None
+
+ def value_from_envvar(self, ctx: Context) -> str | cabc.Sequence[str] | None:
+ """Process the raw environment variable string for this parameter.
+
+ Returns the string as-is or splits it into a sequence of strings if the
+ parameter is expecting multiple values (i.e. its :attr:`nargs` property is set
+ to a value other than ``1``).
+
+ :meta private:
+ """
+ rv = self.resolve_envvar_value(ctx)
+
+ if rv is not None and self.nargs != 1:
+ return self.type.split_envvar_value(rv)
+
+ return rv
+
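+ # Splitting sketch: for a hypothetical parameter with ``nargs=2``, an
+ # environment value of "1 2" is split (on whitespace by default, via
+ # ``type.split_envvar_value``) into ["1", "2"]; type conversion happens
+ # later in :meth:`process_value`.
+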
+ def handle_parse_result(
+ self, ctx: Context, opts: cabc.Mapping[str, t.Any], args: list[str]
+ ) -> tuple[t.Any, list[str]]:
+ """Process the value produced by the parser from user input.
+
+ Always process the value through the Parameter's :attr:`type`, wherever it
+ comes from.
+
+ If the parameter is deprecated, this method warns the user about it, but
+ only if the value has been explicitly set by the user (and as such is not
+ coming from a default).
+
+ :meta private:
+ """
+ with augment_usage_errors(ctx, param=self):
+ value, source = self.consume_value(ctx, opts)
+
+ ctx.set_parameter_source(self.name, source) # type: ignore
+
+ # Display a deprecation warning if necessary.
+ if (
+ self.deprecated
+ and value is not UNSET
+ and source not in (ParameterSource.DEFAULT, ParameterSource.DEFAULT_MAP)
+ ):
+ extra_message = (
+ f" {self.deprecated}" if isinstance(self.deprecated, str) else ""
+ )
+ message = _(
+ "DeprecationWarning: The {param_type} {name!r} is deprecated."
+ "{extra_message}"
+ ).format(
+ param_type=self.param_type_name,
+ name=self.human_readable_name,
+ extra_message=extra_message,
+ )
+ echo(style(message, fg="red"), err=True)
+
+ # Process the value through the parameter's type.
+ try:
+ value = self.process_value(ctx, value)
+ except Exception:
+ if not ctx.resilient_parsing:
+ raise
+ # In resilient parsing mode, we do not want to fail the command if the
+ # value is incompatible with the parameter type, so we reset the value
+ # to UNSET, which will be interpreted as a missing value.
+ value = UNSET
+
+ # Add parameter's value to the context.
+ if (
+ self.expose_value
+ # We skip adding the value if it was previously set by another parameter
+ # targeting the same variable name. This prevents parameters competing for
+ # the same name to override each other.
+ and (self.name not in ctx.params or ctx.params[self.name] is UNSET)
+ ):
+ # Click is logically enforcing that the name is None if the parameter is
+ # not to be exposed. We still assert it here to please the type checker.
+ assert self.name is not None, (
+ f"{self!r} parameter's name should not be None when exposing value."
+ )
+ ctx.params[self.name] = value
+
+ return value, args
+
+ def get_help_record(self, ctx: Context) -> tuple[str, str] | None:
+ pass
+
+ def get_usage_pieces(self, ctx: Context) -> list[str]:
+ return []
+
+ def get_error_hint(self, ctx: Context) -> str:
+ """Get a stringified version of the param for use in error messages to
+ indicate which param caused the error.
+ """
+ hint_list = self.opts or [self.human_readable_name]
+ return " / ".join(f"'{x}'" for x in hint_list)
+
+ def shell_complete(self, ctx: Context, incomplete: str) -> list[CompletionItem]:
+ """Return a list of completions for the incomplete value. If a
+ ``shell_complete`` function was given during init, it is used.
+ Otherwise, the :attr:`type`
+ :meth:`~click.types.ParamType.shell_complete` function is used.
+
+ :param ctx: Invocation context for this command.
+ :param incomplete: Value being completed. May be empty.
+
+ .. versionadded:: 8.0
+ """
+ if self._custom_shell_complete is not None:
+ results = self._custom_shell_complete(ctx, self, incomplete)
+
+ if results and isinstance(results[0], str):
+ from click.shell_completion import CompletionItem
+
+ results = [CompletionItem(c) for c in results]
+
+ return t.cast("list[CompletionItem]", results)
+
+ return self.type.shell_complete(ctx, self, incomplete)
+
+
+class Option(Parameter):
+ """Options are usually optional values on the command line and
+ have some extra features that arguments don't have.
+
+ All other parameters are passed onwards to the parameter constructor.
+
+ :param show_default: Show the default value for this option in its
+ help text. Values are not shown by default, unless
+ :attr:`Context.show_default` is ``True``. If this value is a
+ string, it shows that string in parentheses instead of the
+ actual value. This is particularly useful for dynamic options.
+ For single option boolean flags, the default remains hidden if
+ its value is ``False``.
+ :param show_envvar: Controls if an environment variable should be
+ shown on the help page and error messages.
+ Normally, environment variables are not shown.
+ :param prompt: If set to ``True`` or a non empty string then the
+ user will be prompted for input. If set to ``True`` the prompt
+ will be the option name capitalized. A deprecated option cannot be
+ prompted.
+ :param confirmation_prompt: Prompt a second time to confirm the
+ value if it was prompted for. Can be set to a string instead of
+ ``True`` to customize the message.
+ :param prompt_required: If set to ``False``, the user will be
+ prompted for input only when the option was specified as a flag
+ without a value.
+ :param hide_input: If this is ``True`` then the input on the prompt
+ will be hidden from the user. This is useful for password input.
+ :param is_flag: forces this option to act as a flag. The default is
+ auto detection.
+ :param flag_value: which value should be used for this flag if it's
+ enabled. This is set to a boolean automatically if
+ the option string contains a slash to mark two options.
+ :param multiple: if this is set to `True` then the argument is accepted
+ multiple times and recorded. This is similar to ``nargs``
+ in how it works but supports an arbitrary number of
+ arguments.
+ :param count: this flag makes an option increment an integer.
+ :param allow_from_autoenv: if this is enabled then the value of this
+ parameter will be pulled from an environment
+ variable in case a prefix is defined on the
+ context.
+ :param help: the help string.
+ :param hidden: hide this option from help outputs.
+ :param attrs: Other command arguments described in :class:`Parameter`.
+
+ .. versionchanged:: 8.2
+ ``envvar`` used with ``flag_value`` will always use the ``flag_value``,
+ previously it would use the value of the environment variable.
+
+ .. versionchanged:: 8.1
+ Help text indentation is cleaned here instead of only in the
+ ``@option`` decorator.
+
+ .. versionchanged:: 8.1
+ The ``show_default`` parameter overrides
+ ``Context.show_default``.
+
+ .. versionchanged:: 8.1
+ The default of a single option boolean flag is not shown if the
+ default value is ``False``.
+
+ .. versionchanged:: 8.0.1
+ ``type`` is detected from ``flag_value`` if given.
+ """
+
+ param_type_name = "option"
+
+ def __init__(
+ self,
+ param_decls: cabc.Sequence[str] | None = None,
+ show_default: bool | str | None = None,
+ prompt: bool | str = False,
+ confirmation_prompt: bool | str = False,
+ prompt_required: bool = True,
+ hide_input: bool = False,
+ is_flag: bool | None = None,
+ flag_value: t.Any = UNSET,
+ multiple: bool = False,
+ count: bool = False,
+ allow_from_autoenv: bool = True,
+ type: types.ParamType | t.Any | None = None,
+ help: str | None = None,
+ hidden: bool = False,
+ show_choices: bool = True,
+ show_envvar: bool = False,
+ deprecated: bool | str = False,
+ **attrs: t.Any,
+ ) -> None:
+ if help:
+ help = inspect.cleandoc(help)
+
+ super().__init__(
+ param_decls, type=type, multiple=multiple, deprecated=deprecated, **attrs
+ )
+
+ if prompt is True:
+ if self.name is None:
+ raise TypeError("'name' is required with 'prompt=True'.")
+
+ prompt_text: str | None = self.name.replace("_", " ").capitalize()
+ elif prompt is False:
+ prompt_text = None
+ else:
+ prompt_text = prompt
+
+ if deprecated:
+ deprecated_message = (
+ f"(DEPRECATED: {deprecated})"
+ if isinstance(deprecated, str)
+ else "(DEPRECATED)"
+ )
+ help = help + deprecated_message if help is not None else deprecated_message
+
+ self.prompt = prompt_text
+ self.confirmation_prompt = confirmation_prompt
+ self.prompt_required = prompt_required
+ self.hide_input = hide_input
+ self.hidden = hidden
+
+ # The _flag_needs_value property tells the parser that this option is a flag
+ # that cannot be used standalone and needs a value. With this information, the
+ # parser can determine whether to consider the next user-provided argument in
+ # the CLI as a value for this flag or as a new option.
+ # If prompt is enabled but not required, then it opens the possibility for the
+ # option to get its value from the user.
+ self._flag_needs_value = self.prompt is not None and not self.prompt_required
+
+ # Auto-detect if this is a flag or not.
+ if is_flag is None:
+ # Implicitly a flag because flag_value was set.
+ if flag_value is not UNSET:
+ is_flag = True
+ # Not a flag, but when used as a flag it shows a prompt.
+ elif self._flag_needs_value:
+ is_flag = False
+ # Implicitly a flag because secondary options names were given.
+ elif self.secondary_opts:
+ is_flag = True
+ # The option is explicitly not a flag. But we do not know yet if it needs a
+ # value or not. So we look at the default value to determine it.
+ elif is_flag is False and not self._flag_needs_value:
+ self._flag_needs_value = self.default is UNSET
+
+ if is_flag:
+ # Set missing default for flags if not explicitly required or prompted.
+ if self.default is UNSET and not self.required and not self.prompt:
+ if multiple:
+ self.default = ()
+
+ # Auto-detect the type of the flag based on the flag_value.
+ if type is None:
+ # A flag without a flag_value is a boolean flag.
+ if flag_value is UNSET:
+ self.type: types.ParamType = types.BoolParamType()
+ # If the flag value is a boolean, use BoolParamType.
+ elif isinstance(flag_value, bool):
+ self.type = types.BoolParamType()
+ # Otherwise, guess the type from the flag value.
+ else:
+ self.type = types.convert_type(None, flag_value)
+
+ self.is_flag: bool = bool(is_flag)
+ self.is_bool_flag: bool = bool(
+ is_flag and isinstance(self.type, types.BoolParamType)
+ )
+ self.flag_value: t.Any = flag_value
+
+ # Set boolean flag default to False if unset and not required.
+ if self.is_bool_flag:
+ if self.default is UNSET and not self.required:
+ self.default = False
+
+ # Support the special case of aligning the default value with the flag_value
+ # for flags whose default is explicitly set to True. Note that as long as we
+ # have this condition, there is no way a flag can have a default set to True,
+ # and a flag_value set to something else. Refs:
+ # https://github.com/pallets/click/issues/3024#issuecomment-3146199461
+ # https://github.com/pallets/click/pull/3030/commits/06847da
+ if self.default is True and self.flag_value is not UNSET:
+ self.default = self.flag_value
+
+ # Set the default flag_value if it is not set.
+ if self.flag_value is UNSET:
+ if self.is_flag:
+ self.flag_value = True
+ else:
+ self.flag_value = None
+
+ # Counting.
+ self.count = count
+ if count:
+ if type is None:
+ self.type = types.IntRange(min=0)
+ if self.default is UNSET:
+ self.default = 0
+
+ self.allow_from_autoenv = allow_from_autoenv
+ self.help = help
+ self.show_default = show_default
+ self.show_choices = show_choices
+ self.show_envvar = show_envvar
+
+ if __debug__:
+ if deprecated and prompt:
+ raise ValueError("`deprecated` options cannot use `prompt`.")
+
+ if self.nargs == -1:
+ raise TypeError("nargs=-1 is not supported for options.")
+
+ if not self.is_bool_flag and self.secondary_opts:
+ raise TypeError("Secondary flag is not valid for non-boolean flag.")
+
+ if self.is_bool_flag and self.hide_input and self.prompt is not None:
+ raise TypeError(
+ "'prompt' with 'hide_input' is not valid for boolean flag."
+ )
+
+ if self.count:
+ if self.multiple:
+ raise TypeError("'count' is not valid with 'multiple'.")
+
+ if self.is_flag:
+ raise TypeError("'count' is not valid with 'is_flag'.")
+
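+ # Auto-detection sketch (hypothetical declarations):
+ #
+ #     Option(["--debug/--no-debug"])        # secondary opts -> boolean flag
+ #     Option(["--fmt"], flag_value="json")  # flag_value set -> flag
+ #     Option(["--name"])                    # plain value option
+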
+ def to_info_dict(self) -> dict[str, t.Any]:
+ """
+ .. versionchanged:: 8.3.0
+ Returns ``None`` for the :attr:`flag_value` if it was not set.
+ """
+ info_dict = super().to_info_dict()
+ info_dict.update(
+ help=self.help,
+ prompt=self.prompt,
+ is_flag=self.is_flag,
+ # We explicitly hide the :attr:`UNSET` value from the user, as we choose to
+ # make it an implementation detail. And because ``to_info_dict`` has been
+ # designed for documentation purposes, we return ``None`` instead.
+ flag_value=self.flag_value if self.flag_value is not UNSET else None,
+ count=self.count,
+ hidden=self.hidden,
+ )
+ return info_dict
+
+ def get_error_hint(self, ctx: Context) -> str:
+ result = super().get_error_hint(ctx)
+ if self.show_envvar and self.envvar is not None:
+ result += f" (env var: '{self.envvar}')"
+ return result
+
+ def _parse_decls(
+ self, decls: cabc.Sequence[str], expose_value: bool
+ ) -> tuple[str | None, list[str], list[str]]:
+ opts = []
+ secondary_opts = []
+ name = None
+ possible_names = []
+
+ for decl in decls:
+ if decl.isidentifier():
+ if name is not None:
+ raise TypeError(f"Name '{name}' defined twice")
+ name = decl
+ else:
+ split_char = ";" if decl[:1] == "/" else "/"
+ if split_char in decl:
+ first, second = decl.split(split_char, 1)
+ first = first.rstrip()
+ if first:
+ possible_names.append(_split_opt(first))
+ opts.append(first)
+ second = second.lstrip()
+ if second:
+ secondary_opts.append(second.lstrip())
+ if first == second:
+ raise ValueError(
+ f"Boolean option {decl!r} cannot use the"
+ " same flag for true/false."
+ )
+ else:
+ possible_names.append(_split_opt(decl))
+ opts.append(decl)
+
+ if name is None and possible_names:
+ possible_names.sort(key=lambda x: -len(x[0])) # group long options first
+ name = possible_names[0][1].replace("-", "_").lower()
+ if not name.isidentifier():
+ name = None
+
+ if name is None:
+ if not expose_value:
+ return None, opts, secondary_opts
+ raise TypeError(
+ f"Could not determine name for option with declarations {decls!r}"
+ )
+
+ if not opts and not secondary_opts:
+ raise TypeError(
+ f"No options defined but a name was passed ({name})."
+ " Did you mean to declare an argument instead? Did"
+ f" you mean to pass '--{name}'?"
+ )
+
+ return name, opts, secondary_opts
+
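+ # Parsing sketch (hypothetical declarations): ("-v", "--verbose") yields
+ # the name "verbose" with opts ["-v", "--verbose"], the longest prefix
+ # winning the name; ("--shout/--no-shout",) yields the name "shout" with
+ # opts ["--shout"] and secondary_opts ["--no-shout"].
+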
+ def add_to_parser(self, parser: _OptionParser, ctx: Context) -> None:
+ if self.multiple:
+ action = "append"
+ elif self.count:
+ action = "count"
+ else:
+ action = "store"
+
+ if self.is_flag:
+ action = f"{action}_const"
+
+ if self.is_bool_flag and self.secondary_opts:
+ parser.add_option(
+ obj=self, opts=self.opts, dest=self.name, action=action, const=True
+ )
+ parser.add_option(
+ obj=self,
+ opts=self.secondary_opts,
+ dest=self.name,
+ action=action,
+ const=False,
+ )
+ else:
+ parser.add_option(
+ obj=self,
+ opts=self.opts,
+ dest=self.name,
+ action=action,
+ const=self.flag_value,
+ )
+ else:
+ parser.add_option(
+ obj=self,
+ opts=self.opts,
+ dest=self.name,
+ action=action,
+ nargs=self.nargs,
+ )
+
+ def get_help_record(self, ctx: Context) -> tuple[str, str] | None:
+ if self.hidden:
+ return None
+
+ any_prefix_is_slash = False
+
+ def _write_opts(opts: cabc.Sequence[str]) -> str:
+ nonlocal any_prefix_is_slash
+
+ rv, any_slashes = join_options(opts)
+
+ if any_slashes:
+ any_prefix_is_slash = True
+
+ if not self.is_flag and not self.count:
+ rv += f" {self.make_metavar(ctx=ctx)}"
+
+ return rv
+
+ rv = [_write_opts(self.opts)]
+
+ if self.secondary_opts:
+ rv.append(_write_opts(self.secondary_opts))
+
+ help = self.help or ""
+
+ extra = self.get_help_extra(ctx)
+ extra_items = []
+ if "envvars" in extra:
+ extra_items.append(
+ _("env var: {var}").format(var=", ".join(extra["envvars"]))
+ )
+ if "default" in extra:
+ extra_items.append(_("default: {default}").format(default=extra["default"]))
+ if "range" in extra:
+ extra_items.append(extra["range"])
+ if "required" in extra:
+ extra_items.append(_(extra["required"]))
+
+ if extra_items:
+ extra_str = "; ".join(extra_items)
+ help = f"{help} [{extra_str}]" if help else f"[{extra_str}]"
+
+ return ("; " if any_prefix_is_slash else " / ").join(rv), help
+
+ def get_help_extra(self, ctx: Context) -> types.OptionHelpExtra:
+ extra: types.OptionHelpExtra = {}
+
+ if self.show_envvar:
+ envvar = self.envvar
+
+ if envvar is None:
+ if (
+ self.allow_from_autoenv
+ and ctx.auto_envvar_prefix is not None
+ and self.name is not None
+ ):
+ envvar = f"{ctx.auto_envvar_prefix}_{self.name.upper()}"
+
+ if envvar is not None:
+ if isinstance(envvar, str):
+ extra["envvars"] = (envvar,)
+ else:
+ extra["envvars"] = tuple(str(d) for d in envvar)
+
+ # Temporarily enable resilient parsing to avoid type casting
+ # failing for the default. Might be possible to extend this to
+ # help formatting in general.
+ resilient = ctx.resilient_parsing
+ ctx.resilient_parsing = True
+
+ try:
+ default_value = self.get_default(ctx, call=False)
+ finally:
+ ctx.resilient_parsing = resilient
+
+ show_default = False
+ show_default_is_str = False
+
+ if self.show_default is not None:
+ if isinstance(self.show_default, str):
+ show_default_is_str = show_default = True
+ else:
+ show_default = self.show_default
+ elif ctx.show_default is not None:
+ show_default = ctx.show_default
+
+ if show_default_is_str or (
+ show_default and (default_value not in (None, UNSET))
+ ):
+ if show_default_is_str:
+ default_string = f"({self.show_default})"
+ elif isinstance(default_value, (list, tuple)):
+ default_string = ", ".join(str(d) for d in default_value)
+ elif isinstance(default_value, enum.Enum):
+ default_string = default_value.name
+ elif inspect.isfunction(default_value):
+ default_string = _("(dynamic)")
+ elif self.is_bool_flag and self.secondary_opts:
+ # For boolean flags that have distinct True/False opts,
+ # use the opt without prefix instead of the value.
+ default_string = _split_opt(
+ (self.opts if default_value else self.secondary_opts)[0]
+ )[1]
+ elif self.is_bool_flag and not self.secondary_opts and not default_value:
+ default_string = ""
+ elif default_value == "":
+ default_string = '""'
+ else:
+ default_string = str(default_value)
+
+ if default_string:
+ extra["default"] = default_string
+
+ if (
+ isinstance(self.type, types._NumberRangeBase)
+ # skip count with default range type
+ and not (self.count and self.type.min == 0 and self.type.max is None)
+ ):
+ range_str = self.type._describe_range()
+
+ if range_str:
+ extra["range"] = range_str
+
+ if self.required:
+ extra["required"] = "required"
+
+ return extra
+
+ def prompt_for_value(self, ctx: Context) -> t.Any:
+ """This is an alternative flow that can be activated in the full
+ value processing if a value does not exist. It will prompt the
+ user until a valid value exists and then returns the processed
+ value as result.
+ """
+ assert self.prompt is not None
+
+ # Calculate the default before prompting anything to lock in the value before
+ # attempting any user interaction.
+ default = self.get_default(ctx)
+
+ # A boolean flag can use a simplified [y/n] confirmation prompt.
+ if self.is_bool_flag:
+ # If we have no boolean default, we force the user to explicitly provide
+ # one.
+ if default in (UNSET, None):
+ default = None
+ # Nothing prevents you from declaring an option that is simultaneously:
+ # 1) auto-detected as a boolean flag,
+ # 2) allowed to prompt, and
+ # 3) still declare a non-boolean default.
+ # This forced casting into a boolean is necessary to align any non-boolean
+ # default to the prompt, which is going to be a [y/n]-style confirmation
+ # because the option is still a boolean flag. That way, instead of [y/n],
+ # we get [Y/n] or [y/N] depending on the truthy value of the default.
+ # Refs: https://github.com/pallets/click/pull/3030#discussion_r2289180249
+ else:
+ default = bool(default)
+ return confirm(self.prompt, default)
+
+ # If show_default is set to True/False, provide this to `prompt` as well. For
+ # non-bool values of `show_default`, we use `prompt`'s default behavior
+ prompt_kwargs: t.Any = {}
+ if isinstance(self.show_default, bool):
+ prompt_kwargs["show_default"] = self.show_default
+
+ return prompt(
+ self.prompt,
+ # Use ``None`` to inform the prompt() function to reiterate until a valid
+ # value is provided by the user if we have no default.
+ default=None if default is UNSET else default,
+ type=self.type,
+ hide_input=self.hide_input,
+ show_choices=self.show_choices,
+ confirmation_prompt=self.confirmation_prompt,
+ value_proc=lambda x: self.process_value(ctx, x),
+ **prompt_kwargs,
+ )
+
+ def resolve_envvar_value(self, ctx: Context) -> str | None:
+ """:class:`Option` resolves its environment variable the same way as
+ :func:`Parameter.resolve_envvar_value`, but it also supports
+ :attr:`Context.auto_envvar_prefix`. If we could not find an environment
+ variable from the :attr:`envvar` property, we fall back on
+ :attr:`Context.auto_envvar_prefix` to dynamically build the environment
+ variable name using the
+ :python:`{ctx.auto_envvar_prefix}_{self.name.upper()}` template.
+
+ :meta private:
+ """
+ rv = super().resolve_envvar_value(ctx)
+
+ if rv is not None:
+ return rv
+
+ if (
+ self.allow_from_autoenv
+ and ctx.auto_envvar_prefix is not None
+ and self.name is not None
+ ):
+ envvar = f"{ctx.auto_envvar_prefix}_{self.name.upper()}"
+ rv = os.environ.get(envvar)
+
+ if rv:
+ return rv
+
+ return None
+
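+ # Auto-envvar sketch: with a hypothetical ``auto_envvar_prefix="MYTOOL"``
+ # on the context, an option named "debug" with no explicit envvar falls
+ # back to the MYTOOL_DEBUG environment variable.
+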
+ def value_from_envvar(self, ctx: Context) -> t.Any:
+ """For :class:`Option`, this method processes the raw environment variable
+ string the same way as :func:`Parameter.value_from_envvar` does.
+
+ But in the case of non-boolean flags, the value is analyzed to determine if the
+ flag is activated or not; this method then returns a boolean indicating
+ its activation, or the :attr:`flag_value` if the latter is set.
+
+ This method also takes care of repeated options (i.e. options with
+ :attr:`multiple` set to ``True``).
+
+ :meta private:
+ """
+ rv = self.resolve_envvar_value(ctx)
+
+ # Absent environment variable or an empty string is interpreted as unset.
+ if rv is None:
+ return None
+
+ # Non-boolean flags are more liberal in what they accept. But a flag being a
+ # flag, its envvar value still needs to be analyzed to determine if the flag is
+ # activated or not.
+ if self.is_flag and not self.is_bool_flag:
+            # If the flag_value is set and matches the envvar value, return it
+            # directly.
+ if self.flag_value is not UNSET and rv == self.flag_value:
+ return self.flag_value
+ # Analyze the envvar value as a boolean to know if the flag is
+ # activated or not.
+ return types.BoolParamType.str_to_bool(rv)
+
+ # Split the envvar value if it is allowed to be repeated.
+ value_depth = (self.nargs != 1) + bool(self.multiple)
+ if value_depth > 0:
+ multi_rv = self.type.split_envvar_value(rv)
+ if self.multiple and self.nargs != 1:
+ multi_rv = batch(multi_rv, self.nargs) # type: ignore[assignment]
+
+ return multi_rv
+
+ return rv
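+
+    # A rough sketch of the envvar splitting (hypothetical names): an option
+    # declared with ``multiple=True`` has its raw envvar string split by the
+    # type's ``split_envvar_value()``, which splits on whitespace by default:
+    #
+    #     @click.option("--tag", "tags", multiple=True, envvar="TAGS")
+    #
+    #     TAGS="a b c" myapp   ->   tags == ("a", "b", "c")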
+
+ def consume_value(
+ self, ctx: Context, opts: cabc.Mapping[str, Parameter]
+ ) -> tuple[t.Any, ParameterSource]:
+ """For :class:`Option`, the value can be collected from an interactive prompt
+ if the option is a flag that needs a value (and the :attr:`prompt` property is
+ set).
+
+        Additionally, this method handles flag options that are activated without
+        a value, in which case the :attr:`flag_value` is returned.
+
+ :meta private:
+ """
+ value, source = super().consume_value(ctx, opts)
+
+        # The parser will emit a sentinel value if the option is allowed to be
+        # used as a flag without a value.
+ if value is FLAG_NEEDS_VALUE:
+ # If the option allows for a prompt, we start an interaction with the user.
+ if self.prompt is not None and not ctx.resilient_parsing:
+ value = self.prompt_for_value(ctx)
+ source = ParameterSource.PROMPT
+ # Else the flag takes its flag_value as value.
+ else:
+ value = self.flag_value
+ source = ParameterSource.COMMANDLINE
+
+        # A flag which is activated always returns the flag value, unless the value
+        # comes from an explicitly set default.
+ elif (
+ self.is_flag
+ and value is True
+ and not self.is_bool_flag
+ and source not in (ParameterSource.DEFAULT, ParameterSource.DEFAULT_MAP)
+ ):
+ value = self.flag_value
+
+        # Re-interpret a multiple option which has been sent as-is by the parser.
+        # Here we replace each occurrence of a value-less flag (marked by the
+        # FLAG_NEEDS_VALUE sentinel) with the flag_value.
+ elif (
+ self.multiple
+ and value is not UNSET
+ and source not in (ParameterSource.DEFAULT, ParameterSource.DEFAULT_MAP)
+ and any(v is FLAG_NEEDS_VALUE for v in value)
+ ):
+ value = [self.flag_value if v is FLAG_NEEDS_VALUE else v for v in value]
+ source = ParameterSource.COMMANDLINE
+
+        # If the value wasn't set, or came from the param's default, prompt the
+        # user for one if prompting is enabled.
+ elif (
+ (
+ value is UNSET
+ or source in (ParameterSource.DEFAULT, ParameterSource.DEFAULT_MAP)
+ )
+ and self.prompt is not None
+ and (self.required or self.prompt_required)
+ and not ctx.resilient_parsing
+ ):
+ value = self.prompt_for_value(ctx)
+ source = ParameterSource.PROMPT
+
+ return value, source
+
+ def process_value(self, ctx: Context, value: t.Any) -> t.Any:
+ # process_value has to be overridden on Options in order to capture
+ # `value == UNSET` cases before `type_cast_value()` gets called.
+ #
+ # Refs:
+ # https://github.com/pallets/click/issues/3069
+ if self.is_flag and not self.required and self.is_bool_flag and value is UNSET:
+ value = False
+
+ if self.callback is not None:
+ value = self.callback(ctx, self, value)
+
+ return value
+
+ # in the normal case, rely on Parameter.process_value
+ return super().process_value(ctx, value)
+
+
+class Argument(Parameter):
+ """Arguments are positional parameters to a command. They generally
+ provide fewer features than options but can have infinite ``nargs``
+ and are required by default.
+
+ All parameters are passed onwards to the constructor of :class:`Parameter`.
+ """
+
+ param_type_name = "argument"
+
+ def __init__(
+ self,
+ param_decls: cabc.Sequence[str],
+ required: bool | None = None,
+ **attrs: t.Any,
+ ) -> None:
+ # Auto-detect the requirement status of the argument if not explicitly set.
+ if required is None:
+            # The argument gets automatically required if it has no explicit default
+            # value set and is set up to match at least one value.
+ if attrs.get("default", UNSET) is UNSET:
+ required = attrs.get("nargs", 1) > 0
+ # If the argument has a default value, it is not required.
+ else:
+ required = False
+
+ if "multiple" in attrs:
+ raise TypeError("__init__() got an unexpected keyword argument 'multiple'.")
+
+ super().__init__(param_decls, required=required, **attrs)
+
+ @property
+ def human_readable_name(self) -> str:
+ if self.metavar is not None:
+ return self.metavar
+ return self.name.upper() # type: ignore
+
+ def make_metavar(self, ctx: Context) -> str:
+ if self.metavar is not None:
+ return self.metavar
+ var = self.type.get_metavar(param=self, ctx=ctx)
+ if not var:
+ var = self.name.upper() # type: ignore
+ if self.deprecated:
+ var += "!"
+ if not self.required:
+ var = f"[{var}]"
+ if self.nargs != 1:
+ var += "..."
+ return var
+
+ def _parse_decls(
+ self, decls: cabc.Sequence[str], expose_value: bool
+ ) -> tuple[str | None, list[str], list[str]]:
+ if not decls:
+ if not expose_value:
+ return None, [], []
+ raise TypeError("Argument is marked as exposed, but does not have a name.")
+ if len(decls) == 1:
+ name = arg = decls[0]
+ name = name.replace("-", "_").lower()
+ else:
+ raise TypeError(
+ "Arguments take exactly one parameter declaration, got"
+ f" {len(decls)}: {decls}."
+ )
+ return name, [arg], []
+
+ def get_usage_pieces(self, ctx: Context) -> list[str]:
+ return [self.make_metavar(ctx)]
+
+ def get_error_hint(self, ctx: Context) -> str:
+ return f"'{self.make_metavar(ctx)}'"
+
+ def add_to_parser(self, parser: _OptionParser, ctx: Context) -> None:
+ parser.add_argument(dest=self.name, nargs=self.nargs, obj=self)
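+
+    # A rough usage sketch (the ``copy`` command is hypothetical): arguments
+    # are positional, and ``nargs=-1`` makes one variadic (which also makes it
+    # not required, per the auto-detection in ``__init__``):
+    #
+    #     @click.command()
+    #     @click.argument("src", nargs=-1)
+    #     @click.argument("dst")
+    #     def copy(src, dst):
+    #         ...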
+
+
+def __getattr__(name: str) -> object:
+ import warnings
+
+ if name == "BaseCommand":
+ warnings.warn(
+ "'BaseCommand' is deprecated and will be removed in Click 9.0. Use"
+ " 'Command' instead.",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ return _BaseCommand
+
+ if name == "MultiCommand":
+ warnings.warn(
+ "'MultiCommand' is deprecated and will be removed in Click 9.0. Use"
+ " 'Group' instead.",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ return _MultiCommand
+
+ raise AttributeError(name)
diff --git a/Backend/venv/lib/python3.12/site-packages/click/decorators.py b/Backend/venv/lib/python3.12/site-packages/click/decorators.py
new file mode 100644
index 00000000..21f4c342
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/click/decorators.py
@@ -0,0 +1,551 @@
+from __future__ import annotations
+
+import inspect
+import typing as t
+from functools import update_wrapper
+from gettext import gettext as _
+
+from .core import Argument
+from .core import Command
+from .core import Context
+from .core import Group
+from .core import Option
+from .core import Parameter
+from .globals import get_current_context
+from .utils import echo
+
+if t.TYPE_CHECKING:
+ import typing_extensions as te
+
+ P = te.ParamSpec("P")
+
+R = t.TypeVar("R")
+T = t.TypeVar("T")
+_AnyCallable = t.Callable[..., t.Any]
+FC = t.TypeVar("FC", bound="_AnyCallable | Command")
+
+
+def pass_context(f: t.Callable[te.Concatenate[Context, P], R]) -> t.Callable[P, R]:
+ """Marks a callback as wanting to receive the current context
+ object as first argument.
+ """
+
+ def new_func(*args: P.args, **kwargs: P.kwargs) -> R:
+ return f(get_current_context(), *args, **kwargs)
+
+ return update_wrapper(new_func, f)
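+
+# A rough usage sketch (the ``sync`` command name is hypothetical):
+#
+#     @click.command()
+#     @click.pass_context
+#     def sync(ctx):
+#         click.echo(ctx.info_name)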
+
+
+def pass_obj(f: t.Callable[te.Concatenate[T, P], R]) -> t.Callable[P, R]:
+ """Similar to :func:`pass_context`, but only pass the object on the
+ context onwards (:attr:`Context.obj`). This is useful if that object
+ represents the state of a nested system.
+ """
+
+ def new_func(*args: P.args, **kwargs: P.kwargs) -> R:
+ return f(get_current_context().obj, *args, **kwargs)
+
+ return update_wrapper(new_func, f)
+
+
+def make_pass_decorator(
+ object_type: type[T], ensure: bool = False
+) -> t.Callable[[t.Callable[te.Concatenate[T, P], R]], t.Callable[P, R]]:
+ """Given an object type this creates a decorator that will work
+ similar to :func:`pass_obj` but instead of passing the object of the
+ current context, it will find the innermost context of type
+ :func:`object_type`.
+
+ This generates a decorator that works roughly like this::
+
+ from functools import update_wrapper
+
+ def decorator(f):
+ @pass_context
+ def new_func(ctx, *args, **kwargs):
+ obj = ctx.find_object(object_type)
+ return ctx.invoke(f, obj, *args, **kwargs)
+ return update_wrapper(new_func, f)
+ return decorator
+
+ :param object_type: the type of the object to pass.
+ :param ensure: if set to `True`, a new object will be created and
+ remembered on the context if it's not there yet.
+ """
+
+ def decorator(f: t.Callable[te.Concatenate[T, P], R]) -> t.Callable[P, R]:
+ def new_func(*args: P.args, **kwargs: P.kwargs) -> R:
+ ctx = get_current_context()
+
+ obj: T | None
+ if ensure:
+ obj = ctx.ensure_object(object_type)
+ else:
+ obj = ctx.find_object(object_type)
+
+ if obj is None:
+ raise RuntimeError(
+ "Managed to invoke callback without a context"
+ f" object of type {object_type.__name__!r}"
+ " existing."
+ )
+
+ return ctx.invoke(f, obj, *args, **kwargs)
+
+ return update_wrapper(new_func, f)
+
+ return decorator
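+
+# A rough usage sketch (the ``Repo`` type and ``cli``/``show`` commands are
+# hypothetical): with ``ensure=True`` the object is created on the context if
+# it is not there yet.
+#
+#     pass_repo = make_pass_decorator(Repo, ensure=True)
+#
+#     @click.group()
+#     def cli():
+#         ...
+#
+#     @cli.command()
+#     @pass_repo
+#     def show(repo):
+#         click.echo(repo)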
+
+
+def pass_meta_key(
+ key: str, *, doc_description: str | None = None
+) -> t.Callable[[t.Callable[te.Concatenate[T, P], R]], t.Callable[P, R]]:
+ """Create a decorator that passes a key from
+ :attr:`click.Context.meta` as the first argument to the decorated
+ function.
+
+ :param key: Key in ``Context.meta`` to pass.
+ :param doc_description: Description of the object being passed,
+ inserted into the decorator's docstring. Defaults to "the 'key'
+ key from Context.meta".
+
+ .. versionadded:: 8.0
+ """
+
+ def decorator(f: t.Callable[te.Concatenate[T, P], R]) -> t.Callable[P, R]:
+ def new_func(*args: P.args, **kwargs: P.kwargs) -> R:
+ ctx = get_current_context()
+ obj = ctx.meta[key]
+ return ctx.invoke(f, obj, *args, **kwargs)
+
+ return update_wrapper(new_func, f)
+
+ if doc_description is None:
+ doc_description = f"the {key!r} key from :attr:`click.Context.meta`"
+
+ decorator.__doc__ = (
+ f"Decorator that passes {doc_description} as the first argument"
+ " to the decorated function."
+ )
+ return decorator
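+
+# A rough usage sketch (the ``"session"`` key is hypothetical and assumed to
+# have been stored in ``Context.meta`` by some enclosing code):
+#
+#     pass_session = pass_meta_key("session", doc_description="the session")
+#
+#     @click.command()
+#     @pass_session
+#     def show(session):
+#         click.echo(session)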
+
+
+CmdType = t.TypeVar("CmdType", bound=Command)
+
+
+# variant: no call, directly as decorator for a function.
+@t.overload
+def command(name: _AnyCallable) -> Command: ...
+
+
+# variant: with positional name and with positional or keyword cls argument:
+# @command(namearg, CommandCls, ...) or @command(namearg, cls=CommandCls, ...)
+@t.overload
+def command(
+ name: str | None,
+ cls: type[CmdType],
+ **attrs: t.Any,
+) -> t.Callable[[_AnyCallable], CmdType]: ...
+
+
+# variant: name omitted, cls _must_ be a keyword argument, @command(cls=CommandCls, ...)
+@t.overload
+def command(
+ name: None = None,
+ *,
+ cls: type[CmdType],
+ **attrs: t.Any,
+) -> t.Callable[[_AnyCallable], CmdType]: ...
+
+
+# variant: with optional string name, no cls argument provided.
+@t.overload
+def command(
+ name: str | None = ..., cls: None = None, **attrs: t.Any
+) -> t.Callable[[_AnyCallable], Command]: ...
+
+
+def command(
+ name: str | _AnyCallable | None = None,
+ cls: type[CmdType] | None = None,
+ **attrs: t.Any,
+) -> Command | t.Callable[[_AnyCallable], Command | CmdType]:
+ r"""Creates a new :class:`Command` and uses the decorated function as
+ callback. This will also automatically attach all decorated
+ :func:`option`\s and :func:`argument`\s as parameters to the command.
+
+ The name of the command defaults to the name of the function, converted to
+ lowercase, with underscores ``_`` replaced by dashes ``-``, and the suffixes
+ ``_command``, ``_cmd``, ``_group``, and ``_grp`` are removed. For example,
+ ``init_data_command`` becomes ``init-data``.
+
+ All keyword arguments are forwarded to the underlying command class.
+ For the ``params`` argument, any decorated params are appended to
+ the end of the list.
+
+ Once decorated the function turns into a :class:`Command` instance
+ that can be invoked as a command line utility or be attached to a
+ command :class:`Group`.
+
+ :param name: The name of the command. Defaults to modifying the function's
+ name as described above.
+ :param cls: The command class to create. Defaults to :class:`Command`.
+
+ .. versionchanged:: 8.2
+ The suffixes ``_command``, ``_cmd``, ``_group``, and ``_grp`` are
+ removed when generating the name.
+
+ .. versionchanged:: 8.1
+ This decorator can be applied without parentheses.
+
+ .. versionchanged:: 8.1
+ The ``params`` argument can be used. Decorated params are
+ appended to the end of the list.
+ """
+
+ func: t.Callable[[_AnyCallable], t.Any] | None = None
+
+ if callable(name):
+ func = name
+ name = None
+ assert cls is None, "Use 'command(cls=cls)(callable)' to specify a class."
+ assert not attrs, "Use 'command(**kwargs)(callable)' to provide arguments."
+
+ if cls is None:
+ cls = t.cast("type[CmdType]", Command)
+
+ def decorator(f: _AnyCallable) -> CmdType:
+ if isinstance(f, Command):
+ raise TypeError("Attempted to convert a callback into a command twice.")
+
+ attr_params = attrs.pop("params", None)
+ params = attr_params if attr_params is not None else []
+
+ try:
+ decorator_params = f.__click_params__ # type: ignore
+ except AttributeError:
+ pass
+ else:
+ del f.__click_params__ # type: ignore
+ params.extend(reversed(decorator_params))
+
+ if attrs.get("help") is None:
+ attrs["help"] = f.__doc__
+
+ if t.TYPE_CHECKING:
+ assert cls is not None
+ assert not callable(name)
+
+ if name is not None:
+ cmd_name = name
+ else:
+ cmd_name = f.__name__.lower().replace("_", "-")
+ cmd_left, sep, suffix = cmd_name.rpartition("-")
+
+ if sep and suffix in {"command", "cmd", "group", "grp"}:
+ cmd_name = cmd_left
+
+ cmd = cls(name=cmd_name, callback=f, params=params, **attrs)
+ cmd.__doc__ = f.__doc__
+ return cmd
+
+ if func is not None:
+ return decorator(func)
+
+ return decorator
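+
+# A rough usage sketch (the function name is hypothetical); per the naming
+# rules above, ``init_data_command`` becomes the ``init-data`` command:
+#
+#     @command()
+#     def init_data_command():
+#         """Initialize the data store."""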
+
+
+GrpType = t.TypeVar("GrpType", bound=Group)
+
+
+# variant: no call, directly as decorator for a function.
+@t.overload
+def group(name: _AnyCallable) -> Group: ...
+
+
+# variant: with positional name and with positional or keyword cls argument:
+# @group(namearg, GroupCls, ...) or @group(namearg, cls=GroupCls, ...)
+@t.overload
+def group(
+ name: str | None,
+ cls: type[GrpType],
+ **attrs: t.Any,
+) -> t.Callable[[_AnyCallable], GrpType]: ...
+
+
+# variant: name omitted, cls _must_ be a keyword argument, @group(cmd=GroupCls, ...)
+@t.overload
+def group(
+ name: None = None,
+ *,
+ cls: type[GrpType],
+ **attrs: t.Any,
+) -> t.Callable[[_AnyCallable], GrpType]: ...
+
+
+# variant: with optional string name, no cls argument provided.
+@t.overload
+def group(
+ name: str | None = ..., cls: None = None, **attrs: t.Any
+) -> t.Callable[[_AnyCallable], Group]: ...
+
+
+def group(
+ name: str | _AnyCallable | None = None,
+ cls: type[GrpType] | None = None,
+ **attrs: t.Any,
+) -> Group | t.Callable[[_AnyCallable], Group | GrpType]:
+ """Creates a new :class:`Group` with a function as callback. This
+    works otherwise the same as :func:`command`, except that the `cls`
+    parameter is set to :class:`Group`.
+
+ .. versionchanged:: 8.1
+ This decorator can be applied without parentheses.
+ """
+ if cls is None:
+ cls = t.cast("type[GrpType]", Group)
+
+ if callable(name):
+ return command(cls=cls, **attrs)(name)
+
+ return command(name, cls, **attrs)
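+
+# A rough usage sketch (hypothetical names): a group is itself a command that
+# other commands attach to.
+#
+#     @group()
+#     def cli():
+#         ...
+#
+#     @cli.command()
+#     def build():
+#         ...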
+
+
+def _param_memo(f: t.Callable[..., t.Any], param: Parameter) -> None:
+ if isinstance(f, Command):
+ f.params.append(param)
+ else:
+ if not hasattr(f, "__click_params__"):
+ f.__click_params__ = [] # type: ignore
+
+ f.__click_params__.append(param) # type: ignore
+
+
+def argument(
+ *param_decls: str, cls: type[Argument] | None = None, **attrs: t.Any
+) -> t.Callable[[FC], FC]:
+ """Attaches an argument to the command. All positional arguments are
+ passed as parameter declarations to :class:`Argument`; all keyword
+ arguments are forwarded unchanged (except ``cls``).
+ This is equivalent to creating an :class:`Argument` instance manually
+ and attaching it to the :attr:`Command.params` list.
+
+ For the default argument class, refer to :class:`Argument` and
+ :class:`Parameter` for descriptions of parameters.
+
+ :param cls: the argument class to instantiate. This defaults to
+ :class:`Argument`.
+ :param param_decls: Passed as positional arguments to the constructor of
+ ``cls``.
+ :param attrs: Passed as keyword arguments to the constructor of ``cls``.
+ """
+ if cls is None:
+ cls = Argument
+
+ def decorator(f: FC) -> FC:
+ _param_memo(f, cls(param_decls, **attrs))
+ return f
+
+ return decorator
+
+
+def option(
+ *param_decls: str, cls: type[Option] | None = None, **attrs: t.Any
+) -> t.Callable[[FC], FC]:
+ """Attaches an option to the command. All positional arguments are
+ passed as parameter declarations to :class:`Option`; all keyword
+ arguments are forwarded unchanged (except ``cls``).
+ This is equivalent to creating an :class:`Option` instance manually
+ and attaching it to the :attr:`Command.params` list.
+
+ For the default option class, refer to :class:`Option` and
+ :class:`Parameter` for descriptions of parameters.
+
+ :param cls: the option class to instantiate. This defaults to
+ :class:`Option`.
+ :param param_decls: Passed as positional arguments to the constructor of
+ ``cls``.
+ :param attrs: Passed as keyword arguments to the constructor of ``cls``.
+ """
+ if cls is None:
+ cls = Option
+
+ def decorator(f: FC) -> FC:
+ _param_memo(f, cls(param_decls, **attrs))
+ return f
+
+ return decorator
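+
+# A rough usage sketch (hypothetical names):
+#
+#     @command()
+#     @option("--count", default=1, show_default=True, help="Number of runs.")
+#     def run(count):
+#         click.echo(count)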
+
+
+def confirmation_option(*param_decls: str, **kwargs: t.Any) -> t.Callable[[FC], FC]:
+ """Add a ``--yes`` option which shows a prompt before continuing if
+ not passed. If the prompt is declined, the program will exit.
+
+ :param param_decls: One or more option names. Defaults to the single
+ value ``"--yes"``.
+ :param kwargs: Extra arguments are passed to :func:`option`.
+ """
+
+ def callback(ctx: Context, param: Parameter, value: bool) -> None:
+ if not value:
+ ctx.abort()
+
+ if not param_decls:
+ param_decls = ("--yes",)
+
+ kwargs.setdefault("is_flag", True)
+ kwargs.setdefault("callback", callback)
+ kwargs.setdefault("expose_value", False)
+ kwargs.setdefault("prompt", "Do you want to continue?")
+ kwargs.setdefault("help", "Confirm the action without prompting.")
+ return option(*param_decls, **kwargs)
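+
+# A rough usage sketch (the ``drop_db`` command is hypothetical); any of the
+# defaults above, such as ``prompt``, can be overridden via ``kwargs``:
+#
+#     @command()
+#     @confirmation_option(prompt="Drop all tables?")
+#     def drop_db():
+#         ...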
+
+
+def password_option(*param_decls: str, **kwargs: t.Any) -> t.Callable[[FC], FC]:
+ """Add a ``--password`` option which prompts for a password, hiding
+ input and asking to enter the value again for confirmation.
+
+ :param param_decls: One or more option names. Defaults to the single
+ value ``"--password"``.
+ :param kwargs: Extra arguments are passed to :func:`option`.
+ """
+ if not param_decls:
+ param_decls = ("--password",)
+
+ kwargs.setdefault("prompt", True)
+ kwargs.setdefault("confirmation_prompt", True)
+ kwargs.setdefault("hide_input", True)
+ return option(*param_decls, **kwargs)
+
+
+def version_option(
+ version: str | None = None,
+ *param_decls: str,
+ package_name: str | None = None,
+ prog_name: str | None = None,
+ message: str | None = None,
+ **kwargs: t.Any,
+) -> t.Callable[[FC], FC]:
+ """Add a ``--version`` option which immediately prints the version
+ number and exits the program.
+
+ If ``version`` is not provided, Click will try to detect it using
+ :func:`importlib.metadata.version` to get the version for the
+ ``package_name``.
+
+ If ``package_name`` is not provided, Click will try to detect it by
+ inspecting the stack frames. This will be used to detect the
+ version, so it must match the name of the installed package.
+
+ :param version: The version number to show. If not provided, Click
+ will try to detect it.
+ :param param_decls: One or more option names. Defaults to the single
+ value ``"--version"``.
+ :param package_name: The package name to detect the version from. If
+ not provided, Click will try to detect it.
+ :param prog_name: The name of the CLI to show in the message. If not
+ provided, it will be detected from the command.
+ :param message: The message to show. The values ``%(prog)s``,
+ ``%(package)s``, and ``%(version)s`` are available. Defaults to
+ ``"%(prog)s, version %(version)s"``.
+ :param kwargs: Extra arguments are passed to :func:`option`.
+ :raise RuntimeError: ``version`` could not be detected.
+
+ .. versionchanged:: 8.0
+ Add the ``package_name`` parameter, and the ``%(package)s``
+ value for messages.
+
+ .. versionchanged:: 8.0
+ Use :mod:`importlib.metadata` instead of ``pkg_resources``. The
+ version is detected based on the package name, not the entry
+ point name. The Python package name must match the installed
+ package name, or be passed with ``package_name=``.
+ """
+ if message is None:
+ message = _("%(prog)s, version %(version)s")
+
+ if version is None and package_name is None:
+ frame = inspect.currentframe()
+ f_back = frame.f_back if frame is not None else None
+ f_globals = f_back.f_globals if f_back is not None else None
+ # break reference cycle
+ # https://docs.python.org/3/library/inspect.html#the-interpreter-stack
+ del frame
+
+ if f_globals is not None:
+ package_name = f_globals.get("__name__")
+
+ if package_name == "__main__":
+ package_name = f_globals.get("__package__")
+
+ if package_name:
+ package_name = package_name.partition(".")[0]
+
+ def callback(ctx: Context, param: Parameter, value: bool) -> None:
+ if not value or ctx.resilient_parsing:
+ return
+
+ nonlocal prog_name
+ nonlocal version
+
+ if prog_name is None:
+ prog_name = ctx.find_root().info_name
+
+ if version is None and package_name is not None:
+ import importlib.metadata
+
+ try:
+ version = importlib.metadata.version(package_name)
+ except importlib.metadata.PackageNotFoundError:
+ raise RuntimeError(
+ f"{package_name!r} is not installed. Try passing"
+ " 'package_name' instead."
+ ) from None
+
+ if version is None:
+ raise RuntimeError(
+ f"Could not determine the version for {package_name!r} automatically."
+ )
+
+ echo(
+ message % {"prog": prog_name, "package": package_name, "version": version},
+ color=ctx.color,
+ )
+ ctx.exit()
+
+ if not param_decls:
+ param_decls = ("--version",)
+
+ kwargs.setdefault("is_flag", True)
+ kwargs.setdefault("expose_value", False)
+ kwargs.setdefault("is_eager", True)
+ kwargs.setdefault("help", _("Show the version and exit."))
+ kwargs["callback"] = callback
+ return option(*param_decls, **kwargs)
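+
+# A rough usage sketch (the ``mypkg`` package name is hypothetical); the
+# version is read from installed package metadata unless given explicitly:
+#
+#     @command()
+#     @version_option(package_name="mypkg")
+#     def cli():
+#         ...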
+
+
+def help_option(*param_decls: str, **kwargs: t.Any) -> t.Callable[[FC], FC]:
+ """Pre-configured ``--help`` option which immediately prints the help page
+ and exits the program.
+
+ :param param_decls: One or more option names. Defaults to the single
+ value ``"--help"``.
+ :param kwargs: Extra arguments are passed to :func:`option`.
+ """
+
+    def show_help(ctx: Context, param: Parameter, value: bool) -> None:
+        """Callback that prints the help page and exits."""
+ if value and not ctx.resilient_parsing:
+ echo(ctx.get_help(), color=ctx.color)
+ ctx.exit()
+
+ if not param_decls:
+ param_decls = ("--help",)
+
+ kwargs.setdefault("is_flag", True)
+ kwargs.setdefault("expose_value", False)
+ kwargs.setdefault("is_eager", True)
+ kwargs.setdefault("help", _("Show this message and exit."))
+ kwargs.setdefault("callback", show_help)
+
+ return option(*param_decls, **kwargs)
diff --git a/Backend/venv/lib/python3.12/site-packages/click/exceptions.py b/Backend/venv/lib/python3.12/site-packages/click/exceptions.py
new file mode 100644
index 00000000..4d782ee3
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/click/exceptions.py
@@ -0,0 +1,308 @@
+from __future__ import annotations
+
+import collections.abc as cabc
+import typing as t
+from gettext import gettext as _
+from gettext import ngettext
+
+from ._compat import get_text_stderr
+from .globals import resolve_color_default
+from .utils import echo
+from .utils import format_filename
+
+if t.TYPE_CHECKING:
+ from .core import Command
+ from .core import Context
+ from .core import Parameter
+
+
+def _join_param_hints(param_hint: cabc.Sequence[str] | str | None) -> str | None:
+ if param_hint is not None and not isinstance(param_hint, str):
+ return " / ".join(repr(x) for x in param_hint)
+
+ return param_hint
+
+
+class ClickException(Exception):
+ """An exception that Click can handle and show to the user."""
+
+ #: The exit code for this exception.
+ exit_code = 1
+
+ def __init__(self, message: str) -> None:
+ super().__init__(message)
+ # The context will be removed by the time we print the message, so cache
+ # the color settings here to be used later on (in `show`)
+ self.show_color: bool | None = resolve_color_default()
+ self.message = message
+
+ def format_message(self) -> str:
+ return self.message
+
+ def __str__(self) -> str:
+ return self.message
+
+ def show(self, file: t.IO[t.Any] | None = None) -> None:
+ if file is None:
+ file = get_text_stderr()
+
+ echo(
+ _("Error: {message}").format(message=self.format_message()),
+ file=file,
+ color=self.show_color,
+ )
+
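+# A rough usage sketch: raising a ClickException inside a command makes Click
+# print "Error: <message>" to stderr and exit with code 1 (``exit_code``).
+#
+#     raise ClickException("config file not found")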
+
+class UsageError(ClickException):
+ """An internal exception that signals a usage error. This typically
+ aborts any further handling.
+
+ :param message: the error message to display.
+ :param ctx: optionally the context that caused this error. Click will
+ fill in the context automatically in some situations.
+ """
+
+ exit_code = 2
+
+ def __init__(self, message: str, ctx: Context | None = None) -> None:
+ super().__init__(message)
+ self.ctx = ctx
+ self.cmd: Command | None = self.ctx.command if self.ctx else None
+
+ def show(self, file: t.IO[t.Any] | None = None) -> None:
+ if file is None:
+ file = get_text_stderr()
+ color = None
+ hint = ""
+ if (
+ self.ctx is not None
+ and self.ctx.command.get_help_option(self.ctx) is not None
+ ):
+ hint = _("Try '{command} {option}' for help.").format(
+ command=self.ctx.command_path, option=self.ctx.help_option_names[0]
+ )
+ hint = f"{hint}\n"
+ if self.ctx is not None:
+ color = self.ctx.color
+ echo(f"{self.ctx.get_usage()}\n{hint}", file=file, color=color)
+ echo(
+ _("Error: {message}").format(message=self.format_message()),
+ file=file,
+ color=color,
+ )
+
+
+class BadParameter(UsageError):
+ """An exception that formats out a standardized error message for a
+ bad parameter. This is useful when thrown from a callback or type as
+ Click will attach contextual information to it (for instance, which
+ parameter it is).
+
+ .. versionadded:: 2.0
+
+ :param param: the parameter object that caused this error. This can
+ be left out, and Click will attach this info itself
+ if possible.
+    :param param_hint: a string that shows up as the parameter name. This
+                       can be used as an alternative to `param` in cases
+                       where custom validation should happen. If it is
+                       a string it's used as such; if it's a list, each
+                       item is quoted and separated.
+ """
+
+ def __init__(
+ self,
+ message: str,
+ ctx: Context | None = None,
+ param: Parameter | None = None,
+ param_hint: cabc.Sequence[str] | str | None = None,
+ ) -> None:
+ super().__init__(message, ctx)
+ self.param = param
+ self.param_hint = param_hint
+
+ def format_message(self) -> str:
+ if self.param_hint is not None:
+ param_hint = self.param_hint
+ elif self.param is not None:
+ param_hint = self.param.get_error_hint(self.ctx) # type: ignore
+ else:
+ return _("Invalid value: {message}").format(message=self.message)
+
+ return _("Invalid value for {param_hint}: {message}").format(
+ param_hint=_join_param_hints(param_hint), message=self.message
+ )
+
+
+class MissingParameter(BadParameter):
+ """Raised if click required an option or argument but it was not
+ provided when invoking the script.
+
+ .. versionadded:: 4.0
+
+ :param param_type: a string that indicates the type of the parameter.
+ The default is to inherit the parameter type from
+ the given `param`. Valid values are ``'parameter'``,
+ ``'option'`` or ``'argument'``.
+ """
+
+ def __init__(
+ self,
+ message: str | None = None,
+ ctx: Context | None = None,
+ param: Parameter | None = None,
+ param_hint: cabc.Sequence[str] | str | None = None,
+ param_type: str | None = None,
+ ) -> None:
+ super().__init__(message or "", ctx, param, param_hint)
+ self.param_type = param_type
+
+ def format_message(self) -> str:
+ if self.param_hint is not None:
+ param_hint: cabc.Sequence[str] | str | None = self.param_hint
+ elif self.param is not None:
+ param_hint = self.param.get_error_hint(self.ctx) # type: ignore
+ else:
+ param_hint = None
+
+ param_hint = _join_param_hints(param_hint)
+ param_hint = f" {param_hint}" if param_hint else ""
+
+ param_type = self.param_type
+ if param_type is None and self.param is not None:
+ param_type = self.param.param_type_name
+
+ msg = self.message
+ if self.param is not None:
+ msg_extra = self.param.type.get_missing_message(
+ param=self.param, ctx=self.ctx
+ )
+ if msg_extra:
+ if msg:
+ msg += f". {msg_extra}"
+ else:
+ msg = msg_extra
+
+ msg = f" {msg}" if msg else ""
+
+ # Translate param_type for known types.
+ if param_type == "argument":
+ missing = _("Missing argument")
+ elif param_type == "option":
+ missing = _("Missing option")
+ elif param_type == "parameter":
+ missing = _("Missing parameter")
+ else:
+ missing = _("Missing {param_type}").format(param_type=param_type)
+
+ return f"{missing}{param_hint}.{msg}"
+
+ def __str__(self) -> str:
+ if not self.message:
+ param_name = self.param.name if self.param else None
+ return _("Missing parameter: {param_name}").format(param_name=param_name)
+ else:
+ return self.message
+
+
+class NoSuchOption(UsageError):
+ """Raised if click attempted to handle an option that does not
+ exist.
+
+ .. versionadded:: 4.0
+ """
+
+ def __init__(
+ self,
+ option_name: str,
+ message: str | None = None,
+ possibilities: cabc.Sequence[str] | None = None,
+ ctx: Context | None = None,
+ ) -> None:
+ if message is None:
+ message = _("No such option: {name}").format(name=option_name)
+
+ super().__init__(message, ctx)
+ self.option_name = option_name
+ self.possibilities = possibilities
+
+ def format_message(self) -> str:
+ if not self.possibilities:
+ return self.message
+
+ possibility_str = ", ".join(sorted(self.possibilities))
+ suggest = ngettext(
+ "Did you mean {possibility}?",
+ "(Possible options: {possibilities})",
+ len(self.possibilities),
+ ).format(possibility=possibility_str, possibilities=possibility_str)
+ return f"{self.message} {suggest}"
+
+
+class BadOptionUsage(UsageError):
+ """Raised if an option is generally supplied but the use of the option
+ was incorrect. This is for instance raised if the number of arguments
+ for an option is not correct.
+
+ .. versionadded:: 4.0
+
+ :param option_name: the name of the option being used incorrectly.
+ """
+
+ def __init__(
+ self, option_name: str, message: str, ctx: Context | None = None
+ ) -> None:
+ super().__init__(message, ctx)
+ self.option_name = option_name
+
+
+class BadArgumentUsage(UsageError):
+ """Raised if an argument is generally supplied but the use of the argument
+ was incorrect. This is for instance raised if the number of values
+ for an argument is not correct.
+
+ .. versionadded:: 6.0
+ """
+
+
+class NoArgsIsHelpError(UsageError):
+ def __init__(self, ctx: Context) -> None:
+ self.ctx: Context
+ super().__init__(ctx.get_help(), ctx=ctx)
+
+ def show(self, file: t.IO[t.Any] | None = None) -> None:
+ echo(self.format_message(), file=file, err=True, color=self.ctx.color)
+
+
+class FileError(ClickException):
+ """Raised if a file cannot be opened."""
+
+ def __init__(self, filename: str, hint: str | None = None) -> None:
+ if hint is None:
+ hint = _("unknown error")
+
+ super().__init__(hint)
+ self.ui_filename: str = format_filename(filename)
+ self.filename = filename
+
+ def format_message(self) -> str:
+ return _("Could not open file {filename!r}: {message}").format(
+ filename=self.ui_filename, message=self.message
+ )
+
+
+class Abort(RuntimeError):
+ """An internal signalling exception that signals Click to abort."""
+
+
+class Exit(RuntimeError):
+ """An exception that indicates that the application should exit with some
+ status code.
+
+ :param code: the status code to exit with.
+ """
+
+ __slots__ = ("exit_code",)
+
+ def __init__(self, code: int = 0) -> None:
+ self.exit_code: int = code
diff --git a/Backend/venv/lib/python3.12/site-packages/click/formatting.py b/Backend/venv/lib/python3.12/site-packages/click/formatting.py
new file mode 100644
index 00000000..0b64f831
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/click/formatting.py
@@ -0,0 +1,301 @@
+from __future__ import annotations
+
+import collections.abc as cabc
+from contextlib import contextmanager
+from gettext import gettext as _
+
+from ._compat import term_len
+from .parser import _split_opt
+
+# Can force a width. This is used by the test system
+FORCED_WIDTH: int | None = None
+
+
+def measure_table(rows: cabc.Iterable[tuple[str, str]]) -> tuple[int, ...]:
+ widths: dict[int, int] = {}
+
+ for row in rows:
+ for idx, col in enumerate(row):
+ widths[idx] = max(widths.get(idx, 0), term_len(col))
+
+ return tuple(y for x, y in sorted(widths.items()))
+
+
+def iter_rows(
+ rows: cabc.Iterable[tuple[str, str]], col_count: int
+) -> cabc.Iterator[tuple[str, ...]]:
+ for row in rows:
+ yield row + ("",) * (col_count - len(row))
+
+
+def wrap_text(
+ text: str,
+ width: int = 78,
+ initial_indent: str = "",
+ subsequent_indent: str = "",
+ preserve_paragraphs: bool = False,
+) -> str:
+ """A helper function that intelligently wraps text. By default, it
+ assumes that it operates on a single paragraph of text but if the
+ `preserve_paragraphs` parameter is provided it will intelligently
+ handle paragraphs (defined by two empty lines).
+
+ If paragraphs are handled, a paragraph can be prefixed with an empty
+ line containing the ``\\b`` character (``\\x08``) to indicate that
+ no rewrapping should happen in that block.
+
+ :param text: the text that should be rewrapped.
+ :param width: the maximum width for the text.
+ :param initial_indent: the initial indent that should be placed on the
+ first line as a string.
+ :param subsequent_indent: the indent string that should be placed on
+ each consecutive line.
+ :param preserve_paragraphs: if this flag is set then the wrapping will
+ intelligently handle paragraphs.
+ """
+ from ._textwrap import TextWrapper
+
+ text = text.expandtabs()
+ wrapper = TextWrapper(
+ width,
+ initial_indent=initial_indent,
+ subsequent_indent=subsequent_indent,
+ replace_whitespace=False,
+ )
+ if not preserve_paragraphs:
+ return wrapper.fill(text)
+
+ p: list[tuple[int, bool, str]] = []
+ buf: list[str] = []
+ indent = None
+
+ def _flush_par() -> None:
+ if not buf:
+ return
+ if buf[0].strip() == "\b":
+ p.append((indent or 0, True, "\n".join(buf[1:])))
+ else:
+ p.append((indent or 0, False, " ".join(buf)))
+ del buf[:]
+
+ for line in text.splitlines():
+ if not line:
+ _flush_par()
+ indent = None
+ else:
+ if indent is None:
+ orig_len = term_len(line)
+ line = line.lstrip()
+ indent = orig_len - term_len(line)
+ buf.append(line)
+ _flush_par()
+
+ rv = []
+ for indent, raw, text in p:
+ with wrapper.extra_indent(" " * indent):
+ if raw:
+ rv.append(wrapper.indent_only(text))
+ else:
+ rv.append(wrapper.fill(text))
+
+ return "\n\n".join(rv)
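+
+# A rough sketch of the ``\b`` escape in a help string: the second paragraph
+# below is emitted with its line breaks intact instead of being rewrapped.
+#
+#     """Upload files.
+#
+#     \b
+#     examples:
+#       cli upload a.txt
+#       cli upload b.txt
+#     """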
+
+
+class HelpFormatter:
+ """This class helps with formatting text-based help pages. It's
+ usually just needed for very special internal cases, but it's also
+ exposed so that developers can write their own fancy outputs.
+
+ At present, it always writes into memory.
+
+ :param indent_increment: the additional increment for each level.
+    :param width: the width for the text. This defaults to the terminal
+        width clamped to a maximum of 78.
+    :param max_width: the maximum width the terminal width is clamped to.
+        Defaults to 80.
+ """
+
+ def __init__(
+ self,
+ indent_increment: int = 2,
+ width: int | None = None,
+ max_width: int | None = None,
+ ) -> None:
+ self.indent_increment = indent_increment
+ if max_width is None:
+ max_width = 80
+ if width is None:
+ import shutil
+
+ width = FORCED_WIDTH
+ if width is None:
+ width = max(min(shutil.get_terminal_size().columns, max_width) - 2, 50)
+ self.width = width
+ self.current_indent: int = 0
+ self.buffer: list[str] = []
+
+ def write(self, string: str) -> None:
+ """Writes a unicode string into the internal buffer."""
+ self.buffer.append(string)
+
+ def indent(self) -> None:
+ """Increases the indentation."""
+ self.current_indent += self.indent_increment
+
+ def dedent(self) -> None:
+ """Decreases the indentation."""
+ self.current_indent -= self.indent_increment
+
+ def write_usage(self, prog: str, args: str = "", prefix: str | None = None) -> None:
+ """Writes a usage line into the buffer.
+
+ :param prog: the program name.
+ :param args: whitespace separated list of arguments.
+ :param prefix: The prefix for the first line. Defaults to
+ ``"Usage: "``.
+ """
+ if prefix is None:
+ prefix = f"{_('Usage:')} "
+
+ usage_prefix = f"{prefix:>{self.current_indent}}{prog} "
+ text_width = self.width - self.current_indent
+
+ if text_width >= (term_len(usage_prefix) + 20):
+ # The arguments will fit to the right of the prefix.
+ indent = " " * term_len(usage_prefix)
+ self.write(
+ wrap_text(
+ args,
+ text_width,
+ initial_indent=usage_prefix,
+ subsequent_indent=indent,
+ )
+ )
+ else:
+ # The prefix is too long, put the arguments on the next line.
+ self.write(usage_prefix)
+ self.write("\n")
+ indent = " " * (max(self.current_indent, term_len(prefix)) + 4)
+ self.write(
+ wrap_text(
+ args, text_width, initial_indent=indent, subsequent_indent=indent
+ )
+ )
+
+ self.write("\n")
+
+ def write_heading(self, heading: str) -> None:
+ """Writes a heading into the buffer."""
+ self.write(f"{'':>{self.current_indent}}{heading}:\n")
+
+ def write_paragraph(self) -> None:
+ """Writes a paragraph into the buffer."""
+ if self.buffer:
+ self.write("\n")
+
+ def write_text(self, text: str) -> None:
+ """Writes re-indented text into the buffer. This rewraps and
+ preserves paragraphs.
+ """
+ indent = " " * self.current_indent
+ self.write(
+ wrap_text(
+ text,
+ self.width,
+ initial_indent=indent,
+ subsequent_indent=indent,
+ preserve_paragraphs=True,
+ )
+ )
+ self.write("\n")
+
+ def write_dl(
+ self,
+ rows: cabc.Sequence[tuple[str, str]],
+ col_max: int = 30,
+ col_spacing: int = 2,
+ ) -> None:
+ """Writes a definition list into the buffer. This is how options
+ and commands are usually formatted.
+
+ :param rows: a list of two item tuples for the terms and values.
+ :param col_max: the maximum width of the first column.
+ :param col_spacing: the number of spaces between the first and
+ second column.
+ """
+ rows = list(rows)
+ widths = measure_table(rows)
+ if len(widths) != 2:
+ raise TypeError("Expected two columns for definition list")
+
+ first_col = min(widths[0], col_max) + col_spacing
+
+ for first, second in iter_rows(rows, len(widths)):
+ self.write(f"{'':>{self.current_indent}}{first}")
+ if not second:
+ self.write("\n")
+ continue
+ if term_len(first) <= first_col - col_spacing:
+ self.write(" " * (first_col - term_len(first)))
+ else:
+ self.write("\n")
+ self.write(" " * (first_col + self.current_indent))
+
+ text_width = max(self.width - first_col - 2, 10)
+ wrapped_text = wrap_text(second, text_width, preserve_paragraphs=True)
+ lines = wrapped_text.splitlines()
+
+ if lines:
+ self.write(f"{lines[0]}\n")
+
+ for line in lines[1:]:
+ self.write(f"{'':>{first_col + self.current_indent}}{line}\n")
+ else:
+ self.write("\n")
+
+ @contextmanager
+ def section(self, name: str) -> cabc.Iterator[None]:
+ """Helpful context manager that writes a paragraph, a heading,
+ and the indents.
+
+ :param name: the section name that is written as heading.
+ """
+ self.write_paragraph()
+ self.write_heading(name)
+ self.indent()
+ try:
+ yield
+ finally:
+ self.dedent()
+
+ @contextmanager
+ def indentation(self) -> cabc.Iterator[None]:
+ """A context manager that increases the indentation."""
+ self.indent()
+ try:
+ yield
+ finally:
+ self.dedent()
+
+ def getvalue(self) -> str:
+ """Returns the buffer contents."""
+ return "".join(self.buffer)
+
+
+def join_options(options: cabc.Sequence[str]) -> tuple[str, bool]:
+ """Given a list of option strings this joins them in the most appropriate
+ way and returns them in the form ``(formatted_string,
+ any_prefix_is_slash)`` where the second item in the tuple is a flag that
+ indicates if any of the option prefixes was a slash.
+ """
+ rv = []
+ any_prefix_is_slash = False
+
+ for opt in options:
+ prefix = _split_opt(opt)[0]
+
+ if prefix == "/":
+ any_prefix_is_slash = True
+
+ rv.append((len(prefix), opt))
+
+ rv.sort(key=lambda x: x[0])
+ return ", ".join(x[1] for x in rv), any_prefix_is_slash
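+
+# A rough sketch of the joining behavior: options sort by prefix length, so
+# short options come first, and a Windows-style ``/`` prefix sets the flag.
+#
+#     join_options(["--verbose", "-v"])  ->  ("-v, --verbose", False)
+#     join_options(["/debug"])           ->  ("/debug", True)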
diff --git a/Backend/venv/lib/python3.12/site-packages/click/globals.py b/Backend/venv/lib/python3.12/site-packages/click/globals.py
new file mode 100644
index 00000000..a2f91723
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/click/globals.py
@@ -0,0 +1,67 @@
+from __future__ import annotations
+
+import typing as t
+from threading import local
+
+if t.TYPE_CHECKING:
+ from .core import Context
+
+_local = local()
+
+
+@t.overload
+def get_current_context(silent: t.Literal[False] = False) -> Context: ...
+
+
+@t.overload
+def get_current_context(silent: bool = ...) -> Context | None: ...
+
+
+def get_current_context(silent: bool = False) -> Context | None:
+ """Returns the current click context. This can be used as a way to
+ access the current context object from anywhere. This is a more implicit
+ alternative to the :func:`pass_context` decorator. This function is
+ primarily useful for helpers such as :func:`echo` which might be
+ interested in changing its behavior based on the current context.
+
+ To push the current context, :meth:`Context.scope` can be used.
+
+ .. versionadded:: 5.0
+
+ :param silent: if set to `True` the return value is `None` if no context
+ is available. The default behavior is to raise a
+ :exc:`RuntimeError`.
+ """
+ try:
+ return t.cast("Context", _local.stack[-1])
+ except (AttributeError, IndexError) as e:
+ if not silent:
+ raise RuntimeError("There is no active click context.") from e
+
+ return None
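+
+# A rough usage sketch (inside a running command, where a context has been
+# pushed):
+#
+#     ctx = get_current_context()
+#     ctx.exit(1)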
+
+
+def push_context(ctx: Context) -> None:
+ """Pushes a new context to the current stack."""
+ _local.__dict__.setdefault("stack", []).append(ctx)
+
+
+def pop_context() -> None:
+ """Removes the top level from the stack."""
+ _local.stack.pop()
+
+
+def resolve_color_default(color: bool | None = None) -> bool | None:
+ """Internal helper to get the default value of the color flag. If a
+ value is passed it's returned unchanged, otherwise it's looked up from
+ the current context.
+ """
+ if color is not None:
+ return color
+
+ ctx = get_current_context(silent=True)
+
+ if ctx is not None:
+ return ctx.color
+
+ return None
diff --git a/Backend/venv/lib/python3.12/site-packages/click/parser.py b/Backend/venv/lib/python3.12/site-packages/click/parser.py
new file mode 100644
index 00000000..1ea1f716
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/click/parser.py
@@ -0,0 +1,532 @@
+"""
+This module started out as largely a copy paste from the stdlib's
+optparse module with the features removed that we do not need from
+optparse because we implement them in Click on a higher level (for
+instance type handling, help formatting and a lot more).
+
+The plan is to remove more and more from here over time.
+
+The reason this is a separate module rather than the stdlib's optparse is
+that Python 2.x and 3.x differ in the error messages they generate, and
+optparse in the stdlib uses gettext for no good reason, which might cause
+us issues.
+
+Click uses parts of optparse written by Gregory P. Ward and maintained
+by the Python Software Foundation. This is limited to code in parser.py.
+
+Copyright 2001-2006 Gregory P. Ward. All rights reserved.
+Copyright 2002-2006 Python Software Foundation. All rights reserved.
+"""
+
+# This code uses parts of optparse written by Gregory P. Ward and
+# maintained by the Python Software Foundation.
+# Copyright 2001-2006 Gregory P. Ward
+# Copyright 2002-2006 Python Software Foundation
+from __future__ import annotations
+
+import collections.abc as cabc
+import typing as t
+from collections import deque
+from gettext import gettext as _
+from gettext import ngettext
+
+from ._utils import FLAG_NEEDS_VALUE
+from ._utils import UNSET
+from .exceptions import BadArgumentUsage
+from .exceptions import BadOptionUsage
+from .exceptions import NoSuchOption
+from .exceptions import UsageError
+
+if t.TYPE_CHECKING:
+ from ._utils import T_FLAG_NEEDS_VALUE
+ from ._utils import T_UNSET
+ from .core import Argument as CoreArgument
+ from .core import Context
+ from .core import Option as CoreOption
+ from .core import Parameter as CoreParameter
+
+V = t.TypeVar("V")
+
+
+def _unpack_args(
+ args: cabc.Sequence[str], nargs_spec: cabc.Sequence[int]
+) -> tuple[cabc.Sequence[str | cabc.Sequence[str | None] | None], list[str]]:
+ """Given an iterable of arguments and an iterable of nargs specifications,
+ it returns a tuple with all the unpacked arguments at the first index
+ and all remaining arguments as the second.
+
+ The nargs specification is the number of arguments that should be consumed
+ or `-1` to indicate that this position should eat up all the remainders.
+
+ Missing items are filled with ``UNSET``.
+ """
+ args = deque(args)
+ nargs_spec = deque(nargs_spec)
+ rv: list[str | tuple[str | T_UNSET, ...] | T_UNSET] = []
+ spos: int | None = None
+
+ def _fetch(c: deque[V]) -> V | T_UNSET:
+ try:
+ if spos is None:
+ return c.popleft()
+ else:
+ return c.pop()
+ except IndexError:
+ return UNSET
+
+ while nargs_spec:
+ nargs = _fetch(nargs_spec)
+
+ if nargs is None:
+ continue
+
+ if nargs == 1:
+ rv.append(_fetch(args)) # type: ignore[arg-type]
+ elif nargs > 1:
+ x = [_fetch(args) for _ in range(nargs)]
+
+ # If we're reversed, we're pulling in the arguments in reverse,
+ # so we need to turn them around.
+ if spos is not None:
+ x.reverse()
+
+ rv.append(tuple(x))
+ elif nargs < 0:
+ if spos is not None:
+ raise TypeError("Cannot have two nargs < 0")
+
+ spos = len(rv)
+ rv.append(UNSET)
+
+ # spos is the position of the wildcard (star). If it's not `None`,
+ # we fill it with the remainder.
+ if spos is not None:
+ rv[spos] = tuple(args)
+ args = []
+ rv[spos + 1 :] = reversed(rv[spos + 1 :])
+
+ return tuple(rv), list(args)
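+
+# A rough sketch of the unpacking behavior: the ``-1`` spec eats everything
+# the fixed positions on either side do not consume.
+#
+#     _unpack_args(["a", "b", "c", "d"], [1, -1, 1])
+#     ->  (("a", ("b", "c"), "d"), [])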
+
+
+def _split_opt(opt: str) -> tuple[str, str]:
+ first = opt[:1]
+ if first.isalnum():
+ return "", opt
+ if opt[1:2] == first:
+ return opt[:2], opt[2:]
+ return first, opt[1:]
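+
+# A rough sketch of the splitting behavior:
+#
+#     _split_opt("--foo")  ->  ("--", "foo")
+#     _split_opt("-f")     ->  ("-", "f")
+#     _split_opt("foo")    ->  ("", "foo")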
+
+
+def _normalize_opt(opt: str, ctx: Context | None) -> str:
+ if ctx is None or ctx.token_normalize_func is None:
+ return opt
+ prefix, opt = _split_opt(opt)
+ return f"{prefix}{ctx.token_normalize_func(opt)}"
+
+
+class _Option:
+ def __init__(
+ self,
+ obj: CoreOption,
+ opts: cabc.Sequence[str],
+ dest: str | None,
+ action: str | None = None,
+ nargs: int = 1,
+ const: t.Any | None = None,
+ ):
+ self._short_opts = []
+ self._long_opts = []
+ self.prefixes: set[str] = set()
+
+ for opt in opts:
+ prefix, value = _split_opt(opt)
+ if not prefix:
+ raise ValueError(f"Invalid start character for option ({opt})")
+ self.prefixes.add(prefix[0])
+ if len(prefix) == 1 and len(value) == 1:
+ self._short_opts.append(opt)
+ else:
+ self._long_opts.append(opt)
+ self.prefixes.add(prefix)
+
+ if action is None:
+ action = "store"
+
+ self.dest = dest
+ self.action = action
+ self.nargs = nargs
+ self.const = const
+ self.obj = obj
+
+ @property
+ def takes_value(self) -> bool:
+ return self.action in ("store", "append")
+
+ def process(self, value: t.Any, state: _ParsingState) -> None:
+ if self.action == "store":
+ state.opts[self.dest] = value # type: ignore
+ elif self.action == "store_const":
+ state.opts[self.dest] = self.const # type: ignore
+ elif self.action == "append":
+ state.opts.setdefault(self.dest, []).append(value) # type: ignore
+ elif self.action == "append_const":
+ state.opts.setdefault(self.dest, []).append(self.const) # type: ignore
+ elif self.action == "count":
+ state.opts[self.dest] = state.opts.get(self.dest, 0) + 1 # type: ignore
+ else:
+ raise ValueError(f"unknown action '{self.action}'")
+ state.order.append(self.obj)
+
+
+class _Argument:
+ def __init__(self, obj: CoreArgument, dest: str | None, nargs: int = 1):
+ self.dest = dest
+ self.nargs = nargs
+ self.obj = obj
+
+ def process(
+ self,
+ value: str | cabc.Sequence[str | None] | None | T_UNSET,
+ state: _ParsingState,
+ ) -> None:
+ if self.nargs > 1:
+ assert isinstance(value, cabc.Sequence)
+ holes = sum(1 for x in value if x is UNSET)
+ if holes == len(value):
+ value = UNSET
+ elif holes != 0:
+ raise BadArgumentUsage(
+ _("Argument {name!r} takes {nargs} values.").format(
+ name=self.dest, nargs=self.nargs
+ )
+ )
+
+ # We failed to collect any argument value so we consider the argument as unset.
+ if value == ():
+ value = UNSET
+
+ state.opts[self.dest] = value # type: ignore
+ state.order.append(self.obj)
+
+
+class _ParsingState:
+ def __init__(self, rargs: list[str]) -> None:
+ self.opts: dict[str, t.Any] = {}
+ self.largs: list[str] = []
+ self.rargs = rargs
+ self.order: list[CoreParameter] = []
+
+
+class _OptionParser:
+ """The option parser is an internal class that is ultimately used to
+ parse options and arguments. It's modelled after optparse and brings
+ a similar but vastly simplified API. It should generally not be used
+ directly as the high level Click classes wrap it for you.
+
+ It's not nearly as extensible as optparse or argparse as it does not
+ implement features that are implemented on a higher level (such as
+ types or defaults).
+
+    :param ctx: the :class:`~click.Context` that this parser belongs to,
+        if any.
+
+ .. deprecated:: 8.2
+ Will be removed in Click 9.0.
+ """
+
+ def __init__(self, ctx: Context | None = None) -> None:
+ #: The :class:`~click.Context` for this parser. This might be
+ #: `None` for some advanced use cases.
+ self.ctx = ctx
+ #: This controls how the parser deals with interspersed arguments.
+ #: If this is set to `False`, the parser will stop on the first
+ #: non-option. Click uses this to implement nested subcommands
+ #: safely.
+ self.allow_interspersed_args: bool = True
+ #: This tells the parser how to deal with unknown options. By
+ #: default it will error out (which is sensible), but there is a
+ #: second mode where it will ignore it and continue processing
+ #: after shifting all the unknown options into the resulting args.
+ self.ignore_unknown_options: bool = False
+
+ if ctx is not None:
+ self.allow_interspersed_args = ctx.allow_interspersed_args
+ self.ignore_unknown_options = ctx.ignore_unknown_options
+
+ self._short_opt: dict[str, _Option] = {}
+ self._long_opt: dict[str, _Option] = {}
+ self._opt_prefixes = {"-", "--"}
+ self._args: list[_Argument] = []
+
+ def add_option(
+ self,
+ obj: CoreOption,
+ opts: cabc.Sequence[str],
+ dest: str | None,
+ action: str | None = None,
+ nargs: int = 1,
+ const: t.Any | None = None,
+ ) -> None:
+ """Adds a new option named `dest` to the parser. The destination
+ is not inferred (unlike with optparse) and needs to be explicitly
+ provided. Action can be any of ``store``, ``store_const``,
+ ``append``, ``append_const`` or ``count``.
+
+ The `obj` can be used to identify the option in the order list
+ that is returned from the parser.
+ """
+ opts = [_normalize_opt(opt, self.ctx) for opt in opts]
+ option = _Option(obj, opts, dest, action=action, nargs=nargs, const=const)
+ self._opt_prefixes.update(option.prefixes)
+ for opt in option._short_opts:
+ self._short_opt[opt] = option
+ for opt in option._long_opts:
+ self._long_opt[opt] = option
+
+ def add_argument(self, obj: CoreArgument, dest: str | None, nargs: int = 1) -> None:
+ """Adds a positional argument named `dest` to the parser.
+
+ The `obj` can be used to identify the option in the order list
+ that is returned from the parser.
+ """
+ self._args.append(_Argument(obj, dest=dest, nargs=nargs))
+
+ def parse_args(
+ self, args: list[str]
+    ) -> tuple[dict[str, t.Any], list[str], list[CoreParameter]]:
+        """Parses the given arguments and returns ``(values, args, order)``
+        for the parsed options and arguments, as well as the leftover
+        arguments if there are any. The order is a list of objects as they
+        appear on the command line. If arguments appear multiple times they
+        will be recorded multiple times as well.
+ """
+ state = _ParsingState(args)
+ try:
+ self._process_args_for_options(state)
+ self._process_args_for_args(state)
+ except UsageError:
+ if self.ctx is None or not self.ctx.resilient_parsing:
+ raise
+ return state.opts, state.largs, state.order
+
+ def _process_args_for_args(self, state: _ParsingState) -> None:
+ pargs, args = _unpack_args(
+ state.largs + state.rargs, [x.nargs for x in self._args]
+ )
+
+ for idx, arg in enumerate(self._args):
+ arg.process(pargs[idx], state)
+
+ state.largs = args
+ state.rargs = []
+
+ def _process_args_for_options(self, state: _ParsingState) -> None:
+ while state.rargs:
+ arg = state.rargs.pop(0)
+ arglen = len(arg)
+ # Double dashes always handled explicitly regardless of what
+ # prefixes are valid.
+ if arg == "--":
+ return
+ elif arg[:1] in self._opt_prefixes and arglen > 1:
+ self._process_opts(arg, state)
+ elif self.allow_interspersed_args:
+ state.largs.append(arg)
+ else:
+ state.rargs.insert(0, arg)
+ return
+
+ # Say this is the original argument list:
+ # [arg0, arg1, ..., arg(i-1), arg(i), arg(i+1), ..., arg(N-1)]
+ # ^
+ # (we are about to process arg(i)).
+ #
+ # Then rargs is [arg(i), ..., arg(N-1)] and largs is a *subset* of
+ # [arg0, ..., arg(i-1)] (any options and their arguments will have
+ # been removed from largs).
+ #
+ # The while loop will usually consume 1 or more arguments per pass.
+ # If it consumes 1 (eg. arg is an option that takes no arguments),
+ # then after _process_arg() is done the situation is:
+ #
+ # largs = subset of [arg0, ..., arg(i)]
+ # rargs = [arg(i+1), ..., arg(N-1)]
+ #
+ # If allow_interspersed_args is false, largs will always be
+ # *empty* -- still a subset of [arg0, ..., arg(i-1)], but
+ # not a very interesting subset!
+
+ def _match_long_opt(
+ self, opt: str, explicit_value: str | None, state: _ParsingState
+ ) -> None:
+ if opt not in self._long_opt:
+ from difflib import get_close_matches
+
+ possibilities = get_close_matches(opt, self._long_opt)
+ raise NoSuchOption(opt, possibilities=possibilities, ctx=self.ctx)
+
+ option = self._long_opt[opt]
+ if option.takes_value:
+ # At this point it's safe to modify rargs by injecting the
+ # explicit value, because no exception is raised in this
+ # branch. This means that the inserted value will be fully
+ # consumed.
+ if explicit_value is not None:
+ state.rargs.insert(0, explicit_value)
+
+ value = self._get_value_from_state(opt, option, state)
+
+ elif explicit_value is not None:
+ raise BadOptionUsage(
+ opt, _("Option {name!r} does not take a value.").format(name=opt)
+ )
+
+ else:
+ value = UNSET
+
+ option.process(value, state)
+
+ def _match_short_opt(self, arg: str, state: _ParsingState) -> None:
+ stop = False
+ i = 1
+ prefix = arg[0]
+ unknown_options = []
+
+ for ch in arg[1:]:
+ opt = _normalize_opt(f"{prefix}{ch}", self.ctx)
+ option = self._short_opt.get(opt)
+ i += 1
+
+ if not option:
+ if self.ignore_unknown_options:
+ unknown_options.append(ch)
+ continue
+ raise NoSuchOption(opt, ctx=self.ctx)
+ if option.takes_value:
+ # Any characters left in arg? Pretend they're the
+ # next arg, and stop consuming characters of arg.
+ if i < len(arg):
+ state.rargs.insert(0, arg[i:])
+ stop = True
+
+ value = self._get_value_from_state(opt, option, state)
+
+ else:
+ value = UNSET
+
+ option.process(value, state)
+
+ if stop:
+ break
+
+ # If we got any unknown options we recombine the string of the
+ # remaining options and re-attach the prefix, then report that
+ # to the state as new larg. This way there is basic combinatorics
+ # that can be achieved while still ignoring unknown arguments.
+ if self.ignore_unknown_options and unknown_options:
+ state.largs.append(f"{prefix}{''.join(unknown_options)}")
+
+ def _get_value_from_state(
+ self, option_name: str, option: _Option, state: _ParsingState
+ ) -> str | cabc.Sequence[str] | T_FLAG_NEEDS_VALUE:
+ nargs = option.nargs
+
+ value: str | cabc.Sequence[str] | T_FLAG_NEEDS_VALUE
+
+ if len(state.rargs) < nargs:
+ if option.obj._flag_needs_value:
+ # Option allows omitting the value.
+ value = FLAG_NEEDS_VALUE
+ else:
+ raise BadOptionUsage(
+ option_name,
+ ngettext(
+ "Option {name!r} requires an argument.",
+ "Option {name!r} requires {nargs} arguments.",
+ nargs,
+ ).format(name=option_name, nargs=nargs),
+ )
+ elif nargs == 1:
+ next_rarg = state.rargs[0]
+
+ if (
+ option.obj._flag_needs_value
+ and isinstance(next_rarg, str)
+ and next_rarg[:1] in self._opt_prefixes
+ and len(next_rarg) > 1
+ ):
+ # The next arg looks like the start of an option, don't
+ # use it as the value if omitting the value is allowed.
+ value = FLAG_NEEDS_VALUE
+ else:
+ value = state.rargs.pop(0)
+ else:
+ value = tuple(state.rargs[:nargs])
+ del state.rargs[:nargs]
+
+ return value
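+
+ # Editor's note (illustrative): for an option whose value may be
+ # omitted (obj._flag_needs_value), "--opt --other" yields the
+ # FLAG_NEEDS_VALUE sentinel above rather than consuming "--other"
+ # as the value, while "--opt 5" still consumes "5" normally.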
+
+ def _process_opts(self, arg: str, state: _ParsingState) -> None:
+ explicit_value = None
+ # Long option handling happens in two parts. The first part is
+ # supporting explicitly attached values. In any case, we will try
+ # to long match the option first.
+ if "=" in arg:
+ long_opt, explicit_value = arg.split("=", 1)
+ else:
+ long_opt = arg
+ norm_long_opt = _normalize_opt(long_opt, self.ctx)
+
+ # At this point we will match the (assumed) long option through
+ # the long option matching code. Note that this allows options
+ # like "-foo" to be matched as long options.
+ try:
+ self._match_long_opt(norm_long_opt, explicit_value, state)
+ except NoSuchOption:
+ # At this point the long option matching failed, and we need
+ # to try with short options. However there is a special rule
+ # which says, that if we have a two character options prefix
+ # (applies to "--foo" for instance), we do not dispatch to the
+ # short option code and will instead raise the no option
+ # error.
+ if arg[:2] not in self._opt_prefixes:
+ self._match_short_opt(arg, state)
+ return
+
+ if not self.ignore_unknown_options:
+ raise
+
+ state.largs.append(arg)
+
+
+def __getattr__(name: str) -> object:
+ import warnings
+
+ if name in {
+ "OptionParser",
+ "Argument",
+ "Option",
+ "split_opt",
+ "normalize_opt",
+ "ParsingState",
+ }:
+ warnings.warn(
+ f"'parser.{name}' is deprecated and will be removed in Click 9.0."
+ " The old parser is available in 'optparse'.",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ return globals()[f"_{name}"]
+
+ if name == "split_arg_string":
+ from .shell_completion import split_arg_string
+
+ warnings.warn(
+ "Importing 'parser.split_arg_string' is deprecated, it will only be"
+ " available in 'shell_completion' in Click 9.0.",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ return split_arg_string
+
+ raise AttributeError(name)
diff --git a/Backend/venv/lib/python3.12/site-packages/click/py.typed b/Backend/venv/lib/python3.12/site-packages/click/py.typed
new file mode 100644
index 00000000..e69de29b
diff --git a/Backend/venv/lib/python3.12/site-packages/click/shell_completion.py b/Backend/venv/lib/python3.12/site-packages/click/shell_completion.py
new file mode 100644
index 00000000..8f1564c4
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/click/shell_completion.py
@@ -0,0 +1,667 @@
+from __future__ import annotations
+
+import collections.abc as cabc
+import os
+import re
+import typing as t
+from gettext import gettext as _
+
+from .core import Argument
+from .core import Command
+from .core import Context
+from .core import Group
+from .core import Option
+from .core import Parameter
+from .core import ParameterSource
+from .utils import echo
+
+
+def shell_complete(
+ cli: Command,
+ ctx_args: cabc.MutableMapping[str, t.Any],
+ prog_name: str,
+ complete_var: str,
+ instruction: str,
+) -> int:
+ """Perform shell completion for the given CLI program.
+
+ :param cli: Command being called.
+ :param ctx_args: Extra arguments to pass to
+ ``cli.make_context``.
+ :param prog_name: Name of the executable in the shell.
+ :param complete_var: Name of the environment variable that holds
+ the completion instruction.
+ :param instruction: Value of ``complete_var`` with the completion
+ instruction and shell, in the form ``instruction_shell``.
+ :return: Status code to exit with.
+ """
+ shell, _, instruction = instruction.partition("_")
+ comp_cls = get_completion_class(shell)
+
+ if comp_cls is None:
+ return 1
+
+ comp = comp_cls(cli, ctx_args, prog_name, complete_var)
+
+ if instruction == "source":
+ echo(comp.source())
+ return 0
+
+ if instruction == "complete":
+ echo(comp.complete())
+ return 0
+
+ return 1
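+
+# Editor's note (illustrative, not part of the upstream file): for a program
+# installed as `foo`, this protocol is driven by a single environment
+# variable, e.g.:
+#
+#   _FOO_COMPLETE=bash_source foo    # prints the script from source()
+#   _FOO_COMPLETE=bash_complete foo  # prints completion data from complete()
+#
+# partition("_") above splits "bash_source" into the shell name ("bash") and
+# the instruction ("source").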
+
+
+class CompletionItem:
+ """Represents a completion value and metadata about the value. The
+ default metadata is ``type`` to indicate special shell handling,
+ and ``help`` if a shell supports showing a help string next to the
+ value.
+
+ Arbitrary parameters can be passed when creating the object, and
+ accessed using ``item.attr``. If an attribute wasn't passed,
+ accessing it returns ``None``.
+
+ :param value: The completion suggestion.
+ :param type: Tells the shell script to provide special completion
+ support for the type. Click uses ``"dir"`` and ``"file"``.
+ :param help: String shown next to the value if supported.
+ :param kwargs: Arbitrary metadata. The built-in implementations
+ don't use this, but custom type completions paired with custom
+ shell support could use it.
+ """
+
+ __slots__ = ("value", "type", "help", "_info")
+
+ def __init__(
+ self,
+ value: t.Any,
+ type: str = "plain",
+ help: str | None = None,
+ **kwargs: t.Any,
+ ) -> None:
+ self.value: t.Any = value
+ self.type: str = type
+ self.help: str | None = help
+ self._info = kwargs
+
+ def __getattr__(self, name: str) -> t.Any:
+ return self._info.get(name)
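+
+# Editor's note: a hedged sketch of producing CompletionItem objects from a
+# custom parameter type (EnvVarType is invented for illustration):
+#
+#   class EnvVarType(click.ParamType):
+#       name = "envvar"
+#
+#       def shell_complete(self, ctx, param, incomplete):
+#           return [
+#               CompletionItem(k, help="environment variable")
+#               for k in os.environ
+#               if k.startswith(incomplete)
+#           ]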
+
+
+# Only Bash >= 4.4 has the nosort option.
+_SOURCE_BASH = """\
+%(complete_func)s() {
+ local IFS=$'\\n'
+ local response
+
+ response=$(env COMP_WORDS="${COMP_WORDS[*]}" COMP_CWORD=$COMP_CWORD \
+%(complete_var)s=bash_complete $1)
+
+ for completion in $response; do
+ IFS=',' read type value <<< "$completion"
+
+ if [[ $type == 'dir' ]]; then
+ COMPREPLY=()
+ compopt -o dirnames
+ elif [[ $type == 'file' ]]; then
+ COMPREPLY=()
+ compopt -o default
+ elif [[ $type == 'plain' ]]; then
+ COMPREPLY+=($value)
+ fi
+ done
+
+ return 0
+}
+
+%(complete_func)s_setup() {
+ complete -o nosort -F %(complete_func)s %(prog_name)s
+}
+
+%(complete_func)s_setup;
+"""
+
+# See ZshComplete.format_completion below, and issue #2703, before
+# changing this script.
+#
+# (TL;DR: _describe is picky about the format, but this Zsh script snippet
+# is already widely deployed. So freeze this script, and use clever-ish
+# handling of colons in ZshComplete.format_completion.)
+_SOURCE_ZSH = """\
+#compdef %(prog_name)s
+
+%(complete_func)s() {
+ local -a completions
+ local -a completions_with_descriptions
+ local -a response
+ (( ! $+commands[%(prog_name)s] )) && return 1
+
+ response=("${(@f)$(env COMP_WORDS="${words[*]}" COMP_CWORD=$((CURRENT-1)) \
+%(complete_var)s=zsh_complete %(prog_name)s)}")
+
+ for type key descr in ${response}; do
+ if [[ "$type" == "plain" ]]; then
+ if [[ "$descr" == "_" ]]; then
+ completions+=("$key")
+ else
+ completions_with_descriptions+=("$key":"$descr")
+ fi
+ elif [[ "$type" == "dir" ]]; then
+ _path_files -/
+ elif [[ "$type" == "file" ]]; then
+ _path_files -f
+ fi
+ done
+
+ if [ -n "$completions_with_descriptions" ]; then
+ _describe -V unsorted completions_with_descriptions -U
+ fi
+
+ if [ -n "$completions" ]; then
+ compadd -U -V unsorted -a completions
+ fi
+}
+
+if [[ $zsh_eval_context[-1] == loadautofunc ]]; then
+ # autoload from fpath, call function directly
+ %(complete_func)s "$@"
+else
+ # eval/source/. command, register function for later
+ compdef %(complete_func)s %(prog_name)s
+fi
+"""
+
+_SOURCE_FISH = """\
+function %(complete_func)s;
+ set -l response (env %(complete_var)s=fish_complete COMP_WORDS=(commandline -cp) \
+COMP_CWORD=(commandline -t) %(prog_name)s);
+
+ for completion in $response;
+ set -l metadata (string split "," $completion);
+
+ if test $metadata[1] = "dir";
+ __fish_complete_directories $metadata[2];
+ else if test $metadata[1] = "file";
+ __fish_complete_path $metadata[2];
+ else if test $metadata[1] = "plain";
+ echo $metadata[2];
+ end;
+ end;
+end;
+
+complete --no-files --command %(prog_name)s --arguments \
+"(%(complete_func)s)";
+"""
+
+
+class ShellComplete:
+ """Base class for providing shell completion support. A subclass for
+ a given shell will override attributes and methods to implement the
+ completion instructions (``source`` and ``complete``).
+
+ :param cli: Command being called.
+ :param prog_name: Name of the executable in the shell.
+ :param complete_var: Name of the environment variable that holds
+ the completion instruction.
+
+ .. versionadded:: 8.0
+ """
+
+ name: t.ClassVar[str]
+ """Name to register the shell as with :func:`add_completion_class`.
+ This is used in completion instructions (``{name}_source`` and
+ ``{name}_complete``).
+ """
+
+ source_template: t.ClassVar[str]
+ """Completion script template formatted by :meth:`source`. This must
+ be provided by subclasses.
+ """
+
+ def __init__(
+ self,
+ cli: Command,
+ ctx_args: cabc.MutableMapping[str, t.Any],
+ prog_name: str,
+ complete_var: str,
+ ) -> None:
+ self.cli = cli
+ self.ctx_args = ctx_args
+ self.prog_name = prog_name
+ self.complete_var = complete_var
+
+ @property
+ def func_name(self) -> str:
+ """The name of the shell function defined by the completion
+ script.
+ """
+ safe_name = re.sub(r"\W*", "", self.prog_name.replace("-", "_"), flags=re.ASCII)
+ return f"_{safe_name}_completion"
+
+ def source_vars(self) -> dict[str, t.Any]:
+ """Vars for formatting :attr:`source_template`.
+
+ By default this provides ``complete_func``, ``complete_var``,
+ and ``prog_name``.
+ """
+ return {
+ "complete_func": self.func_name,
+ "complete_var": self.complete_var,
+ "prog_name": self.prog_name,
+ }
+
+ def source(self) -> str:
+ """Produce the shell script that defines the completion
+ function. By default this ``%``-style formats
+ :attr:`source_template` with the dict returned by
+ :meth:`source_vars`.
+ """
+ return self.source_template % self.source_vars()
+
+ def get_completion_args(self) -> tuple[list[str], str]:
+ """Use the env vars defined by the shell script to return a
+ tuple of ``args, incomplete``. This must be implemented by
+ subclasses.
+ """
+ raise NotImplementedError
+
+ def get_completions(self, args: list[str], incomplete: str) -> list[CompletionItem]:
+ """Determine the context and last complete command or parameter
+ from the complete args. Call that object's ``shell_complete``
+ method to get the completions for the incomplete value.
+
+ :param args: List of complete args before the incomplete value.
+ :param incomplete: Value being completed. May be empty.
+ """
+ ctx = _resolve_context(self.cli, self.ctx_args, self.prog_name, args)
+ obj, incomplete = _resolve_incomplete(ctx, args, incomplete)
+ return obj.shell_complete(ctx, incomplete)
+
+ def format_completion(self, item: CompletionItem) -> str:
+ """Format a completion item into the form recognized by the
+ shell script. This must be implemented by subclasses.
+
+ :param item: Completion item to format.
+ """
+ raise NotImplementedError
+
+ def complete(self) -> str:
+ """Produce the completion data to send back to the shell.
+
+ By default this calls :meth:`get_completion_args`, gets the
+ completions, then calls :meth:`format_completion` for each
+ completion.
+ """
+ args, incomplete = self.get_completion_args()
+ completions = self.get_completions(args, incomplete)
+ out = [self.format_completion(item) for item in completions]
+ return "\n".join(out)
+
+
+class BashComplete(ShellComplete):
+ """Shell completion for Bash."""
+
+ name = "bash"
+ source_template = _SOURCE_BASH
+
+ @staticmethod
+ def _check_version() -> None:
+ import shutil
+ import subprocess
+
+ bash_exe = shutil.which("bash")
+
+ if bash_exe is None:
+ match = None
+ else:
+ output = subprocess.run(
+ [bash_exe, "--norc", "-c", 'echo "${BASH_VERSION}"'],
+ stdout=subprocess.PIPE,
+ )
+ match = re.search(r"^(\d+)\.(\d+)\.\d+", output.stdout.decode())
+
+ if match is not None:
+ major, minor = match.groups()
+
+ if major < "4" or major == "4" and minor < "4":
+ echo(
+ _(
+ "Shell completion is not supported for Bash"
+ " versions older than 4.4."
+ ),
+ err=True,
+ )
+ else:
+ echo(
+ _("Couldn't detect Bash version, shell completion is not supported."),
+ err=True,
+ )
+
+ def source(self) -> str:
+ self._check_version()
+ return super().source()
+
+ def get_completion_args(self) -> tuple[list[str], str]:
+ cwords = split_arg_string(os.environ["COMP_WORDS"])
+ cword = int(os.environ["COMP_CWORD"])
+ args = cwords[1:cword]
+
+ try:
+ incomplete = cwords[cword]
+ except IndexError:
+ incomplete = ""
+
+ return args, incomplete
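+
+ # Editor's note (worked example): with COMP_WORDS="cli sub --na" and
+ # COMP_CWORD=2, cwords == ["cli", "sub", "--na"], so args == ["sub"]
+ # and incomplete == "--na".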
+
+ def format_completion(self, item: CompletionItem) -> str:
+ return f"{item.type},{item.value}"
+
+
+class ZshComplete(ShellComplete):
+ """Shell completion for Zsh."""
+
+ name = "zsh"
+ source_template = _SOURCE_ZSH
+
+ def get_completion_args(self) -> tuple[list[str], str]:
+ cwords = split_arg_string(os.environ["COMP_WORDS"])
+ cword = int(os.environ["COMP_CWORD"])
+ args = cwords[1:cword]
+
+ try:
+ incomplete = cwords[cword]
+ except IndexError:
+ incomplete = ""
+
+ return args, incomplete
+
+ def format_completion(self, item: CompletionItem) -> str:
+ help_ = item.help or "_"
+ # The zsh completion script uses `_describe` on items with help
+ # texts (which splits the item help from the item value at the
+ # first unescaped colon) and `compadd` on items without help
+ # text (which uses the item value as-is and does not support
+ # colon escaping). So escape colons in the item value if and
+ # only if the item help is not the sentinel "_" value, as used
+ # by the completion script.
+ #
+ # (The zsh completion script is potentially widely deployed, and
+ # thus harder to fix than this method.)
+ #
+ # See issue #1812 and issue #2703 for further context.
+ value = item.value.replace(":", r"\:") if help_ != "_" else item.value
+ return f"{item.type}\n{value}\n{help_}"
+
+
+class FishComplete(ShellComplete):
+ """Shell completion for Fish."""
+
+ name = "fish"
+ source_template = _SOURCE_FISH
+
+ def get_completion_args(self) -> tuple[list[str], str]:
+ cwords = split_arg_string(os.environ["COMP_WORDS"])
+ incomplete = os.environ["COMP_CWORD"]
+ if incomplete:
+ incomplete = split_arg_string(incomplete)[0]
+ args = cwords[1:]
+
+ # Fish stores the partial word in both COMP_WORDS and
+ # COMP_CWORD, remove it from complete args.
+ if incomplete and args and args[-1] == incomplete:
+ args.pop()
+
+ return args, incomplete
+
+ def format_completion(self, item: CompletionItem) -> str:
+ if item.help:
+ return f"{item.type},{item.value}\t{item.help}"
+
+ return f"{item.type},{item.value}"
+
+
+ShellCompleteType = t.TypeVar("ShellCompleteType", bound="type[ShellComplete]")
+
+
+_available_shells: dict[str, type[ShellComplete]] = {
+ "bash": BashComplete,
+ "fish": FishComplete,
+ "zsh": ZshComplete,
+}
+
+
+def add_completion_class(
+ cls: ShellCompleteType, name: str | None = None
+) -> ShellCompleteType:
+ """Register a :class:`ShellComplete` subclass under the given name.
+ The name will be provided by the completion instruction environment
+ variable during completion.
+
+ :param cls: The completion class that will handle completion for the
+ shell.
+ :param name: Name to register the class under. Defaults to the
+ class's ``name`` attribute.
+ """
+ if name is None:
+ name = cls.name
+
+ _available_shells[name] = cls
+
+ return cls
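+
+# Editor's note: a hedged sketch of registering support for another shell
+# (PowerShellComplete and its template are invented for illustration):
+#
+#   @add_completion_class
+#   class PowerShellComplete(ShellComplete):
+#       name = "pwsh"
+#       source_template = "..."  # script invoking %(complete_var)s=pwsh_complete
+#
+#       def get_completion_args(self): ...
+#       def format_completion(self, item): ...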
+
+
+def get_completion_class(shell: str) -> type[ShellComplete] | None:
+ """Look up a registered :class:`ShellComplete` subclass by the name
+ provided by the completion instruction environment variable. If the
+ name isn't registered, returns ``None``.
+
+ :param shell: Name the class is registered under.
+ """
+ return _available_shells.get(shell)
+
+
+def split_arg_string(string: str) -> list[str]:
+ """Split an argument string as with :func:`shlex.split`, but don't
+ fail if the string is incomplete. Ignores a missing closing quote or
+ incomplete escape sequence and uses the partial token as-is.
+
+ .. code-block:: python
+
+ split_arg_string("example 'my file")
+ ["example", "my file"]
+
+ split_arg_string("example my\\")
+ ["example", "my"]
+
+ :param string: String to split.
+
+ .. versionchanged:: 8.2
+ Moved to ``shell_completion`` from ``parser``.
+ """
+ import shlex
+
+ lex = shlex.shlex(string, posix=True)
+ lex.whitespace_split = True
+ lex.commenters = ""
+ out = []
+
+ try:
+ for token in lex:
+ out.append(token)
+ except ValueError:
+ # Raised when end-of-string is reached in an invalid state. Use
+ # the partial token as-is. The quote or escape character is in
+ # lex.state, not lex.token.
+ out.append(lex.token)
+
+ return out
+
+
+def _is_incomplete_argument(ctx: Context, param: Parameter) -> bool:
+ """Determine if the given parameter is an argument that can still
+ accept values.
+
+ :param ctx: Invocation context for the command represented by the
+ parsed complete args.
+ :param param: Argument object being checked.
+ """
+ if not isinstance(param, Argument):
+ return False
+
+ assert param.name is not None
+ # Will be None if expose_value is False.
+ value = ctx.params.get(param.name)
+ return (
+ param.nargs == -1
+ or ctx.get_parameter_source(param.name) is not ParameterSource.COMMANDLINE
+ or (
+ param.nargs > 1
+ and isinstance(value, (tuple, list))
+ and len(value) < param.nargs
+ )
+ )
+
+
+def _start_of_option(ctx: Context, value: str) -> bool:
+ """Check if the value looks like the start of an option."""
+ if not value:
+ return False
+
+ c = value[0]
+ return c in ctx._opt_prefixes
+
+
+def _is_incomplete_option(ctx: Context, args: list[str], param: Parameter) -> bool:
+ """Determine if the given parameter is an option that needs a value.
+
+ :param args: List of complete args before the incomplete value.
+ :param param: Option object being checked.
+ """
+ if not isinstance(param, Option):
+ return False
+
+ if param.is_flag or param.count:
+ return False
+
+ last_option = None
+
+ for index, arg in enumerate(reversed(args)):
+ if index + 1 > param.nargs:
+ break
+
+ if _start_of_option(ctx, arg):
+ last_option = arg
+ break
+
+ return last_option is not None and last_option in param.opts
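+
+# Editor's note (worked example): for an Option declared as "--name" with
+# nargs=1, args ending in ["--name"] makes this return True, so the option
+# itself (rather than the command) supplies completions for the value
+# currently being typed.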
+
+
+def _resolve_context(
+ cli: Command,
+ ctx_args: cabc.MutableMapping[str, t.Any],
+ prog_name: str,
+ args: list[str],
+) -> Context:
+ """Produce the context hierarchy starting with the command and
+ traversing the complete arguments. This only follows the commands,
+ it doesn't trigger input prompts or callbacks.
+
+ :param cli: Command being called.
+ :param prog_name: Name of the executable in the shell.
+ :param args: List of complete args before the incomplete value.
+ """
+ ctx_args["resilient_parsing"] = True
+ with cli.make_context(prog_name, args.copy(), **ctx_args) as ctx:
+ args = ctx._protected_args + ctx.args
+
+ while args:
+ command = ctx.command
+
+ if isinstance(command, Group):
+ if not command.chain:
+ name, cmd, args = command.resolve_command(ctx, args)
+
+ if cmd is None:
+ return ctx
+
+ with cmd.make_context(
+ name, args, parent=ctx, resilient_parsing=True
+ ) as sub_ctx:
+ ctx = sub_ctx
+ args = ctx._protected_args + ctx.args
+ else:
+ sub_ctx = ctx
+
+ while args:
+ name, cmd, args = command.resolve_command(ctx, args)
+
+ if cmd is None:
+ return ctx
+
+ with cmd.make_context(
+ name,
+ args,
+ parent=ctx,
+ allow_extra_args=True,
+ allow_interspersed_args=False,
+ resilient_parsing=True,
+ ) as sub_sub_ctx:
+ sub_ctx = sub_sub_ctx
+ args = sub_ctx.args
+
+ ctx = sub_ctx
+ args = [*sub_ctx._protected_args, *sub_ctx.args]
+ else:
+ break
+
+ return ctx
+
+
+def _resolve_incomplete(
+ ctx: Context, args: list[str], incomplete: str
+) -> tuple[Command | Parameter, str]:
+ """Find the Click object that will handle the completion of the
+ incomplete value. Return the object and the incomplete value.
+
+ :param ctx: Invocation context for the command represented by
+ the parsed complete args.
+ :param args: List of complete args before the incomplete value.
+ :param incomplete: Value being completed. May be empty.
+ """
+ # Different shells treat an "=" between a long option name and
+ # value differently. Might keep the value joined, return the "="
+ # as a separate item, or return the split name and value. Always
+ # split and discard the "=" to make completion easier.
+ if incomplete == "=":
+ incomplete = ""
+ elif "=" in incomplete and _start_of_option(ctx, incomplete):
+ name, _, incomplete = incomplete.partition("=")
+ args.append(name)
+
+ # The "--" marker tells Click to stop treating values as options
+ # even if they start with the option character. If it hasn't been
+ # given and the incomplete arg looks like an option, the current
+ # command will provide option name completions.
+ if "--" not in args and _start_of_option(ctx, incomplete):
+ return ctx.command, incomplete
+
+ params = ctx.command.get_params(ctx)
+
+ # If the last complete arg is an option name with an incomplete
+ # value, the option will provide value completions.
+ for param in params:
+ if _is_incomplete_option(ctx, args, param):
+ return param, incomplete
+
+ # It's not an option name or value. The first argument without a
+ # parsed value will provide value completions.
+ for param in params:
+ if _is_incomplete_argument(ctx, param):
+ return param, incomplete
+
+ # There were no unparsed arguments, the command may be a group that
+ # will provide command name completions.
+ return ctx.command, incomplete
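+
+# Editor's note (end-to-end sketch; `cli` is an illustrative Group): for the
+# partial command line "cli sub --name Wor", the shell script exports
+# COMP_WORDS/COMP_CWORD, get_completion_args() yields
+# (["sub", "--name"], "Wor"), _resolve_context() descends into "sub", and
+# _resolve_incomplete() returns the --name Option, whose shell_complete()
+# produces the CompletionItem list that complete() formats for the shell.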
diff --git a/Backend/venv/lib/python3.12/site-packages/click/termui.py b/Backend/venv/lib/python3.12/site-packages/click/termui.py
new file mode 100644
index 00000000..2e98a077
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/click/termui.py
@@ -0,0 +1,883 @@
+from __future__ import annotations
+
+import collections.abc as cabc
+import inspect
+import io
+import itertools
+import sys
+import typing as t
+from contextlib import AbstractContextManager
+from gettext import gettext as _
+
+from ._compat import isatty
+from ._compat import strip_ansi
+from .exceptions import Abort
+from .exceptions import UsageError
+from .globals import resolve_color_default
+from .types import Choice
+from .types import convert_type
+from .types import ParamType
+from .utils import echo
+from .utils import LazyFile
+
+if t.TYPE_CHECKING:
+ from ._termui_impl import ProgressBar
+
+V = t.TypeVar("V")
+
+# The prompt functions to use. The doc tools currently override these
+# functions to customize how they work.
+visible_prompt_func: t.Callable[[str], str] = input
+
+_ansi_colors = {
+ "black": 30,
+ "red": 31,
+ "green": 32,
+ "yellow": 33,
+ "blue": 34,
+ "magenta": 35,
+ "cyan": 36,
+ "white": 37,
+ "reset": 39,
+ "bright_black": 90,
+ "bright_red": 91,
+ "bright_green": 92,
+ "bright_yellow": 93,
+ "bright_blue": 94,
+ "bright_magenta": 95,
+ "bright_cyan": 96,
+ "bright_white": 97,
+}
+_ansi_reset_all = "\033[0m"
+
+
+def hidden_prompt_func(prompt: str) -> str:
+ import getpass
+
+ return getpass.getpass(prompt)
+
+
+def _build_prompt(
+ text: str,
+ suffix: str,
+ show_default: bool = False,
+ default: t.Any | None = None,
+ show_choices: bool = True,
+ type: ParamType | None = None,
+) -> str:
+ prompt = text
+ if type is not None and show_choices and isinstance(type, Choice):
+ prompt += f" ({', '.join(map(str, type.choices))})"
+ if default is not None and show_default:
+ prompt = f"{prompt} [{_format_default(default)}]"
+ return f"{prompt}{suffix}"
+
+
+def _format_default(default: t.Any) -> t.Any:
+ if isinstance(default, (io.IOBase, LazyFile)) and hasattr(default, "name"):
+ return default.name
+
+ return default
+
+
+def prompt(
+ text: str,
+ default: t.Any | None = None,
+ hide_input: bool = False,
+ confirmation_prompt: bool | str = False,
+ type: ParamType | t.Any | None = None,
+ value_proc: t.Callable[[str], t.Any] | None = None,
+ prompt_suffix: str = ": ",
+ show_default: bool = True,
+ err: bool = False,
+ show_choices: bool = True,
+) -> t.Any:
+ """Prompts a user for input. This is a convenience function that can
+ be used to prompt a user for input later.
+
+ If the user aborts the input by sending an interrupt signal, this
+ function will catch it and raise a :exc:`Abort` exception.
+
+ :param text: the text to show for the prompt.
+ :param default: the default value to use if no input happens. If this
+ is not given it will prompt until it's aborted.
+ :param hide_input: if this is set to true then the input value will
+ be hidden.
+ :param confirmation_prompt: Prompt a second time to confirm the
+ value. Can be set to a string instead of ``True`` to customize
+ the message.
+ :param type: the type to use to check the value against.
+ :param value_proc: if this parameter is provided it's a function that
+ is invoked instead of the type conversion to
+ convert a value.
+ :param prompt_suffix: a suffix that should be added to the prompt.
+ :param show_default: shows or hides the default value in the prompt.
+ :param err: if set to true the file defaults to ``stderr`` instead of
+ ``stdout``, the same as with echo.
+ :param show_choices: Show or hide choices if the passed type is a Choice.
+ For example if type is a Choice of either day or week,
+ show_choices is true and text is "Group by" then the
+ prompt will be "Group by (day, week): ".
+
+ .. versionchanged:: 8.3.1
+ A space is no longer appended to the prompt.
+
+ .. versionadded:: 8.0
+ ``confirmation_prompt`` can be a custom string.
+
+ .. versionadded:: 7.0
+ Added the ``show_choices`` parameter.
+
+ .. versionadded:: 6.0
+ Added unicode support for cmd.exe on Windows.
+
+ .. versionadded:: 4.0
+ Added the `err` parameter.
+
+ """
+
+ def prompt_func(text: str) -> str:
+ f = hidden_prompt_func if hide_input else visible_prompt_func
+ try:
+ # Write the prompt separately so that we get nice
+ # coloring through colorama on Windows
+ echo(text[:-1], nl=False, err=err)
+ # Echo the last character to stdout to work around an issue where
+ # readline causes backspace to clear the whole line.
+ return f(text[-1:])
+ except (KeyboardInterrupt, EOFError):
+ # getpass doesn't print a newline if the user aborts input with ^C.
+ # Allegedly this behavior is inherited from getpass(3).
+ # A doc bug has been filed at https://bugs.python.org/issue24711
+ if hide_input:
+ echo(None, err=err)
+ raise Abort() from None
+
+ if value_proc is None:
+ value_proc = convert_type(type, default)
+
+ prompt = _build_prompt(
+ text, prompt_suffix, show_default, default, show_choices, type
+ )
+
+ if confirmation_prompt:
+ if confirmation_prompt is True:
+ confirmation_prompt = _("Repeat for confirmation")
+
+ confirmation_prompt = _build_prompt(confirmation_prompt, prompt_suffix)
+
+ while True:
+ while True:
+ value = prompt_func(prompt)
+ if value:
+ break
+ elif default is not None:
+ value = default
+ break
+ try:
+ result = value_proc(value)
+ except UsageError as e:
+ if hide_input:
+ echo(_("Error: The value you entered was invalid."), err=err)
+ else:
+ echo(_("Error: {e.message}").format(e=e), err=err)
+ continue
+ if not confirmation_prompt:
+ return result
+ while True:
+ value2 = prompt_func(confirmation_prompt)
+ is_empty = not value and not value2
+ if value2 or is_empty:
+ break
+ if value == value2:
+ return result
+ echo(_("Error: The two entered values do not match."), err=err)
+
+
+def confirm(
+ text: str,
+ default: bool | None = False,
+ abort: bool = False,
+ prompt_suffix: str = ": ",
+ show_default: bool = True,
+ err: bool = False,
+) -> bool:
+ """Prompts for confirmation (yes/no question).
+
+ If the user aborts the input by sending an interrupt signal, this
+ function will catch it and raise a :exc:`Abort` exception.
+
+ :param text: the question to ask.
+ :param default: The default value to use when no input is given. If
+ ``None``, repeat until input is given.
+ :param abort: if this is set to `True` a negative answer aborts the
+ execution by raising :exc:`Abort`.
+ :param prompt_suffix: a suffix that should be added to the prompt.
+ :param show_default: shows or hides the default value in the prompt.
+ :param err: if set to true the file defaults to ``stderr`` instead of
+ ``stdout``, the same as with echo.
+
+ .. versionchanged:: 8.3.1
+ A space is no longer appended to the prompt.
+
+ .. versionchanged:: 8.0
+ Repeat until input is given if ``default`` is ``None``.
+
+ .. versionadded:: 4.0
+ Added the ``err`` parameter.
+ """
+ prompt = _build_prompt(
+ text,
+ prompt_suffix,
+ show_default,
+ "y/n" if default is None else ("Y/n" if default else "y/N"),
+ )
+
+ while True:
+ try:
+ # Write the prompt separately so that we get nice
+ # coloring through colorama on Windows
+ echo(prompt[:-1], nl=False, err=err)
+ # Echo the last character to stdout to work around an issue where
+ # readline causes backspace to clear the whole line.
+ value = visible_prompt_func(prompt[-1:]).lower().strip()
+ except (KeyboardInterrupt, EOFError):
+ raise Abort() from None
+ if value in ("y", "yes"):
+ rv = True
+ elif value in ("n", "no"):
+ rv = False
+ elif default is not None and value == "":
+ rv = default
+ else:
+ echo(_("Error: invalid input"), err=err)
+ continue
+ break
+ if abort and not rv:
+ raise Abort()
+ return rv
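+
+# Editor's note (usage sketch; drop_database is a placeholder):
+#
+#   confirm("Drop the database?", abort=True)  # "n"/"no" raises Abort
+#   drop_database()                            # only reached on "y"/"yes"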
+
+
+def echo_via_pager(
+ text_or_generator: cabc.Iterable[str] | t.Callable[[], cabc.Iterable[str]] | str,
+ color: bool | None = None,
+) -> None:
+ """This function takes a text and shows it via an environment specific
+ pager on stdout.
+
+ .. versionchanged:: 3.0
+ Added the `color` flag.
+
+ :param text_or_generator: the text to page, or alternatively, a
+ generator emitting the text to page.
+ :param color: controls if the pager supports ANSI colors or not. The
+ default is autodetection.
+ """
+ color = resolve_color_default(color)
+
+ if inspect.isgeneratorfunction(text_or_generator):
+ i = t.cast("t.Callable[[], cabc.Iterable[str]]", text_or_generator)()
+ elif isinstance(text_or_generator, str):
+ i = [text_or_generator]
+ else:
+ i = iter(t.cast("cabc.Iterable[str]", text_or_generator))
+
+ # convert every element of i to a text type if necessary
+ text_generator = (el if isinstance(el, str) else str(el) for el in i)
+
+ from ._termui_impl import pager
+
+ return pager(itertools.chain(text_generator, "\n"), color)
+
+
+@t.overload
+def progressbar(
+ *,
+ length: int,
+ label: str | None = None,
+ hidden: bool = False,
+ show_eta: bool = True,
+ show_percent: bool | None = None,
+ show_pos: bool = False,
+ fill_char: str = "#",
+ empty_char: str = "-",
+ bar_template: str = "%(label)s [%(bar)s] %(info)s",
+ info_sep: str = " ",
+ width: int = 36,
+ file: t.TextIO | None = None,
+ color: bool | None = None,
+ update_min_steps: int = 1,
+) -> ProgressBar[int]: ...
+
+
+@t.overload
+def progressbar(
+ iterable: cabc.Iterable[V] | None = None,
+ length: int | None = None,
+ label: str | None = None,
+ hidden: bool = False,
+ show_eta: bool = True,
+ show_percent: bool | None = None,
+ show_pos: bool = False,
+ item_show_func: t.Callable[[V | None], str | None] | None = None,
+ fill_char: str = "#",
+ empty_char: str = "-",
+ bar_template: str = "%(label)s [%(bar)s] %(info)s",
+ info_sep: str = " ",
+ width: int = 36,
+ file: t.TextIO | None = None,
+ color: bool | None = None,
+ update_min_steps: int = 1,
+) -> ProgressBar[V]: ...
+
+
+def progressbar(
+ iterable: cabc.Iterable[V] | None = None,
+ length: int | None = None,
+ label: str | None = None,
+ hidden: bool = False,
+ show_eta: bool = True,
+ show_percent: bool | None = None,
+ show_pos: bool = False,
+ item_show_func: t.Callable[[V | None], str | None] | None = None,
+ fill_char: str = "#",
+ empty_char: str = "-",
+ bar_template: str = "%(label)s [%(bar)s] %(info)s",
+ info_sep: str = " ",
+ width: int = 36,
+ file: t.TextIO | None = None,
+ color: bool | None = None,
+ update_min_steps: int = 1,
+) -> ProgressBar[V]:
+ """This function creates an iterable context manager that can be used
+ to iterate over something while showing a progress bar. It will
+ either iterate over the `iterable` or `length` items (that are counted
+ up). While iteration happens, this function will print a rendered
+ progress bar to the given `file` (defaults to stdout) and will attempt
+ to calculate remaining time and more. By default, this progress bar
+ will not be rendered if the file is not a terminal.
+
+ The context manager creates the progress bar. When the context
+ manager is entered the progress bar is already created. With every
+ iteration over the progress bar, the iterable passed to the bar is
+ advanced and the bar is updated. When the context manager exits,
+ a newline is printed and the progress bar is finalized on screen.
+
+ Note: The progress bar is currently designed for use cases where the
+ total progress can be expected to take at least several seconds.
+ Because of this, the ProgressBar class won't display
+ progress that is considered too fast, or progress where the time
+ between steps is less than a second.
+
+ No other printing must happen while the bar is active, or the
+ progress bar will be unintentionally destroyed.
+
+ Example usage::
+
+ with progressbar(items) as bar:
+ for item in bar:
+ do_something_with(item)
+
+ Alternatively, if no iterable is specified, one can manually update the
+ progress bar through the `update()` method instead of directly
+ iterating over the progress bar. The update method accepts the number
+ of steps to increment the bar with::
+
+ with progressbar(length=chunks.total_bytes) as bar:
+ for chunk in chunks:
+ process_chunk(chunk)
+ bar.update(chunks.bytes)
+
+ The ``update()`` method also takes an optional value specifying the
+ ``current_item`` at the new position. This is useful when used
+ together with ``item_show_func`` to customize the output for each
+ manual step::
+
+ with click.progressbar(
+ length=total_size,
+ label='Unzipping archive',
+ item_show_func=lambda a: a.filename
+ ) as bar:
+ for archive in zip_file:
+ archive.extract()
+ bar.update(archive.size, archive)
+
+ :param iterable: an iterable to iterate over. If not provided the length
+ is required.
+ :param length: the number of items to iterate over. By default the
+ progressbar will attempt to ask the iterator about its
+ length, which might or might not work. If an iterable is
+ also provided this parameter can be used to override the
+ length. If an iterable is not provided the progress bar
+ will iterate over a range of that length.
+ :param label: the label to show next to the progress bar.
+ :param hidden: hide the progressbar. Defaults to ``False``. When no tty is
+ detected, it will only print the progressbar label. Setting this to
+ ``True`` also disables that.
+ :param show_eta: enables or disables the estimated time display. This is
+ automatically disabled if the length cannot be
+ determined.
+ :param show_percent: enables or disables the percentage display. The
+ default is `True` if the iterable has a length or
+ `False` if not.
+ :param show_pos: enables or disables the absolute position display. The
+ default is `False`.
+ :param item_show_func: A function called with the current item which
+ can return a string to show next to the progress bar. If the
+ function returns ``None`` nothing is shown. The current item can
+ be ``None``, such as when entering and exiting the bar.
+ :param fill_char: the character to use to show the filled part of the
+ progress bar.
+ :param empty_char: the character to use to show the non-filled part of
+ the progress bar.
+ :param bar_template: the format string to use as template for the bar.
+ The parameters in it are ``label`` for the label,
+ ``bar`` for the progress bar and ``info`` for the
+ info section.
+ :param info_sep: the separator between multiple info items (eta etc.)
+ :param width: the width of the progress bar in characters, 0 means full
+ terminal width
+ :param file: The file to write to. If this is not a terminal then
+ only the label is printed.
+ :param color: controls if the terminal supports ANSI colors or not. The
+ default is autodetection. This is only needed if ANSI
+ codes are included anywhere in the progress bar output
+ which is not the case by default.
+ :param update_min_steps: Render only when this many updates have
+ completed. This allows tuning for very fast iterators.
+
+ .. versionadded:: 8.2
+ The ``hidden`` argument.
+
+ .. versionchanged:: 8.0
+ Output is shown even if execution time is less than 0.5 seconds.
+
+ .. versionchanged:: 8.0
+ ``item_show_func`` shows the current item, not the previous one.
+
+ .. versionchanged:: 8.0
+ Labels are echoed if the output is not a TTY. Reverts a change
+ in 7.0 that removed all output.
+
+ .. versionadded:: 8.0
+ The ``update_min_steps`` parameter.
+
+ .. versionadded:: 4.0
+ The ``color`` parameter and ``update`` method.
+
+ .. versionadded:: 2.0
+ """
+ from ._termui_impl import ProgressBar
+
+ color = resolve_color_default(color)
+ return ProgressBar(
+ iterable=iterable,
+ length=length,
+ hidden=hidden,
+ show_eta=show_eta,
+ show_percent=show_percent,
+ show_pos=show_pos,
+ item_show_func=item_show_func,
+ fill_char=fill_char,
+ empty_char=empty_char,
+ bar_template=bar_template,
+ info_sep=info_sep,
+ file=file,
+ label=label,
+ width=width,
+ color=color,
+ update_min_steps=update_min_steps,
+ )
+
+
+def clear() -> None:
+ """Clears the terminal screen. This will have the effect of clearing
+ the whole visible space of the terminal and moving the cursor to the
+ top left. This does not do anything if not connected to a terminal.
+
+ .. versionadded:: 2.0
+ """
+ if not isatty(sys.stdout):
+ return
+
+ # ANSI escape \033[2J clears the screen, \033[1;1H moves the cursor
+ echo("\033[2J\033[1;1H", nl=False)
+
+
+def _interpret_color(color: int | tuple[int, int, int] | str, offset: int = 0) -> str:
+ if isinstance(color, int):
+ return f"{38 + offset};5;{color:d}"
+
+ if isinstance(color, (tuple, list)):
+ r, g, b = color
+ return f"{38 + offset};2;{r:d};{g:d};{b:d}"
+
+ return str(_ansi_colors[color] + offset)
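+
+# Editor's note (worked examples of the encoding above, foreground offset=0):
+#   _interpret_color("red")          -> "31"              (named color)
+#   _interpret_color(196)            -> "38;5;196"        (8-bit palette)
+#   _interpret_color((255, 12, 128)) -> "38;2;255;12;128" (24-bit RGB)
+# With offset=10 the same inputs yield the background codes ("41", "48;...").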
+
+
+def style(
+ text: t.Any,
+ fg: int | tuple[int, int, int] | str | None = None,
+ bg: int | tuple[int, int, int] | str | None = None,
+ bold: bool | None = None,
+ dim: bool | None = None,
+ underline: bool | None = None,
+ overline: bool | None = None,
+ italic: bool | None = None,
+ blink: bool | None = None,
+ reverse: bool | None = None,
+ strikethrough: bool | None = None,
+ reset: bool = True,
+) -> str:
+ """Styles a text with ANSI styles and returns the new string. By
+ default the styling is self contained which means that at the end
+ of the string a reset code is issued. This can be prevented by
+ passing ``reset=False``.
+
+ Examples::
+
+ click.echo(click.style('Hello World!', fg='green'))
+ click.echo(click.style('ATTENTION!', blink=True))
+ click.echo(click.style('Some things', reverse=True, fg='cyan'))
+ click.echo(click.style('More colors', fg=(255, 12, 128), bg=117))
+
+ Supported color names:
+
+ * ``black`` (might be a gray)
+ * ``red``
+ * ``green``
+ * ``yellow`` (might be an orange)
+ * ``blue``
+ * ``magenta``
+ * ``cyan``
+ * ``white`` (might be light gray)
+ * ``bright_black``
+ * ``bright_red``
+ * ``bright_green``
+ * ``bright_yellow``
+ * ``bright_blue``
+ * ``bright_magenta``
+ * ``bright_cyan``
+ * ``bright_white``
+ * ``reset`` (reset the color code only)
+
+ If the terminal supports it, color may also be specified as:
+
+ - An integer in the interval [0, 255]. The terminal must support
+ 8-bit/256-color mode.
+ - An RGB tuple of three integers in [0, 255]. The terminal must
+ support 24-bit/true-color mode.
+
+ See https://en.wikipedia.org/wiki/ANSI_color and
+ https://gist.github.com/XVilka/8346728 for more information.
+
+ :param text: the string to style with ansi codes.
+ :param fg: if provided this will become the foreground color.
+ :param bg: if provided this will become the background color.
+ :param bold: if provided this will enable or disable bold mode.
+ :param dim: if provided this will enable or disable dim mode. This is
+ badly supported.
+ :param underline: if provided this will enable or disable underline.
+ :param overline: if provided this will enable or disable overline.
+ :param italic: if provided this will enable or disable italic.
+ :param blink: if provided this will enable or disable blinking.
+ :param reverse: if provided this will enable or disable inverse
+ rendering (foreground becomes background and the
+ other way round).
+ :param strikethrough: if provided this will enable or disable
+ striking through text.
+ :param reset: by default a reset-all code is added at the end of the
+ string which means that styles do not carry over. This
+ can be disabled to compose styles.
+
+ .. versionchanged:: 8.0
+ A non-string ``message`` is converted to a string.
+
+ .. versionchanged:: 8.0
+ Added support for 256 and RGB color codes.
+
+ .. versionchanged:: 8.0
+ Added the ``strikethrough``, ``italic``, and ``overline``
+ parameters.
+
+ .. versionchanged:: 7.0
+ Added support for bright colors.
+
+ .. versionadded:: 2.0
+ """
+ if not isinstance(text, str):
+ text = str(text)
+
+ bits = []
+
+ if fg:
+ try:
+ bits.append(f"\033[{_interpret_color(fg)}m")
+ except KeyError:
+ raise TypeError(f"Unknown color {fg!r}") from None
+
+ if bg:
+ try:
+ bits.append(f"\033[{_interpret_color(bg, 10)}m")
+ except KeyError:
+ raise TypeError(f"Unknown color {bg!r}") from None
+
+ if bold is not None:
+ bits.append(f"\033[{1 if bold else 22}m")
+ if dim is not None:
+ bits.append(f"\033[{2 if dim else 22}m")
+ if underline is not None:
+ bits.append(f"\033[{4 if underline else 24}m")
+ if overline is not None:
+ bits.append(f"\033[{53 if overline else 55}m")
+ if italic is not None:
+ bits.append(f"\033[{3 if italic else 23}m")
+ if blink is not None:
+ bits.append(f"\033[{5 if blink else 25}m")
+ if reverse is not None:
+ bits.append(f"\033[{7 if reverse else 27}m")
+ if strikethrough is not None:
+ bits.append(f"\033[{9 if strikethrough else 29}m")
+ bits.append(text)
+ if reset:
+ bits.append(_ansi_reset_all)
+ return "".join(bits)
+
+
+def unstyle(text: str) -> str:
+ """Removes ANSI styling information from a string. Usually it's not
+ necessary to use this function as Click's echo function will
+ automatically remove styling if necessary.
+
+ .. versionadded:: 2.0
+
+ :param text: the text to remove style information from.
+ """
+ return strip_ansi(text)
+
+
+def secho(
+ message: t.Any | None = None,
+ file: t.IO[t.AnyStr] | None = None,
+ nl: bool = True,
+ err: bool = False,
+ color: bool | None = None,
+ **styles: t.Any,
+) -> None:
+ """This function combines :func:`echo` and :func:`style` into one
+ call. As such the following two calls are the same::
+
+ click.secho('Hello World!', fg='green')
+ click.echo(click.style('Hello World!', fg='green'))
+
+ All keyword arguments are forwarded to the underlying functions
+ depending on which one they go with.
+
+ Non-string types will be converted to :class:`str`. However,
+ :class:`bytes` are passed directly to :meth:`echo` without applying
+ style. If you want to style bytes that represent text, call
+ :meth:`bytes.decode` first.
+
+ .. versionchanged:: 8.0
+ A non-string ``message`` is converted to a string. Bytes are
+ passed through without style applied.
+
+ .. versionadded:: 2.0
+ """
+ if message is not None and not isinstance(message, (bytes, bytearray)):
+ message = style(message, **styles)
+
+ return echo(message, file=file, nl=nl, err=err, color=color)
+
+
+@t.overload
+def edit(
+ text: bytes | bytearray,
+ editor: str | None = None,
+ env: cabc.Mapping[str, str] | None = None,
+ require_save: bool = False,
+ extension: str = ".txt",
+) -> bytes | None: ...
+
+
+@t.overload
+def edit(
+ text: str,
+ editor: str | None = None,
+ env: cabc.Mapping[str, str] | None = None,
+ require_save: bool = True,
+ extension: str = ".txt",
+) -> str | None: ...
+
+
+@t.overload
+def edit(
+ text: None = None,
+ editor: str | None = None,
+ env: cabc.Mapping[str, str] | None = None,
+ require_save: bool = True,
+ extension: str = ".txt",
+ filename: str | cabc.Iterable[str] | None = None,
+) -> None: ...
+
+
+def edit(
+ text: str | bytes | bytearray | None = None,
+ editor: str | None = None,
+ env: cabc.Mapping[str, str] | None = None,
+ require_save: bool = True,
+ extension: str = ".txt",
+ filename: str | cabc.Iterable[str] | None = None,
+) -> str | bytes | bytearray | None:
+ r"""Edits the given text in the defined editor. If an editor is given
+ (should be the full path to the executable but the regular operating
+ system search path is used for finding the executable) it overrides
+ the detected editor. Optionally, some environment variables can be
+ used. If the editor is closed without changes, `None` is returned. In
+ case a file is edited directly the return value is always `None` and
+ `require_save` and `extension` are ignored.
+
+ If the editor cannot be opened a :exc:`UsageError` is raised.
+
+ Note for Windows: to simplify cross-platform usage, the newlines are
+ automatically converted from POSIX to Windows and vice versa. As such,
+ the message here will have ``\n`` as newline markers.
+
+ :param text: the text to edit.
+ :param editor: optionally the editor to use. Defaults to automatic
+ detection.
+ :param env: environment variables to forward to the editor.
+ :param require_save: if this is true, then not saving in the editor
+ will make the return value become `None`.
+ :param extension: the extension to tell the editor about. This defaults
+ to `.txt` but changing this might change syntax
+ highlighting.
+ :param filename: if provided it will edit this file instead of the
+ provided text contents. It will not use a temporary
+ file as an indirection in that case. If the editor supports
+ editing multiple files at once, a sequence of files may be
+ passed as well. Invoke `click.edit` once per file instead
+ if multiple files cannot be managed at once or editing the
+ files serially is desired.
+
+ .. versionchanged:: 8.2.0
+ ``filename`` now accepts any ``Iterable[str]`` in addition to a ``str``
+ if the ``editor`` supports editing multiple files at once.
+
+ """
+ from ._termui_impl import Editor
+
+ ed = Editor(editor=editor, env=env, require_save=require_save, extension=extension)
+
+ if filename is None:
+ return ed.edit(text)
+
+ if isinstance(filename, str):
+ filename = (filename,)
+
+ ed.edit_files(filenames=filename)
+ return None
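+
+# Editor's note (usage sketch):
+#
+#   message = edit("# Describe your change\n")
+#   # `message` is the saved text, or None if the editor exited without
+#   # saving; edit(filename="notes.txt") edits the file in place and
+#   # always returns None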
+
+
+def launch(url: str, wait: bool = False, locate: bool = False) -> int:
+ """This function launches the given URL (or filename) in the default
+ viewer application for this file type. If this is an executable, it
+ might launch the executable in a new session. The return value is
+ the exit code of the launched application. Usually, ``0`` indicates
+ success.
+
+ Examples::
+
+ click.launch('https://click.palletsprojects.com/')
+ click.launch('/my/downloaded/file', locate=True)
+
+ .. versionadded:: 2.0
+
+ :param url: URL or filename of the thing to launch.
+ :param wait: Wait for the program to exit before returning. This
+ only works if the launched program blocks. In particular,
+ ``xdg-open`` on Linux does not block.
+ :param locate: if this is set to `True` then instead of launching the
+ application associated with the URL it will attempt to
+ launch a file manager with the file located. This
+ might have weird effects if the URL does not point to
+ the filesystem.
+ """
+ from ._termui_impl import open_url
+
+ return open_url(url, wait=wait, locate=locate)
+
+
+# If this is provided, getchar() calls into this instead. This is used
+# for unittesting purposes.
+_getchar: t.Callable[[bool], str] | None = None
+
+
+def getchar(echo: bool = False) -> str:
+ """Fetches a single character from the terminal and returns it. This
+ will always return a unicode character and under certain rare
+ circumstances this might return more than one character. The
+ situations in which more than one character is returned are when, for
+ whatever reason, multiple characters end up in the terminal buffer or
+ standard input was not actually a terminal.
+
+ Note that this will always read from the terminal, even if something
+ is piped into the standard input.
+
+ Note for Windows: in rare cases when typing non-ASCII characters, this
+ function might wait for a second character and then return both at once.
+ This is because certain Unicode characters look like special-key markers.
+
+ .. versionadded:: 2.0
+
+ :param echo: if set to `True`, the character read will also show up on
+ the terminal. The default is to not show it.
+ """
+ global _getchar
+
+ if _getchar is None:
+ from ._termui_impl import getchar as f
+
+ _getchar = f
+
+ return _getchar(echo)
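+
+# Editor's note (usage sketch):
+#
+#   echo("Continue? [y/n] ", nl=False)
+#   ch = getchar()
+#   echo()
+#   if ch.lower() != "y":
+#       raise Abort()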
+
+
+def raw_terminal() -> AbstractContextManager[int]:
+ from ._termui_impl import raw_terminal as f
+
+ return f()
+
+
+def pause(info: str | None = None, err: bool = False) -> None:
+ """This command stops execution and waits for the user to press any
+ key to continue. This is similar to the Windows batch "pause"
+ command. If the program is not run through a terminal, this command
+ will instead do nothing.
+
+ .. versionadded:: 2.0
+
+ .. versionadded:: 4.0
+ Added the `err` parameter.
+
+ :param info: The message to print before pausing. Defaults to
+ ``"Press any key to continue..."``.
+ :param err: if set to true the message goes to ``stderr`` instead of
+ ``stdout``, the same as with echo.
+ """
+ if not isatty(sys.stdin) or not isatty(sys.stdout):
+ return
+
+ if info is None:
+ info = _("Press any key to continue...")
+
+ try:
+ if info:
+ echo(info, nl=False, err=err)
+ try:
+ getchar()
+ except (KeyboardInterrupt, EOFError):
+ pass
+ finally:
+ if info:
+ echo(err=err)
diff --git a/Backend/venv/lib/python3.12/site-packages/click/testing.py b/Backend/venv/lib/python3.12/site-packages/click/testing.py
new file mode 100644
index 00000000..f6f60b80
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/click/testing.py
@@ -0,0 +1,577 @@
+from __future__ import annotations
+
+import collections.abc as cabc
+import contextlib
+import io
+import os
+import shlex
+import sys
+import tempfile
+import typing as t
+from types import TracebackType
+
+from . import _compat
+from . import formatting
+from . import termui
+from . import utils
+from ._compat import _find_binary_reader
+
+if t.TYPE_CHECKING:
+ from _typeshed import ReadableBuffer
+
+ from .core import Command
+
+
+class EchoingStdin:
+ def __init__(self, input: t.BinaryIO, output: t.BinaryIO) -> None:
+ self._input = input
+ self._output = output
+ self._paused = False
+
+ def __getattr__(self, x: str) -> t.Any:
+ return getattr(self._input, x)
+
+ def _echo(self, rv: bytes) -> bytes:
+ if not self._paused:
+ self._output.write(rv)
+
+ return rv
+
+ def read(self, n: int = -1) -> bytes:
+ return self._echo(self._input.read(n))
+
+ def read1(self, n: int = -1) -> bytes:
+ return self._echo(self._input.read1(n)) # type: ignore
+
+ def readline(self, n: int = -1) -> bytes:
+ return self._echo(self._input.readline(n))
+
+ def readlines(self) -> list[bytes]:
+ return [self._echo(x) for x in self._input.readlines()]
+
+ def __iter__(self) -> cabc.Iterator[bytes]:
+ return iter(self._echo(x) for x in self._input)
+
+ def __repr__(self) -> str:
+ return repr(self._input)
+
+
+@contextlib.contextmanager
+def _pause_echo(stream: EchoingStdin | None) -> cabc.Iterator[None]:
+ if stream is None:
+ yield
+ else:
+ stream._paused = True
+ yield
+ stream._paused = False
+
+
+class BytesIOCopy(io.BytesIO):
+ """Patch ``io.BytesIO`` to let the written stream be copied to another.
+
+ .. versionadded:: 8.2
+ """
+
+ def __init__(self, copy_to: io.BytesIO) -> None:
+ super().__init__()
+ self.copy_to = copy_to
+
+ def flush(self) -> None:
+ super().flush()
+ self.copy_to.flush()
+
+ def write(self, b: ReadableBuffer) -> int:
+ self.copy_to.write(b)
+ return super().write(b)
+
+
+class StreamMixer:
+ """Mixes `` and `` streams.
+
+ The result is available in the ``output`` attribute.
+
+ .. versionadded:: 8.2
+ """
+
+ def __init__(self) -> None:
+ self.output: io.BytesIO = io.BytesIO()
+ self.stdout: io.BytesIO = BytesIOCopy(copy_to=self.output)
+ self.stderr: io.BytesIO = BytesIOCopy(copy_to=self.output)
+
+ def __del__(self) -> None:
+ """
+ Guarantee that embedded file-like objects are closed in a
+ predictable order, protecting against races between
+ self.output being closed and other streams being flushed on close
+
+ .. versionadded:: 8.2.2
+ """
+ self.stderr.close()
+ self.stdout.close()
+ self.output.close()
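+
+# Editor's note (behavior sketch): bytes written to either stream are also
+# mirrored into `output`, preserving interleaving order:
+#
+#   mixer = StreamMixer()
+#   mixer.stdout.write(b"out ")
+#   mixer.stderr.write(b"err")
+#   assert mixer.output.getvalue() == b"out err"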
+
+
+class _NamedTextIOWrapper(io.TextIOWrapper):
+ def __init__(
+ self, buffer: t.BinaryIO, name: str, mode: str, **kwargs: t.Any
+ ) -> None:
+ super().__init__(buffer, **kwargs)
+ self._name = name
+ self._mode = mode
+
+ @property
+ def name(self) -> str:
+ return self._name
+
+ @property
+ def mode(self) -> str:
+ return self._mode
+
+
+def make_input_stream(
+ input: str | bytes | t.IO[t.Any] | None, charset: str
+) -> t.BinaryIO:
+ # Is already an input stream.
+ if hasattr(input, "read"):
+ rv = _find_binary_reader(t.cast("t.IO[t.Any]", input))
+
+ if rv is not None:
+ return rv
+
+ raise TypeError("Could not find binary reader for input stream.")
+
+ if input is None:
+ input = b""
+ elif isinstance(input, str):
+ input = input.encode(charset)
+
+ return io.BytesIO(input)
+
+
+class Result:
+ """Holds the captured result of an invoked CLI script.
+
+ :param runner: The runner that created the result
+ :param stdout_bytes: The standard output as bytes.
+ :param stderr_bytes: The standard error as bytes.
+ :param output_bytes: A mix of ``stdout_bytes`` and ``stderr_bytes``, as the
+ user would see it in the terminal.
+ :param return_value: The value returned from the invoked command.
+ :param exit_code: The exit code as integer.
+ :param exception: The exception that happened if one did.
+ :param exc_info: Exception information (exception type, exception instance,
+ traceback type).
+
+ .. versionchanged:: 8.2
+ ``stderr_bytes`` no longer optional, ``output_bytes`` introduced and
+ ``mix_stderr`` has been removed.
+
+ .. versionadded:: 8.0
+ Added ``return_value``.
+ """
+
+ def __init__(
+ self,
+ runner: CliRunner,
+ stdout_bytes: bytes,
+ stderr_bytes: bytes,
+ output_bytes: bytes,
+ return_value: t.Any,
+ exit_code: int,
+ exception: BaseException | None,
+ exc_info: tuple[type[BaseException], BaseException, TracebackType]
+ | None = None,
+ ):
+ self.runner = runner
+ self.stdout_bytes = stdout_bytes
+ self.stderr_bytes = stderr_bytes
+ self.output_bytes = output_bytes
+ self.return_value = return_value
+ self.exit_code = exit_code
+ self.exception = exception
+ self.exc_info = exc_info
+
+ @property
+ def output(self) -> str:
+ """The terminal output as unicode string, as the user would see it.
+
+ .. versionchanged:: 8.2
+ No longer a proxy for ``self.stdout``. Now has its own independent stream
+ that is mixing ``<stdout>`` and ``<stderr>``, in the order they were written.
+ """
+ return self.output_bytes.decode(self.runner.charset, "replace").replace(
+ "\r\n", "\n"
+ )
+
+ @property
+ def stdout(self) -> str:
+ """The standard output as unicode string."""
+ return self.stdout_bytes.decode(self.runner.charset, "replace").replace(
+ "\r\n", "\n"
+ )
+
+ @property
+ def stderr(self) -> str:
+ """The standard error as unicode string.
+
+ .. versionchanged:: 8.2
+ No longer raises an exception; always returns the ``<stderr>`` string.
+ """
+ return self.stderr_bytes.decode(self.runner.charset, "replace").replace(
+ "\r\n", "\n"
+ )
+
+ def __repr__(self) -> str:
+ exc_str = repr(self.exception) if self.exception else "okay"
+ return f"<{type(self).__name__} {exc_str}>"
+
+
+class CliRunner:
+ """The CLI runner provides functionality to invoke a Click command line
+ script for unittesting purposes in an isolated environment. This only
+ works in single-threaded systems without any concurrency as it changes the
+ global interpreter state.
+
+ :param charset: the character set for the input and output data.
+ :param env: a dictionary with environment variables for overriding.
+ :param echo_stdin: if this is set to `True`, then reading from ``<stdin>``
+ writes to ``<stdout>``. This is useful for showing examples in
+ some circumstances. Note that regular prompts
+ will automatically echo the input.
+ :param catch_exceptions: Whether to catch any exceptions other than
+ ``SystemExit`` when running :meth:`~CliRunner.invoke`.
+
+ .. versionchanged:: 8.2
+ Added the ``catch_exceptions`` parameter.
+
+ .. versionchanged:: 8.2
+ ``mix_stderr`` parameter has been removed.
+ """
+
+ def __init__(
+ self,
+ charset: str = "utf-8",
+ env: cabc.Mapping[str, str | None] | None = None,
+ echo_stdin: bool = False,
+ catch_exceptions: bool = True,
+ ) -> None:
+ self.charset = charset
+ self.env: cabc.Mapping[str, str | None] = env or {}
+ self.echo_stdin = echo_stdin
+ self.catch_exceptions = catch_exceptions
+
+ def get_default_prog_name(self, cli: Command) -> str:
+ """Given a command object it will return the default program name
+ for it. The default is the `name` attribute or ``"root"`` if not
+ set.
+ """
+ return cli.name or "root"
+
+ def make_env(
+ self, overrides: cabc.Mapping[str, str | None] | None = None
+ ) -> cabc.Mapping[str, str | None]:
+ """Returns the environment overrides for invoking a script."""
+ rv = dict(self.env)
+ if overrides:
+ rv.update(overrides)
+ return rv
+
+ @contextlib.contextmanager
+ def isolation(
+ self,
+ input: str | bytes | t.IO[t.Any] | None = None,
+ env: cabc.Mapping[str, str | None] | None = None,
+ color: bool = False,
+ ) -> cabc.Iterator[tuple[io.BytesIO, io.BytesIO, io.BytesIO]]:
+ """A context manager that sets up the isolation for invoking of a
+ command line tool. This sets up ``<stdin>`` with the given input data
+ and `os.environ` with the overrides from the given dictionary.
+ This also rebinds some internals in Click to be mocked (like the
+ prompt functionality).
+
+ This is automatically done in the :meth:`invoke` method.
+
+ :param input: the input stream to put into `sys.stdin`.
+ :param env: the environment overrides as dictionary.
+ :param color: whether the output should contain color codes. The
+ application can still override this explicitly.
+
+ .. versionadded:: 8.2
+ An additional output stream is returned, which is a mix of
+ ``<stdout>`` and ``<stderr>`` streams.
+
+ .. versionchanged:: 8.2
+ Always returns the ``<stderr>`` stream.
+
+ .. versionchanged:: 8.0
+ ``<stderr>`` is opened with ``errors="backslashreplace"``
+ instead of the default ``"strict"``.
+
+ .. versionchanged:: 4.0
+ Added the ``color`` parameter.
+ """
+ bytes_input = make_input_stream(input, self.charset)
+ echo_input = None
+
+ old_stdin = sys.stdin
+ old_stdout = sys.stdout
+ old_stderr = sys.stderr
+ old_forced_width = formatting.FORCED_WIDTH
+ formatting.FORCED_WIDTH = 80
+
+ env = self.make_env(env)
+
+ stream_mixer = StreamMixer()
+
+ if self.echo_stdin:
+ bytes_input = echo_input = t.cast(
+ t.BinaryIO, EchoingStdin(bytes_input, stream_mixer.stdout)
+ )
+
+ sys.stdin = text_input = _NamedTextIOWrapper(
+ bytes_input, encoding=self.charset, name="<stdin>", mode="r"
+ )
+
+ if self.echo_stdin:
+ # Force unbuffered reads, otherwise TextIOWrapper reads a
+ # large chunk which is echoed early.
+ text_input._CHUNK_SIZE = 1 # type: ignore
+
+ sys.stdout = _NamedTextIOWrapper(
+ stream_mixer.stdout, encoding=self.charset, name="<stdout>", mode="w"
+ )
+
+ sys.stderr = _NamedTextIOWrapper(
+ stream_mixer.stderr,
+ encoding=self.charset,
+ name="",
+ mode="w",
+ errors="backslashreplace",
+ )
+
+ @_pause_echo(echo_input) # type: ignore
+ def visible_input(prompt: str | None = None) -> str:
+ sys.stdout.write(prompt or "")
+ try:
+ val = next(text_input).rstrip("\r\n")
+ except StopIteration as e:
+ raise EOFError() from e
+ sys.stdout.write(f"{val}\n")
+ sys.stdout.flush()
+ return val
+
+ @_pause_echo(echo_input) # type: ignore
+ def hidden_input(prompt: str | None = None) -> str:
+ sys.stdout.write(f"{prompt or ''}\n")
+ sys.stdout.flush()
+ try:
+ return next(text_input).rstrip("\r\n")
+ except StopIteration as e:
+ raise EOFError() from e
+
+ @_pause_echo(echo_input) # type: ignore
+ def _getchar(echo: bool) -> str:
+ char = sys.stdin.read(1)
+
+ if echo:
+ sys.stdout.write(char)
+
+ sys.stdout.flush()
+ return char
+
+ default_color = color
+
+ def should_strip_ansi(
+ stream: t.IO[t.Any] | None = None, color: bool | None = None
+ ) -> bool:
+ if color is None:
+ return not default_color
+ return not color
+
+ old_visible_prompt_func = termui.visible_prompt_func
+ old_hidden_prompt_func = termui.hidden_prompt_func
+ old__getchar_func = termui._getchar
+ old_should_strip_ansi = utils.should_strip_ansi # type: ignore
+ old__compat_should_strip_ansi = _compat.should_strip_ansi
+ termui.visible_prompt_func = visible_input
+ termui.hidden_prompt_func = hidden_input
+ termui._getchar = _getchar
+ utils.should_strip_ansi = should_strip_ansi # type: ignore
+ _compat.should_strip_ansi = should_strip_ansi
+
+ old_env = {}
+ try:
+ for key, value in env.items():
+ old_env[key] = os.environ.get(key)
+ if value is None:
+ try:
+ del os.environ[key]
+ except Exception:
+ pass
+ else:
+ os.environ[key] = value
+ yield (stream_mixer.stdout, stream_mixer.stderr, stream_mixer.output)
+ finally:
+ for key, value in old_env.items():
+ if value is None:
+ try:
+ del os.environ[key]
+ except Exception:
+ pass
+ else:
+ os.environ[key] = value
+ sys.stdout = old_stdout
+ sys.stderr = old_stderr
+ sys.stdin = old_stdin
+ termui.visible_prompt_func = old_visible_prompt_func
+ termui.hidden_prompt_func = old_hidden_prompt_func
+ termui._getchar = old__getchar_func
+ utils.should_strip_ansi = old_should_strip_ansi # type: ignore
+ _compat.should_strip_ansi = old__compat_should_strip_ansi
+ formatting.FORCED_WIDTH = old_forced_width
+
+ def invoke(
+ self,
+ cli: Command,
+ args: str | cabc.Sequence[str] | None = None,
+ input: str | bytes | t.IO[t.Any] | None = None,
+ env: cabc.Mapping[str, str | None] | None = None,
+ catch_exceptions: bool | None = None,
+ color: bool = False,
+ **extra: t.Any,
+ ) -> Result:
+ """Invokes a command in an isolated environment. The arguments are
+ forwarded directly to the command line script, the `extra` keyword
+ arguments are passed to the :meth:`~clickpkg.Command.main` function of
+ the command.
+
+ This returns a :class:`Result` object.
+
+ :param cli: the command to invoke
+ :param args: the arguments to invoke. It may be given as an iterable
+ or a string. When given as string it will be interpreted
+ as a Unix shell command. More details at
+ :func:`shlex.split`.
+ :param input: the input data for `sys.stdin`.
+ :param env: the environment overrides.
+ :param catch_exceptions: Whether to catch any other exceptions than
+ ``SystemExit``. If :data:`None`, the value
+ from :class:`CliRunner` is used.
+ :param extra: the keyword arguments to pass to :meth:`main`.
+ :param color: whether the output should contain color codes. The
+ application can still override this explicitly.
+
+ .. versionadded:: 8.2
+ The result object has the ``output_bytes`` attribute with
+ the mix of ``stdout_bytes`` and ``stderr_bytes``, as the user would
+ see it in their terminal.
+
+ .. versionchanged:: 8.2
+ The result object always returns the ``stderr_bytes`` stream.
+
+ .. versionchanged:: 8.0
+ The result object has the ``return_value`` attribute with
+ the value returned from the invoked command.
+
+ .. versionchanged:: 4.0
+ Added the ``color`` parameter.
+
+ .. versionchanged:: 3.0
+ Added the ``catch_exceptions`` parameter.
+
+ .. versionchanged:: 3.0
+ The result object has the ``exc_info`` attribute with the
+ traceback if available.
+ """
+ exc_info = None
+ if catch_exceptions is None:
+ catch_exceptions = self.catch_exceptions
+
+ with self.isolation(input=input, env=env, color=color) as outstreams:
+ return_value = None
+ exception: BaseException | None = None
+ exit_code = 0
+
+ if isinstance(args, str):
+ args = shlex.split(args)
+
+ try:
+ prog_name = extra.pop("prog_name")
+ except KeyError:
+ prog_name = self.get_default_prog_name(cli)
+
+ try:
+ return_value = cli.main(args=args or (), prog_name=prog_name, **extra)
+ except SystemExit as e:
+ exc_info = sys.exc_info()
+ e_code = t.cast("int | t.Any | None", e.code)
+
+ if e_code is None:
+ e_code = 0
+
+ if e_code != 0:
+ exception = e
+
+ if not isinstance(e_code, int):
+ sys.stdout.write(str(e_code))
+ sys.stdout.write("\n")
+ e_code = 1
+
+ exit_code = e_code
+
+ except Exception as e:
+ if not catch_exceptions:
+ raise
+ exception = e
+ exit_code = 1
+ exc_info = sys.exc_info()
+ finally:
+ sys.stdout.flush()
+ sys.stderr.flush()
+ stdout = outstreams[0].getvalue()
+ stderr = outstreams[1].getvalue()
+ output = outstreams[2].getvalue()
+
+ return Result(
+ runner=self,
+ stdout_bytes=stdout,
+ stderr_bytes=stderr,
+ output_bytes=output,
+ return_value=return_value,
+ exit_code=exit_code,
+ exception=exception,
+ exc_info=exc_info, # type: ignore
+ )
+
+ @contextlib.contextmanager
+ def isolated_filesystem(
+ self, temp_dir: str | os.PathLike[str] | None = None
+ ) -> cabc.Iterator[str]:
+ """A context manager that creates a temporary directory and
+ changes the current working directory to it. This isolates tests
+ that affect the contents of the CWD to prevent them from
+ interfering with each other.
+
+ :param temp_dir: Create the temporary directory under this
+ directory. If given, the created directory is not removed
+ when exiting.
+
+ .. versionchanged:: 8.0
+ Added the ``temp_dir`` parameter.
+ """
+ cwd = os.getcwd()
+ dt = tempfile.mkdtemp(dir=temp_dir)
+ os.chdir(dt)
+
+ try:
+ yield dt
+ finally:
+ os.chdir(cwd)
+
+ if temp_dir is None:
+ import shutil
+
+ try:
+ shutil.rmtree(dt)
+ except OSError:
+ pass
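+
+
+# Sketches of the isolation helpers, assuming a hypothetical ``cli`` command:
+#
+#     runner = CliRunner()
+#
+#     # ``isolation`` is normally entered indirectly via ``invoke``; used
+#     # directly, it yields the (stdout, stderr, mixed output) byte buffers.
+#     with runner.isolation(input="some input\n") as (out, err, mixed):
+#         ...
+#
+#     # ``isolated_filesystem`` runs the body in a fresh temporary CWD, so
+#     # tests that write files cannot interfere with each other.
+#     with runner.isolated_filesystem():
+#         with open("config.txt", "w") as f:
+#             f.write("value")
+#         result = runner.invoke(cli, ["--config", "config.txt"])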
diff --git a/Backend/venv/lib/python3.12/site-packages/click/types.py b/Backend/venv/lib/python3.12/site-packages/click/types.py
new file mode 100644
index 00000000..e71c1c21
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/click/types.py
@@ -0,0 +1,1209 @@
+from __future__ import annotations
+
+import collections.abc as cabc
+import enum
+import os
+import stat
+import sys
+import typing as t
+from datetime import datetime
+from gettext import gettext as _
+from gettext import ngettext
+
+from ._compat import _get_argv_encoding
+from ._compat import open_stream
+from .exceptions import BadParameter
+from .utils import format_filename
+from .utils import LazyFile
+from .utils import safecall
+
+if t.TYPE_CHECKING:
+ import typing_extensions as te
+
+ from .core import Context
+ from .core import Parameter
+ from .shell_completion import CompletionItem
+
+ParamTypeValue = t.TypeVar("ParamTypeValue")
+
+
+class ParamType:
+ """Represents the type of a parameter. Validates and converts values
+ from the command line or Python into the correct type.
+
+ To implement a custom type, subclass and implement at least the
+ following:
+
+ - The :attr:`name` class attribute must be set.
+ - Calling an instance of the type with ``None`` must return
+ ``None``. This is already implemented by default.
+ - :meth:`convert` must convert string values to the correct type.
+ - :meth:`convert` must accept values that are already the correct
+ type.
+ - It must be able to convert a value if the ``ctx`` and ``param``
+ arguments are ``None``. This can occur when converting prompt
+ input.
+ """
+
+ is_composite: t.ClassVar[bool] = False
+ arity: t.ClassVar[int] = 1
+
+ #: the descriptive name of this type
+ name: str
+
+ #: if a list of this type is expected and the value is pulled from a
+ #: string environment variable, this is what splits it up. `None`
+ #: means any whitespace. For all parameters the general rule is that
+ #: whitespace splits them up. The exception are paths and files which
+ #: are split by ``os.path.pathsep`` by default (":" on Unix and ";" on
+ #: Windows).
+ envvar_list_splitter: t.ClassVar[str | None] = None
+
+ def to_info_dict(self) -> dict[str, t.Any]:
+ """Gather information that could be useful for a tool generating
+ user-facing documentation.
+
+ Use :meth:`click.Context.to_info_dict` to traverse the entire
+ CLI structure.
+
+ .. versionadded:: 8.0
+ """
+ # The class name without the "ParamType" suffix.
+ param_type = type(self).__name__.partition("ParamType")[0]
+ param_type = param_type.partition("ParameterType")[0]
+
+ # Custom subclasses might not remember to set a name.
+ if hasattr(self, "name"):
+ name = self.name
+ else:
+ name = param_type
+
+ return {"param_type": param_type, "name": name}
+
+ def __call__(
+ self,
+ value: t.Any,
+ param: Parameter | None = None,
+ ctx: Context | None = None,
+ ) -> t.Any:
+ if value is not None:
+ return self.convert(value, param, ctx)
+
+ def get_metavar(self, param: Parameter, ctx: Context) -> str | None:
+ """Returns the metavar default for this param if it provides one."""
+
+ def get_missing_message(self, param: Parameter, ctx: Context | None) -> str | None:
+ """Optionally might return extra information about a missing
+ parameter.
+
+ .. versionadded:: 2.0
+ """
+
+ def convert(
+ self, value: t.Any, param: Parameter | None, ctx: Context | None
+ ) -> t.Any:
+ """Convert the value to the correct type. This is not called if
+ the value is ``None`` (the missing value).
+
+ This must accept string values from the command line, as well as
+ values that are already the correct type. It may also convert
+ other compatible types.
+
+ The ``param`` and ``ctx`` arguments may be ``None`` in certain
+ situations, such as when converting prompt input.
+
+ If the value cannot be converted, call :meth:`fail` with a
+ descriptive message.
+
+ :param value: The value to convert.
+ :param param: The parameter that is using this type to convert
+ its value. May be ``None``.
+ :param ctx: The current context that arrived at this value. May
+ be ``None``.
+ """
+ return value
+
+ def split_envvar_value(self, rv: str) -> cabc.Sequence[str]:
+ """Given a value from an environment variable this splits it up
+ into small chunks depending on the defined envvar list splitter.
+
+ If the splitter is set to `None`, which means that whitespace splits,
+ then leading and trailing whitespace is ignored. Otherwise, leading
+ and trailing splitters usually lead to empty items being included.
+ """
+ return (rv or "").split(self.envvar_list_splitter)
+
+ def fail(
+ self,
+ message: str,
+ param: Parameter | None = None,
+ ctx: Context | None = None,
+ ) -> t.NoReturn:
+ """Helper method to fail with an invalid value message."""
+ raise BadParameter(message, ctx=ctx, param=param)
+
+ def shell_complete(
+ self, ctx: Context, param: Parameter, incomplete: str
+ ) -> list[CompletionItem]:
+ """Return a list of
+ :class:`~click.shell_completion.CompletionItem` objects for the
+ incomplete value. Most types do not provide completions, but
+ some do, and this allows custom types to provide custom
+ completions as well.
+
+ :param ctx: Invocation context for this command.
+ :param param: The parameter that is requesting completion.
+ :param incomplete: Value being completed. May be empty.
+
+ .. versionadded:: 8.0
+ """
+ return []
+
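+# A minimal custom type following the contract above: set ``name``, implement
+# ``convert`` so it accepts both strings and already-converted values, and
+# call ``fail`` on bad input. ``CommaSeparated`` is hypothetical, not a type
+# shipped by Click:
+#
+#     class CommaSeparated(ParamType):
+#         name = "comma-separated"
+#
+#         def convert(self, value, param, ctx):
+#             if isinstance(value, list):
+#                 return value
+#             if not isinstance(value, str):
+#                 self.fail(f"{value!r} is not a string", param, ctx)
+#             return [item.strip() for item in value.split(",")]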
+
+class CompositeParamType(ParamType):
+ is_composite = True
+
+ @property
+ def arity(self) -> int: # type: ignore
+ raise NotImplementedError()
+
+
+class FuncParamType(ParamType):
+ def __init__(self, func: t.Callable[[t.Any], t.Any]) -> None:
+ self.name: str = func.__name__
+ self.func = func
+
+ def to_info_dict(self) -> dict[str, t.Any]:
+ info_dict = super().to_info_dict()
+ info_dict["func"] = self.func
+ return info_dict
+
+ def convert(
+ self, value: t.Any, param: Parameter | None, ctx: Context | None
+ ) -> t.Any:
+ try:
+ return self.func(value)
+ except ValueError:
+ try:
+ value = str(value)
+ except UnicodeError:
+ value = value.decode("utf-8", "replace")
+
+ self.fail(value, param, ctx)
+
+
+class UnprocessedParamType(ParamType):
+ name = "text"
+
+ def convert(
+ self, value: t.Any, param: Parameter | None, ctx: Context | None
+ ) -> t.Any:
+ return value
+
+ def __repr__(self) -> str:
+ return "UNPROCESSED"
+
+
+class StringParamType(ParamType):
+ name = "text"
+
+ def convert(
+ self, value: t.Any, param: Parameter | None, ctx: Context | None
+ ) -> t.Any:
+ if isinstance(value, bytes):
+ enc = _get_argv_encoding()
+ try:
+ value = value.decode(enc)
+ except UnicodeError:
+ fs_enc = sys.getfilesystemencoding()
+ if fs_enc != enc:
+ try:
+ value = value.decode(fs_enc)
+ except UnicodeError:
+ value = value.decode("utf-8", "replace")
+ else:
+ value = value.decode("utf-8", "replace")
+ return value
+ return str(value)
+
+ def __repr__(self) -> str:
+ return "STRING"
+
+
+class Choice(ParamType, t.Generic[ParamTypeValue]):
+ """The choice type allows a value to be checked against a fixed set
+ of supported values.
+
+ You may pass any iterable value which will be converted to a tuple
+ and thus will only be iterated once.
+
+ The resulting value will always be one of the originally passed choices.
+ See :meth:`normalize_choice` for more info on the mapping of strings
+ to choices. See :ref:`choice-opts` for an example.
+
+ :param case_sensitive: Set to false to make choices case
+ insensitive. Defaults to true.
+
+ .. versionchanged:: 8.2.0
+ Non-``str`` ``choices`` are now supported, and ``choices`` may be any
+ iterable. Previously, passing anything other than a list or tuple was
+ not recommended.
+
+ .. versionadded:: 8.2.0
+ Choice normalization can be overridden via :meth:`normalize_choice`.
+ """
+
+ name = "choice"
+
+ def __init__(
+ self, choices: cabc.Iterable[ParamTypeValue], case_sensitive: bool = True
+ ) -> None:
+ self.choices: cabc.Sequence[ParamTypeValue] = tuple(choices)
+ self.case_sensitive = case_sensitive
+
+ def to_info_dict(self) -> dict[str, t.Any]:
+ info_dict = super().to_info_dict()
+ info_dict["choices"] = self.choices
+ info_dict["case_sensitive"] = self.case_sensitive
+ return info_dict
+
+ def _normalized_mapping(
+ self, ctx: Context | None = None
+ ) -> cabc.Mapping[ParamTypeValue, str]:
+ """
+ Returns mapping where keys are the original choices and the values are
+ the normalized values that are accepted via the command line.
+
+ This is a simple wrapper around :meth:`normalize_choice`; override
+ that method instead, as it is the supported extension point.
+ """
+ return {
+ choice: self.normalize_choice(
+ choice=choice,
+ ctx=ctx,
+ )
+ for choice in self.choices
+ }
+
+ def normalize_choice(self, choice: ParamTypeValue, ctx: Context | None) -> str:
+ """
+ Normalize a choice value, used to map a passed string to a choice.
+ Each choice must have a unique normalized value.
+
+ By default this uses :meth:`Context.token_normalize_func` and, if the
+ type is not case sensitive, casefolds the value.
+
+ .. versionadded:: 8.2.0
+ """
+ normed_value = choice.name if isinstance(choice, enum.Enum) else str(choice)
+
+ if ctx is not None and ctx.token_normalize_func is not None:
+ normed_value = ctx.token_normalize_func(normed_value)
+
+ if not self.case_sensitive:
+ normed_value = normed_value.casefold()
+
+ return normed_value
+
+ def get_metavar(self, param: Parameter, ctx: Context) -> str | None:
+ if param.param_type_name == "option" and not param.show_choices: # type: ignore
+ choice_metavars = [
+ convert_type(type(choice)).name.upper() for choice in self.choices
+ ]
+ choices_str = "|".join([*dict.fromkeys(choice_metavars)])
+ else:
+ choices_str = "|".join(
+ [str(i) for i in self._normalized_mapping(ctx=ctx).values()]
+ )
+
+ # Use curly braces to indicate a required argument.
+ if param.required and param.param_type_name == "argument":
+ return f"{{{choices_str}}}"
+
+ # Use square braces to indicate an option or optional argument.
+ return f"[{choices_str}]"
+
+ def get_missing_message(self, param: Parameter, ctx: Context | None) -> str:
+ """
+ Message shown when no choice is passed.
+
+ .. versionchanged:: 8.2.0 Added ``ctx`` argument.
+ """
+ return _("Choose from:\n\t{choices}").format(
+ choices=",\n\t".join(self._normalized_mapping(ctx=ctx).values())
+ )
+
+ def convert(
+ self, value: t.Any, param: Parameter | None, ctx: Context | None
+ ) -> ParamTypeValue:
+ """
+ For a given value from the parser, normalize it and find its
+ matching normalized value in the list of choices. Then return the
+ matched "original" choice.
+ """
+ normed_value = self.normalize_choice(choice=value, ctx=ctx)
+ normalized_mapping = self._normalized_mapping(ctx=ctx)
+
+ try:
+ return next(
+ original
+ for original, normalized in normalized_mapping.items()
+ if normalized == normed_value
+ )
+ except StopIteration:
+ self.fail(
+ self.get_invalid_choice_message(value=value, ctx=ctx),
+ param=param,
+ ctx=ctx,
+ )
+
+ def get_invalid_choice_message(self, value: t.Any, ctx: Context | None) -> str:
+ """Get the error message when the given choice is invalid.
+
+ :param value: The invalid value.
+
+ .. versionadded:: 8.2
+ """
+ choices_str = ", ".join(map(repr, self._normalized_mapping(ctx=ctx).values()))
+ return ngettext(
+ "{value!r} is not {choice}.",
+ "{value!r} is not one of {choices}.",
+ len(self.choices),
+ ).format(value=value, choice=choices_str, choices=choices_str)
+
+ def __repr__(self) -> str:
+ return f"Choice({list(self.choices)})"
+
+ def shell_complete(
+ self, ctx: Context, param: Parameter, incomplete: str
+ ) -> list[CompletionItem]:
+ """Complete choices that start with the incomplete value.
+
+ :param ctx: Invocation context for this command.
+ :param param: The parameter that is requesting completion.
+ :param incomplete: Value being completed. May be empty.
+
+ .. versionadded:: 8.0
+ """
+ from click.shell_completion import CompletionItem
+
+ str_choices = map(str, self.choices)
+
+ if self.case_sensitive:
+ matched = (c for c in str_choices if c.startswith(incomplete))
+ else:
+ incomplete = incomplete.lower()
+ matched = (c for c in str_choices if c.lower().startswith(incomplete))
+
+ return [CompletionItem(c) for c in matched]
+
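+# A ``Choice`` sketch, including the 8.2 support for non-``str`` choices such
+# as enum members (the ``paint`` command is hypothetical):
+#
+#     import enum
+#     import click
+#
+#     class Color(enum.Enum):
+#         RED = "red"
+#         BLUE = "blue"
+#
+#     @click.command()
+#     @click.option("--color", type=click.Choice(Color, case_sensitive=False))
+#     def paint(color):
+#         click.echo(color)  # one of the original choices, e.g. Color.RED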
+
+class DateTime(ParamType):
+ """The DateTime type converts date strings into `datetime` objects.
+
+ The format strings which are checked are configurable, but default to some
+ common (non-timezone aware) ISO 8601 formats.
+
+ When specifying *DateTime* formats, you should only pass a list or a tuple.
+ Other iterables, like generators, may lead to surprising results.
+
+ The format strings are processed using ``datetime.strptime``, and this
+ consequently defines the format strings which are allowed.
+
+ Parsing is tried using each format, in order, and the first format which
+ parses successfully is used.
+
+ :param formats: A list or tuple of date format strings, in the order in
+ which they should be tried. Defaults to
+ ``'%Y-%m-%d'``, ``'%Y-%m-%dT%H:%M:%S'``,
+ ``'%Y-%m-%d %H:%M:%S'``.
+ """
+
+ name = "datetime"
+
+ def __init__(self, formats: cabc.Sequence[str] | None = None):
+ self.formats: cabc.Sequence[str] = formats or [
+ "%Y-%m-%d",
+ "%Y-%m-%dT%H:%M:%S",
+ "%Y-%m-%d %H:%M:%S",
+ ]
+
+ def to_info_dict(self) -> dict[str, t.Any]:
+ info_dict = super().to_info_dict()
+ info_dict["formats"] = self.formats
+ return info_dict
+
+ def get_metavar(self, param: Parameter, ctx: Context) -> str | None:
+ return f"[{'|'.join(self.formats)}]"
+
+ def _try_to_convert_date(self, value: t.Any, format: str) -> datetime | None:
+ try:
+ return datetime.strptime(value, format)
+ except ValueError:
+ return None
+
+ def convert(
+ self, value: t.Any, param: Parameter | None, ctx: Context | None
+ ) -> t.Any:
+ if isinstance(value, datetime):
+ return value
+
+ for format in self.formats:
+ converted = self._try_to_convert_date(value, format)
+
+ if converted is not None:
+ return converted
+
+ formats_str = ", ".join(map(repr, self.formats))
+ self.fail(
+ ngettext(
+ "{value!r} does not match the format {format}.",
+ "{value!r} does not match the formats {formats}.",
+ len(self.formats),
+ ).format(value=value, format=formats_str, formats=formats_str),
+ param,
+ ctx,
+ )
+
+ def __repr__(self) -> str:
+ return "DateTime"
+
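+# A ``DateTime`` sketch restricting input to date-only values via a custom
+# format list (the command is hypothetical):
+#
+#     @click.command()
+#     @click.option("--when", type=click.DateTime(formats=["%Y-%m-%d"]))
+#     def schedule(when):
+#         click.echo(when.isoformat())  # ``when`` is a ``datetime.datetime``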
+
+class _NumberParamTypeBase(ParamType):
+ _number_class: t.ClassVar[type[t.Any]]
+
+ def convert(
+ self, value: t.Any, param: Parameter | None, ctx: Context | None
+ ) -> t.Any:
+ try:
+ return self._number_class(value)
+ except ValueError:
+ self.fail(
+ _("{value!r} is not a valid {number_type}.").format(
+ value=value, number_type=self.name
+ ),
+ param,
+ ctx,
+ )
+
+
+class _NumberRangeBase(_NumberParamTypeBase):
+ def __init__(
+ self,
+ min: float | None = None,
+ max: float | None = None,
+ min_open: bool = False,
+ max_open: bool = False,
+ clamp: bool = False,
+ ) -> None:
+ self.min = min
+ self.max = max
+ self.min_open = min_open
+ self.max_open = max_open
+ self.clamp = clamp
+
+ def to_info_dict(self) -> dict[str, t.Any]:
+ info_dict = super().to_info_dict()
+ info_dict.update(
+ min=self.min,
+ max=self.max,
+ min_open=self.min_open,
+ max_open=self.max_open,
+ clamp=self.clamp,
+ )
+ return info_dict
+
+ def convert(
+ self, value: t.Any, param: Parameter | None, ctx: Context | None
+ ) -> t.Any:
+ import operator
+
+ rv = super().convert(value, param, ctx)
+ lt_min: bool = self.min is not None and (
+ operator.le if self.min_open else operator.lt
+ )(rv, self.min)
+ gt_max: bool = self.max is not None and (
+ operator.ge if self.max_open else operator.gt
+ )(rv, self.max)
+
+ if self.clamp:
+ if lt_min:
+ return self._clamp(self.min, 1, self.min_open) # type: ignore
+
+ if gt_max:
+ return self._clamp(self.max, -1, self.max_open) # type: ignore
+
+ if lt_min or gt_max:
+ self.fail(
+ _("{value} is not in the range {range}.").format(
+ value=rv, range=self._describe_range()
+ ),
+ param,
+ ctx,
+ )
+
+ return rv
+
+ def _clamp(self, bound: float, dir: t.Literal[1, -1], open: bool) -> float:
+ """Find the valid value to clamp to bound in the given
+ direction.
+
+ :param bound: The boundary value.
+ :param dir: 1 or -1 indicating the direction to move.
+ :param open: If true, the range does not include the bound.
+ """
+ raise NotImplementedError
+
+ def _describe_range(self) -> str:
+ """Describe the range for use in help text."""
+ if self.min is None:
+ op = "<" if self.max_open else "<="
+ return f"x{op}{self.max}"
+
+ if self.max is None:
+ op = ">" if self.min_open else ">="
+ return f"x{op}{self.min}"
+
+ lop = "<" if self.min_open else "<="
+ rop = "<" if self.max_open else "<="
+ return f"{self.min}{lop}x{rop}{self.max}"
+
+ def __repr__(self) -> str:
+ clamp = " clamped" if self.clamp else ""
+ return f"<{type(self).__name__} {self._describe_range()}{clamp}>"
+
+
+class IntParamType(_NumberParamTypeBase):
+ name = "integer"
+ _number_class = int
+
+ def __repr__(self) -> str:
+ return "INT"
+
+
+class IntRange(_NumberRangeBase, IntParamType):
+ """Restrict an :data:`click.INT` value to a range of accepted
+ values. See :ref:`ranges`.
+
+ If ``min`` or ``max`` are not passed, any value is accepted in that
+ direction. If ``min_open`` or ``max_open`` are enabled, the
+ corresponding boundary is not included in the range.
+
+ If ``clamp`` is enabled, a value outside the range is clamped to the
+ boundary instead of failing.
+
+ .. versionchanged:: 8.0
+ Added the ``min_open`` and ``max_open`` parameters.
+ """
+
+ name = "integer range"
+
+ def _clamp( # type: ignore
+ self, bound: int, dir: t.Literal[1, -1], open: bool
+ ) -> int:
+ if not open:
+ return bound
+
+ return bound + dir
+
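+# An ``IntRange`` sketch: out-of-range values fail, or with ``clamp`` snap to
+# the boundary; an open bound clamps one step inside the range:
+#
+#     count = IntRange(0, 10, clamp=True)
+#     count.convert("99", None, None)  # -> 10
+#     count.convert("-5", None, None)  # -> 0
+#
+#     positive = IntRange(0, min_open=True, clamp=True)
+#     positive.convert("0", None, None)  # -> 1 (0 itself is excluded)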
+
+class FloatParamType(_NumberParamTypeBase):
+ name = "float"
+ _number_class = float
+
+ def __repr__(self) -> str:
+ return "FLOAT"
+
+
+class FloatRange(_NumberRangeBase, FloatParamType):
+ """Restrict a :data:`click.FLOAT` value to a range of accepted
+ values. See :ref:`ranges`.
+
+ If ``min`` or ``max`` are not passed, any value is accepted in that
+ direction. If ``min_open`` or ``max_open`` are enabled, the
+ corresponding boundary is not included in the range.
+
+ If ``clamp`` is enabled, a value outside the range is clamped to the
+ boundary instead of failing. This is not supported if either
+ boundary is marked ``open``.
+
+ .. versionchanged:: 8.0
+ Added the ``min_open`` and ``max_open`` parameters.
+ """
+
+ name = "float range"
+
+ def __init__(
+ self,
+ min: float | None = None,
+ max: float | None = None,
+ min_open: bool = False,
+ max_open: bool = False,
+ clamp: bool = False,
+ ) -> None:
+ super().__init__(
+ min=min, max=max, min_open=min_open, max_open=max_open, clamp=clamp
+ )
+
+ if (min_open or max_open) and clamp:
+ raise TypeError("Clamping is not supported for open bounds.")
+
+ def _clamp(self, bound: float, dir: t.Literal[1, -1], open: bool) -> float:
+ if not open:
+ return bound
+
+ # Could use math.nextafter here, but clamping an
+ # open float range doesn't seem to be particularly useful. It's
+ # left up to the user to write a callback to do it if needed.
+ raise RuntimeError("Clamping is not supported for open bounds.")
+
+
+class BoolParamType(ParamType):
+ name = "boolean"
+
+ bool_states: dict[str, bool] = {
+ "1": True,
+ "0": False,
+ "yes": True,
+ "no": False,
+ "true": True,
+ "false": False,
+ "on": True,
+ "off": False,
+ "t": True,
+ "f": False,
+ "y": True,
+ "n": False,
+ # Absence of value is considered False.
+ "": False,
+ }
+ """A mapping of string values to boolean states.
+
+ Mapping is inspired by :py:attr:`configparser.ConfigParser.BOOLEAN_STATES`
+ and extends it.
+
+ .. caution::
+ String values are lower-cased, as the ``str_to_bool`` comparison function
+ below is case-insensitive.
+
+ .. warning::
+ The mapping is not exhaustive, and does not cover all possible boolean string
+ representations. It will remain as it is to avoid endless bikeshedding.
+
+ Future work may be considered to make this mapping user-configurable via a
+ public API.
+ """
+
+ @staticmethod
+ def str_to_bool(value: str | bool) -> bool | None:
+ """Convert a string to a boolean value.
+
+ If the value is already a boolean, it is returned as-is. If the value is a
+ string, it is stripped of whitespace and lower-cased, then checked against
+ the known boolean states pre-defined in the `BoolParamType.bool_states` mapping
+ above.
+
+ Returns `None` if the value does not match any known boolean state.
+ """
+ if isinstance(value, bool):
+ return value
+ return BoolParamType.bool_states.get(value.strip().lower())
+
+ def convert(
+ self, value: t.Any, param: Parameter | None, ctx: Context | None
+ ) -> bool:
+ normalized = self.str_to_bool(value)
+ if normalized is None:
+ self.fail(
+ _(
+ "{value!r} is not a valid boolean. Recognized values: {states}"
+ ).format(value=value, states=", ".join(sorted(self.bool_states))),
+ param,
+ ctx,
+ )
+ return normalized
+
+ def __repr__(self) -> str:
+ return "BOOL"
+
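+# A ``str_to_bool`` sketch: matching is whitespace- and case-insensitive, and
+# unknown strings yield ``None`` instead of raising:
+#
+#     BoolParamType.str_to_bool(" Yes ")  # -> True
+#     BoolParamType.str_to_bool("off")    # -> False
+#     BoolParamType.str_to_bool("maybe")  # -> None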
+
+class UUIDParameterType(ParamType):
+ name = "uuid"
+
+ def convert(
+ self, value: t.Any, param: Parameter | None, ctx: Context | None
+ ) -> t.Any:
+ import uuid
+
+ if isinstance(value, uuid.UUID):
+ return value
+
+ value = value.strip()
+
+ try:
+ return uuid.UUID(value)
+ except ValueError:
+ self.fail(
+ _("{value!r} is not a valid UUID.").format(value=value), param, ctx
+ )
+
+ def __repr__(self) -> str:
+ return "UUID"
+
+
+class File(ParamType):
+ """Declares a parameter to be a file for reading or writing. The file
+ is automatically closed once the context tears down (after the command
+ finished working).
+
+ Files can be opened for reading or writing. The special value ``-``
+ indicates stdin or stdout depending on the mode.
+
+ By default, the file is opened for reading text data, but it can also be
+ opened in binary mode or for writing. The encoding parameter can be used
+ to force a specific encoding.
+
+ The `lazy` flag controls if the file should be opened immediately or upon
+ first IO. The default is to be non-lazy for standard input and output
+ streams as well as files opened for reading, `lazy` otherwise. When opening a
+ file lazily for reading, it is still opened temporarily for validation, but
+ will not be held open until first IO. ``lazy`` is mainly useful when opening
+ for writing to avoid creating the file until it is needed.
+
+ Files can also be opened atomically, in which case all writes go into a
+ separate file in the same folder, and upon completion the file is
+ moved over to the original location. This is useful if a file that is
+ regularly read by other users is modified.
+
+ See :ref:`file-args` for more information.
+
+ .. versionchanged:: 2.0
+ Added the ``atomic`` parameter.
+ """
+
+ name = "filename"
+ envvar_list_splitter: t.ClassVar[str] = os.path.pathsep
+
+ def __init__(
+ self,
+ mode: str = "r",
+ encoding: str | None = None,
+ errors: str | None = "strict",
+ lazy: bool | None = None,
+ atomic: bool = False,
+ ) -> None:
+ self.mode = mode
+ self.encoding = encoding
+ self.errors = errors
+ self.lazy = lazy
+ self.atomic = atomic
+
+ def to_info_dict(self) -> dict[str, t.Any]:
+ info_dict = super().to_info_dict()
+ info_dict.update(mode=self.mode, encoding=self.encoding)
+ return info_dict
+
+ def resolve_lazy_flag(self, value: str | os.PathLike[str]) -> bool:
+ if self.lazy is not None:
+ return self.lazy
+ if os.fspath(value) == "-":
+ return False
+ elif "w" in self.mode:
+ return True
+ return False
+
+ def convert(
+ self,
+ value: str | os.PathLike[str] | t.IO[t.Any],
+ param: Parameter | None,
+ ctx: Context | None,
+ ) -> t.IO[t.Any]:
+ if _is_file_like(value):
+ return value
+
+ value = t.cast("str | os.PathLike[str]", value)
+
+ try:
+ lazy = self.resolve_lazy_flag(value)
+
+ if lazy:
+ lf = LazyFile(
+ value, self.mode, self.encoding, self.errors, atomic=self.atomic
+ )
+
+ if ctx is not None:
+ ctx.call_on_close(lf.close_intelligently)
+
+ return t.cast("t.IO[t.Any]", lf)
+
+ f, should_close = open_stream(
+ value, self.mode, self.encoding, self.errors, atomic=self.atomic
+ )
+
+ # If a context is provided, we automatically close the file
+ # at the end of the context execution (or flush out). If a
+ # context does not exist, it's the caller's responsibility to
+ # properly close the file. This for instance happens when the
+ # type is used with prompts.
+ if ctx is not None:
+ if should_close:
+ ctx.call_on_close(safecall(f.close))
+ else:
+ ctx.call_on_close(safecall(f.flush))
+
+ return f
+ except OSError as e:
+ self.fail(f"'{format_filename(value)}': {e.strerror}", param, ctx)
+
+ def shell_complete(
+ self, ctx: Context, param: Parameter, incomplete: str
+ ) -> list[CompletionItem]:
+ """Return a special completion marker that tells the completion
+ system to use the shell to provide file path completions.
+
+ :param ctx: Invocation context for this command.
+ :param param: The parameter that is requesting completion.
+ :param incomplete: Value being completed. May be empty.
+
+ .. versionadded:: 8.0
+ """
+ from click.shell_completion import CompletionItem
+
+ return [CompletionItem(incomplete, type="file")]
+
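+# A ``File`` sketch: ``-`` maps to a standard stream, and ``lazy=True`` defers
+# creating an output file until the first IO (the command is hypothetical):
+#
+#     @click.command()
+#     @click.argument("src", type=click.File("r"))
+#     @click.argument("dst", type=click.File("w", lazy=True))
+#     def copy(src, dst):
+#         dst.write(src.read())  # both are closed when the context tears down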
+
+def _is_file_like(value: t.Any) -> te.TypeGuard[t.IO[t.Any]]:
+ return hasattr(value, "read") or hasattr(value, "write")
+
+
+class Path(ParamType):
+ """The ``Path`` type is similar to the :class:`File` type, but
+ returns the filename instead of an open file. Various checks can be
+ enabled to validate the type of file and permissions.
+
+ :param exists: The file or directory needs to exist for the value to
+ be valid. If this is not set to ``True``, and the file does not
+ exist, then all further checks are silently skipped.
+ :param file_okay: Allow a file as a value.
+ :param dir_okay: Allow a directory as a value.
+ :param readable: if true, a readable check is performed.
+ :param writable: if true, a writable check is performed.
+ :param executable: if true, an executable check is performed.
+ :param resolve_path: Make the value absolute and resolve any
+ symlinks. A ``~`` is not expanded, as this is supposed to be
+ done by the shell only.
+ :param allow_dash: Allow a single dash as a value, which indicates
+ a standard stream (but does not open it). Use
+ :func:`~click.open_file` to handle opening this value.
+ :param path_type: Convert the incoming path value to this type. If
+ ``None``, keep Python's default, which is ``str``. Useful to
+ convert to :class:`pathlib.Path`.
+
+ .. versionchanged:: 8.1
+ Added the ``executable`` parameter.
+
+ .. versionchanged:: 8.0
+ Allow passing ``path_type=pathlib.Path``.
+
+ .. versionchanged:: 6.0
+ Added the ``allow_dash`` parameter.
+ """
+
+ envvar_list_splitter: t.ClassVar[str] = os.path.pathsep
+
+ def __init__(
+ self,
+ exists: bool = False,
+ file_okay: bool = True,
+ dir_okay: bool = True,
+ writable: bool = False,
+ readable: bool = True,
+ resolve_path: bool = False,
+ allow_dash: bool = False,
+ path_type: type[t.Any] | None = None,
+ executable: bool = False,
+ ):
+ self.exists = exists
+ self.file_okay = file_okay
+ self.dir_okay = dir_okay
+ self.readable = readable
+ self.writable = writable
+ self.executable = executable
+ self.resolve_path = resolve_path
+ self.allow_dash = allow_dash
+ self.type = path_type
+
+ if self.file_okay and not self.dir_okay:
+ self.name: str = _("file")
+ elif self.dir_okay and not self.file_okay:
+ self.name = _("directory")
+ else:
+ self.name = _("path")
+
+ def to_info_dict(self) -> dict[str, t.Any]:
+ info_dict = super().to_info_dict()
+ info_dict.update(
+ exists=self.exists,
+ file_okay=self.file_okay,
+ dir_okay=self.dir_okay,
+ writable=self.writable,
+ readable=self.readable,
+ allow_dash=self.allow_dash,
+ )
+ return info_dict
+
+ def coerce_path_result(
+ self, value: str | os.PathLike[str]
+ ) -> str | bytes | os.PathLike[str]:
+ if self.type is not None and not isinstance(value, self.type):
+ if self.type is str:
+ return os.fsdecode(value)
+ elif self.type is bytes:
+ return os.fsencode(value)
+ else:
+ return t.cast("os.PathLike[str]", self.type(value))
+
+ return value
+
+ def convert(
+ self,
+ value: str | os.PathLike[str],
+ param: Parameter | None,
+ ctx: Context | None,
+ ) -> str | bytes | os.PathLike[str]:
+ rv = value
+
+ is_dash = self.file_okay and self.allow_dash and rv in (b"-", "-")
+
+ if not is_dash:
+ if self.resolve_path:
+ rv = os.path.realpath(rv)
+
+ try:
+ st = os.stat(rv)
+ except OSError:
+ if not self.exists:
+ return self.coerce_path_result(rv)
+ self.fail(
+ _("{name} {filename!r} does not exist.").format(
+ name=self.name.title(), filename=format_filename(value)
+ ),
+ param,
+ ctx,
+ )
+
+ if not self.file_okay and stat.S_ISREG(st.st_mode):
+ self.fail(
+ _("{name} {filename!r} is a file.").format(
+ name=self.name.title(), filename=format_filename(value)
+ ),
+ param,
+ ctx,
+ )
+ if not self.dir_okay and stat.S_ISDIR(st.st_mode):
+ self.fail(
+ _("{name} {filename!r} is a directory.").format(
+ name=self.name.title(), filename=format_filename(value)
+ ),
+ param,
+ ctx,
+ )
+
+ if self.readable and not os.access(rv, os.R_OK):
+ self.fail(
+ _("{name} {filename!r} is not readable.").format(
+ name=self.name.title(), filename=format_filename(value)
+ ),
+ param,
+ ctx,
+ )
+
+ if self.writable and not os.access(rv, os.W_OK):
+ self.fail(
+ _("{name} {filename!r} is not writable.").format(
+ name=self.name.title(), filename=format_filename(value)
+ ),
+ param,
+ ctx,
+ )
+
+ if self.executable and not os.access(value, os.X_OK):
+ self.fail(
+ _("{name} {filename!r} is not executable.").format(
+ name=self.name.title(), filename=format_filename(value)
+ ),
+ param,
+ ctx,
+ )
+
+ return self.coerce_path_result(rv)
+
+ def shell_complete(
+ self, ctx: Context, param: Parameter, incomplete: str
+ ) -> list[CompletionItem]:
+ """Return a special completion marker that tells the completion
+ system to use the shell to provide path completions for only
+ directories or any paths.
+
+ :param ctx: Invocation context for this command.
+ :param param: The parameter that is requesting completion.
+ :param incomplete: Value being completed. May be empty.
+
+ .. versionadded:: 8.0
+ """
+ from click.shell_completion import CompletionItem
+
+ type = "dir" if self.dir_okay and not self.file_okay else "file"
+ return [CompletionItem(incomplete, type=type)]
+
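+# A ``Path`` sketch: require an existing file and hand the callback a
+# ``pathlib.Path`` (the command is hypothetical):
+#
+#     import pathlib
+#
+#     @click.command()
+#     @click.argument(
+#         "config",
+#         type=click.Path(exists=True, dir_okay=False, path_type=pathlib.Path),
+#     )
+#     def load(config):
+#         click.echo(config.read_text())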
+
+class Tuple(CompositeParamType):
+ """The default behavior of Click is to apply a type on a value directly.
+ This works well in most cases, except for when `nargs` is set to a fixed
+ count and different types should be used for different items. In this
+ case the :class:`Tuple` type can be used. This type can only be used
+ if `nargs` is set to a fixed number.
+
+ For more information see :ref:`tuple-type`.
+
+ This can be selected by using a Python tuple literal as a type.
+
+ :param types: a list of types that should be used for the tuple items.
+ """
+
+ def __init__(self, types: cabc.Sequence[type[t.Any] | ParamType]) -> None:
+ self.types: cabc.Sequence[ParamType] = [convert_type(ty) for ty in types]
+
+ def to_info_dict(self) -> dict[str, t.Any]:
+ info_dict = super().to_info_dict()
+ info_dict["types"] = [t.to_info_dict() for t in self.types]
+ return info_dict
+
+ @property
+ def name(self) -> str: # type: ignore
+ return f"<{' '.join(ty.name for ty in self.types)}>"
+
+ @property
+ def arity(self) -> int: # type: ignore
+ return len(self.types)
+
+ def convert(
+ self, value: t.Any, param: Parameter | None, ctx: Context | None
+ ) -> t.Any:
+ len_type = len(self.types)
+ len_value = len(value)
+
+ if len_value != len_type:
+ self.fail(
+ ngettext(
+ "{len_type} values are required, but {len_value} was given.",
+ "{len_type} values are required, but {len_value} were given.",
+ len_value,
+ ).format(len_type=len_type, len_value=len_value),
+ param=param,
+ ctx=ctx,
+ )
+
+ return tuple(
+ ty(x, param, ctx) for ty, x in zip(self.types, value, strict=False)
+ )
+
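+# A ``Tuple`` sketch: each position gets its own type. Passing a Python tuple
+# literal as ``type`` selects this automatically (the option is hypothetical):
+#
+#     @click.command()
+#     @click.option("--item", type=(str, int))
+#     def put(item):
+#         name, count = item  # e.g. ("apples", 3) from ``--item apples 3``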
+
+def convert_type(ty: t.Any | None, default: t.Any | None = None) -> ParamType:
+ """Find the most appropriate :class:`ParamType` for the given Python
+ type. If the type isn't provided, it can be inferred from a default
+ value.
+ """
+ guessed_type = False
+
+ if ty is None and default is not None:
+ if isinstance(default, (tuple, list)):
+ # If the default is empty, ty will remain None and will
+ # return STRING.
+ if default:
+ item = default[0]
+
+ # A tuple of tuples needs to detect the inner types.
+ # Can't call convert recursively because that would
+ # incorrectly unwind the tuple to a single type.
+ if isinstance(item, (tuple, list)):
+ ty = tuple(map(type, item))
+ else:
+ ty = type(item)
+ else:
+ ty = type(default)
+
+ guessed_type = True
+
+ if isinstance(ty, tuple):
+ return Tuple(ty)
+
+ if isinstance(ty, ParamType):
+ return ty
+
+ if ty is str or ty is None:
+ return STRING
+
+ if ty is int:
+ return INT
+
+ if ty is float:
+ return FLOAT
+
+ if ty is bool:
+ return BOOL
+
+ if guessed_type:
+ return STRING
+
+ if __debug__:
+ try:
+ if issubclass(ty, ParamType):
+ raise AssertionError(
+ f"Attempted to use an uninstantiated parameter type ({ty})."
+ )
+ except TypeError:
+ # ty is an instance (correct), so issubclass fails.
+ pass
+
+ return FuncParamType(ty)
+
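+# A ``convert_type`` sketch of the inference rules above:
+#
+#     convert_type(int)          # -> INT
+#     convert_type(None, 3.14)   # -> FLOAT, guessed from the default
+#     convert_type((str, int))   # -> Tuple([STRING, INT])
+#     convert_type(len)          # -> FuncParamType wrapping ``len``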
+
+#: A dummy parameter type that just does nothing. From a user's
+#: perspective this appears to just be the same as `STRING` but
+#: internally no string conversion takes place if the input was bytes.
+#: This is usually useful when working with file paths as they can
+#: appear in bytes and unicode.
+#:
+#: For path related uses the :class:`Path` type is a better choice but
+#: there are situations where an unprocessed type is useful which is why
+#: it is provided.
+#:
+#: .. versionadded:: 4.0
+UNPROCESSED = UnprocessedParamType()
+
+#: A unicode string parameter type which is the implicit default. This
+#: can also be selected by using ``str`` as type.
+STRING = StringParamType()
+
+#: An integer parameter. This can also be selected by using ``int`` as
+#: type.
+INT = IntParamType()
+
+#: A floating point value parameter. This can also be selected by using
+#: ``float`` as type.
+FLOAT = FloatParamType()
+
+#: A boolean parameter. This is the default for boolean flags. This can
+#: also be selected by using ``bool`` as a type.
+BOOL = BoolParamType()
+
+#: A UUID parameter.
+UUID = UUIDParameterType()
+
+
+class OptionHelpExtra(t.TypedDict, total=False):
+ envvars: tuple[str, ...]
+ default: str
+ range: str
+ required: str
diff --git a/Backend/venv/lib/python3.12/site-packages/click/utils.py b/Backend/venv/lib/python3.12/site-packages/click/utils.py
new file mode 100644
index 00000000..beae26f7
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/click/utils.py
@@ -0,0 +1,627 @@
+from __future__ import annotations
+
+import collections.abc as cabc
+import os
+import re
+import sys
+import typing as t
+from functools import update_wrapper
+from types import ModuleType
+from types import TracebackType
+
+from ._compat import _default_text_stderr
+from ._compat import _default_text_stdout
+from ._compat import _find_binary_writer
+from ._compat import auto_wrap_for_ansi
+from ._compat import binary_streams
+from ._compat import open_stream
+from ._compat import should_strip_ansi
+from ._compat import strip_ansi
+from ._compat import text_streams
+from ._compat import WIN
+from .globals import resolve_color_default
+
+if t.TYPE_CHECKING:
+ import typing_extensions as te
+
+ P = te.ParamSpec("P")
+
+R = t.TypeVar("R")
+
+
+def _posixify(name: str) -> str:
+ return "-".join(name.split()).lower()
+
+
+def safecall(func: t.Callable[P, R]) -> t.Callable[P, R | None]:
+ """Wraps a function so that it swallows exceptions."""
+
+ def wrapper(*args: P.args, **kwargs: P.kwargs) -> R | None:
+ try:
+ return func(*args, **kwargs)
+ except Exception:
+ pass
+ return None
+
+ return update_wrapper(wrapper, func)
+
+
+def make_str(value: t.Any) -> str:
+ """Converts a value into a valid string."""
+ if isinstance(value, bytes):
+ try:
+ return value.decode(sys.getfilesystemencoding())
+ except UnicodeError:
+ return value.decode("utf-8", "replace")
+ return str(value)
+
+
+def make_default_short_help(help: str, max_length: int = 45) -> str:
+ """Returns a condensed version of help string."""
+ # Consider only the first paragraph.
+ paragraph_end = help.find("\n\n")
+
+ if paragraph_end != -1:
+ help = help[:paragraph_end]
+
+ # Collapse newlines, tabs, and spaces.
+ words = help.split()
+
+ if not words:
+ return ""
+
+ # The first paragraph started with a "no rewrap" marker, ignore it.
+ if words[0] == "\b":
+ words = words[1:]
+
+ total_length = 0
+ last_index = len(words) - 1
+
+ for i, word in enumerate(words):
+ total_length += len(word) + (i > 0)
+
+ if total_length > max_length: # too long, truncate
+ break
+
+ if word[-1] == ".": # sentence end, truncate without "..."
+ return " ".join(words[: i + 1])
+
+ if total_length == max_length and i != last_index:
+ break # not at sentence end, truncate with "..."
+ else:
+ return " ".join(words) # no truncation needed
+
+ # Account for the length of the suffix.
+ total_length += len("...")
+
+ # remove words until the length is short enough
+ while i > 0:
+ total_length -= len(words[i]) + (i > 0)
+
+ if total_length <= max_length:
+ break
+
+ i -= 1
+
+ return " ".join(words[:i]) + "..."
+
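+# A ``make_default_short_help`` sketch: only the first paragraph is kept,
+# whitespace is collapsed, and overlong text is truncated with "...":
+#
+#     make_default_short_help("Sync files.\n\nLong details here.")
+#     # -> "Sync files."
+#     make_default_short_help("word " * 30, max_length=20)
+#     # -> "word word word..."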
+
+class LazyFile:
+ """A lazy file works like a regular file but it does not fully open
+ the file but it does perform some basic checks early to see if the
+ filename parameter does make sense. This is useful for safely opening
+ files for writing.
+ """
+
+ def __init__(
+ self,
+ filename: str | os.PathLike[str],
+ mode: str = "r",
+ encoding: str | None = None,
+ errors: str | None = "strict",
+ atomic: bool = False,
+ ):
+ self.name: str = os.fspath(filename)
+ self.mode = mode
+ self.encoding = encoding
+ self.errors = errors
+ self.atomic = atomic
+ self._f: t.IO[t.Any] | None
+ self.should_close: bool
+
+ if self.name == "-":
+ self._f, self.should_close = open_stream(filename, mode, encoding, errors)
+ else:
+ if "r" in mode:
+ # Open and close the file in case we're opening it for
+ # reading so that we can catch at least some errors in
+ # some cases early.
+ open(filename, mode).close()
+ self._f = None
+ self.should_close = True
+
+ def __getattr__(self, name: str) -> t.Any:
+ return getattr(self.open(), name)
+
+ def __repr__(self) -> str:
+ if self._f is not None:
+ return repr(self._f)
+ return f""
+
+ def open(self) -> t.IO[t.Any]:
+ """Opens the file if it's not yet open. This call might fail with
+ a :exc:`FileError`. Not handling this error will produce an error
+ that Click shows.
+ """
+ if self._f is not None:
+ return self._f
+ try:
+ rv, self.should_close = open_stream(
+ self.name, self.mode, self.encoding, self.errors, atomic=self.atomic
+ )
+ except OSError as e:
+ from .exceptions import FileError
+
+ raise FileError(self.name, hint=e.strerror) from e
+ self._f = rv
+ return rv
+
+ def close(self) -> None:
+ """Closes the underlying file, no matter what."""
+ if self._f is not None:
+ self._f.close()
+
+ def close_intelligently(self) -> None:
+ """This function only closes the file if it was opened by the lazy
+ file wrapper. For instance this will never close stdin.
+ """
+ if self.should_close:
+ self.close()
+
+ def __enter__(self) -> LazyFile:
+ return self
+
+ def __exit__(
+ self,
+ exc_type: type[BaseException] | None,
+ exc_value: BaseException | None,
+ tb: TracebackType | None,
+ ) -> None:
+ self.close_intelligently()
+
+ def __iter__(self) -> cabc.Iterator[t.AnyStr]:
+ self.open()
+ return iter(self._f) # type: ignore
+
+
+class KeepOpenFile:
+ def __init__(self, file: t.IO[t.Any]) -> None:
+ self._file: t.IO[t.Any] = file
+
+ def __getattr__(self, name: str) -> t.Any:
+ return getattr(self._file, name)
+
+ def __enter__(self) -> KeepOpenFile:
+ return self
+
+ def __exit__(
+ self,
+ exc_type: type[BaseException] | None,
+ exc_value: BaseException | None,
+ tb: TracebackType | None,
+ ) -> None:
+ pass
+
+ def __repr__(self) -> str:
+ return repr(self._file)
+
+ def __iter__(self) -> cabc.Iterator[t.AnyStr]:
+ return iter(self._file)
+
+
+def echo(
+ message: t.Any | None = None,
+ file: t.IO[t.Any] | None = None,
+ nl: bool = True,
+ err: bool = False,
+ color: bool | None = None,
+) -> None:
+ """Print a message and newline to stdout or a file. This should be
+ used instead of :func:`print` because it provides better support
+ for different data, files, and environments.
+
+ Compared to :func:`print`, this does the following:
+
+ - Ensures that the output encoding is not misconfigured on Linux.
+ - Supports Unicode in the Windows console.
+ - Supports writing to binary outputs, and supports writing bytes
+ to text outputs.
+ - Supports colors and styles on Windows.
+ - Removes ANSI color and style codes if the output does not look
+ like an interactive terminal.
+ - Always flushes the output.
+
+ :param message: The string or bytes to output. Other objects are
+ converted to strings.
+ :param file: The file to write to. Defaults to ``stdout``.
+ :param err: Write to ``stderr`` instead of ``stdout``.
+ :param nl: Print a newline after the message. Enabled by default.
+ :param color: Force showing or hiding colors and other styles. By
+ default Click will remove color if the output does not look like
+ an interactive terminal.
+
+ .. versionchanged:: 6.0
+ Support Unicode output on the Windows console. Click does not
+ modify ``sys.stdout``, so ``sys.stdout.write()`` and ``print()``
+ will still not support Unicode.
+
+ .. versionchanged:: 4.0
+ Added the ``color`` parameter.
+
+ .. versionadded:: 3.0
+ Added the ``err`` parameter.
+
+ .. versionchanged:: 2.0
+ Support colors on Windows if colorama is installed.
+ """
+ if file is None:
+ if err:
+ file = _default_text_stderr()
+ else:
+ file = _default_text_stdout()
+
+ # There are no standard streams attached to write to. For example,
+ # pythonw on Windows.
+ if file is None:
+ return
+
+ # Convert non bytes/text into the native string type.
+ if message is not None and not isinstance(message, (str, bytes, bytearray)):
+ out: str | bytes | bytearray | None = str(message)
+ else:
+ out = message
+
+ if nl:
+ out = out or ""
+ if isinstance(out, str):
+ out += "\n"
+ else:
+ out += b"\n"
+
+ if not out:
+ file.flush()
+ return
+
+ # If there is a message and the value looks like bytes, we manually
+ # need to find the binary stream and write the message in there.
+ # This is done separately so that most stream types will work as you
+ # would expect. Eg: you can write to StringIO for other cases.
+ if isinstance(out, (bytes, bytearray)):
+ binary_file = _find_binary_writer(file)
+
+ if binary_file is not None:
+ file.flush()
+ binary_file.write(out)
+ binary_file.flush()
+ return
+
+ # ANSI style code support. For no message or bytes, nothing happens.
+ # When outputting to a file instead of a terminal, strip codes.
+ else:
+ color = resolve_color_default(color)
+
+ if should_strip_ansi(file, color):
+ out = strip_ansi(out)
+ elif WIN:
+ if auto_wrap_for_ansi is not None:
+ file = auto_wrap_for_ansi(file, color) # type: ignore
+ elif not color:
+ out = strip_ansi(out)
+
+ file.write(out) # type: ignore
+ file.flush()
+
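+# An ``echo`` sketch: strings, bytes, and arbitrary objects are accepted, and
+# ``err=True`` routes the message to stderr:
+#
+#     click.echo("Hello")              # text -> stdout
+#     click.echo(b"\xffdata")          # bytes -> the underlying binary stream
+#     click.echo(123)                  # other objects are str()-converted
+#     click.echo("problem", err=True)  # -> stderr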
+
+def get_binary_stream(name: t.Literal["stdin", "stdout", "stderr"]) -> t.BinaryIO:
+ """Returns a system stream for byte processing.
+
+ :param name: the name of the stream to open. Valid names are ``'stdin'``,
+ ``'stdout'`` and ``'stderr'``
+ """
+ opener = binary_streams.get(name)
+ if opener is None:
+ raise TypeError(f"Unknown standard stream '{name}'")
+ return opener()
+
+
+def get_text_stream(
+ name: t.Literal["stdin", "stdout", "stderr"],
+ encoding: str | None = None,
+ errors: str | None = "strict",
+) -> t.TextIO:
+ """Returns a system stream for text processing. This usually returns
+ a wrapped stream around a binary stream returned from
+ :func:`get_binary_stream` but it also can take shortcuts for already
+ correctly configured streams.
+
+ :param name: the name of the stream to open. Valid names are ``'stdin'``,
+ ``'stdout'`` and ``'stderr'``
+ :param encoding: overrides the detected default encoding.
+ :param errors: overrides the default error mode.
+ """
+ opener = text_streams.get(name)
+ if opener is None:
+ raise TypeError(f"Unknown standard stream '{name}'")
+ return opener(encoding, errors)
+
+
+def open_file(
+ filename: str | os.PathLike[str],
+ mode: str = "r",
+ encoding: str | None = None,
+ errors: str | None = "strict",
+ lazy: bool = False,
+ atomic: bool = False,
+) -> t.IO[t.Any]:
+ """Open a file, with extra behavior to handle ``'-'`` to indicate
+ a standard stream, lazy open on write, and atomic write. Similar to
+ the behavior of the :class:`~click.File` param type.
+
+ If ``'-'`` is given to open ``stdout`` or ``stdin``, the stream is
+ wrapped so that using it in a context manager will not close it.
+ This makes it possible to use the function without accidentally
+ closing a standard stream:
+
+ .. code-block:: python
+
+ with open_file(filename) as f:
+ ...
+
+ :param filename: The name or Path of the file to open, or ``'-'`` for
+ ``stdin``/``stdout``.
+ :param mode: The mode in which to open the file.
+ :param encoding: The encoding to decode or encode a file opened in
+ text mode.
+ :param errors: The error handling mode.
+ :param lazy: Wait to open the file until it is accessed. For read
+ mode, the file is temporarily opened to raise access errors
+ early, then closed until it is read again.
+ :param atomic: Write to a temporary file and replace the given file
+ on close.
+
+ .. versionadded:: 3.0
+ """
+ if lazy:
+ return t.cast(
+ "t.IO[t.Any]", LazyFile(filename, mode, encoding, errors, atomic=atomic)
+ )
+
+ f, should_close = open_stream(filename, mode, encoding, errors, atomic=atomic)
+
+ if not should_close:
+ f = t.cast("t.IO[t.Any]", KeepOpenFile(f))
+
+ return f
+
+
+def format_filename(
+ filename: str | bytes | os.PathLike[str] | os.PathLike[bytes],
+ shorten: bool = False,
+) -> str:
+ """Format a filename as a string for display. Ensures the filename can be
+ displayed by replacing any invalid bytes or surrogate escapes in the name
+ with the replacement character ``�``.
+
+ Invalid bytes or surrogate escapes will raise an error when written to a
+ stream with ``errors="strict"``. This will typically happen with ``stdout``
+ when the locale is something like ``en_GB.UTF-8``.
+
+ Many scenarios *are* safe to write surrogates though, due to PEP 538 and
+ PEP 540, including:
+
+ - Writing to ``stderr``, which uses ``errors="backslashreplace"``.
+ - The system has ``LANG=C.UTF-8``, ``C``, or ``POSIX``. Python opens
+ stdout and stderr with ``errors="surrogateescape"``.
+ - None of ``LANG/LC_*`` are set. Python assumes ``LANG=C.UTF-8``.
+ - Python is started in UTF-8 mode with ``PYTHONUTF8=1`` or ``-X utf8``.
+ Python opens stdout and stderr with ``errors="surrogateescape"``.
+
+ :param filename: formats a filename for UI display. This will also convert
+ the filename into unicode without failing.
+ :param shorten: this optionally shortens the filename to strip off the
+ path that leads up to it.
+ """
+ if shorten:
+ filename = os.path.basename(filename)
+ else:
+ filename = os.fspath(filename)
+
+ if isinstance(filename, bytes):
+ filename = filename.decode(sys.getfilesystemencoding(), "replace")
+ else:
+ filename = filename.encode("utf-8", "surrogateescape").decode(
+ "utf-8", "replace"
+ )
+
+ return filename
+
+
+def get_app_dir(app_name: str, roaming: bool = True, force_posix: bool = False) -> str:
+ r"""Returns the config folder for the application. The default behavior
+ is to return whatever is most appropriate for the operating system.
+
+ To give you an idea, for an app called ``"Foo Bar"``, something like
+ the following folders could be returned:
+
+ Mac OS X:
+ ``~/Library/Application Support/Foo Bar``
+ Mac OS X (POSIX):
+ ``~/.foo-bar``
+ Unix:
+ ``~/.config/foo-bar``
+ Unix (POSIX):
+ ``~/.foo-bar``
+ Windows (roaming):
+ ``C:\Users\<user>\AppData\Roaming\Foo Bar``
+ Windows (not roaming):
+ ``C:\Users\<user>\AppData\Local\Foo Bar``
+
+ .. versionadded:: 2.0
+
+ :param app_name: the application name. This should be properly capitalized
+ and can contain whitespace.
+ :param roaming: controls if the folder should be roaming or not on Windows.
+ Has no effect otherwise.
+ :param force_posix: if this is set to ``True`` then on any POSIX system
+ the folder will be stored in the home folder with a leading dot
+ instead of the XDG config home or Darwin's application support
+ folder.
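+
+ A minimal sketch (the results are illustrative and depend on the
+ platform and environment):
+
+ .. code-block:: python
+
+ get_app_dir("Foo Bar") # e.g. "/home/<user>/.config/foo-bar" on Linux
+ get_app_dir("Foo Bar", force_posix=True) # e.g. "/home/<user>/.foo-bar"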
+ """
+ if WIN:
+ key = "APPDATA" if roaming else "LOCALAPPDATA"
+ folder = os.environ.get(key)
+ if folder is None:
+ folder = os.path.expanduser("~")
+ return os.path.join(folder, app_name)
+ if force_posix:
+ return os.path.join(os.path.expanduser(f"~/.{_posixify(app_name)}"))
+ if sys.platform == "darwin":
+ return os.path.join(
+ os.path.expanduser("~/Library/Application Support"), app_name
+ )
+ return os.path.join(
+ os.environ.get("XDG_CONFIG_HOME", os.path.expanduser("~/.config")),
+ _posixify(app_name),
+ )
+
+
+class PacifyFlushWrapper:
+ """This wrapper is used to catch and suppress BrokenPipeErrors resulting
+ from ``.flush()`` being called on broken pipe during the shutdown/final-GC
+ of the Python interpreter. Notably ``.flush()`` is always called on
+ ``sys.stdout`` and ``sys.stderr``. So as to have minimal impact on any
+ other cleanup code, and the case where the underlying file is not a broken
+ pipe, all calls and attributes are proxied.
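+
+ A minimal usage sketch (assuming the caller wants to silence ``EPIPE``
+ raised while flushing the standard streams at shutdown):
+
+ .. code-block:: python
+
+ sys.stdout = PacifyFlushWrapper(sys.stdout)
+ sys.stderr = PacifyFlushWrapper(sys.stderr)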
+ """
+
+ def __init__(self, wrapped: t.IO[t.Any]) -> None:
+ self.wrapped = wrapped
+
+ def flush(self) -> None:
+ try:
+ self.wrapped.flush()
+ except OSError as e:
+ import errno
+
+ if e.errno != errno.EPIPE:
+ raise
+
+ def __getattr__(self, attr: str) -> t.Any:
+ return getattr(self.wrapped, attr)
+
+
+def _detect_program_name(
+ path: str | None = None, _main: ModuleType | None = None
+) -> str:
+ """Determine the command used to run the program, for use in help
+ text. If a file or entry point was executed, the file name is
+ returned. If ``python -m`` was used to execute a module or package,
+ ``python -m name`` is returned.
+
+ This doesn't try to be too precise; the goal is to give a concise
+ name for help text. Files are only shown as their name without the
+ path. ``python`` is only shown for modules, and the full path to
+ ``sys.executable`` is not shown.
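+
+ For example (hypothetical invocations):
+
+ - ``python app.py`` is reported as ``app.py``.
+ - ``python -m example.cli`` is reported as ``python -m example.cli``.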
+
+ :param path: The Python file being executed. Python puts this in
+ ``sys.argv[0]``, which is used by default.
+ :param _main: The ``__main__`` module. This should only be passed
+ during internal testing.
+
+ .. versionadded:: 8.0
+ Based on command args detection in the Werkzeug reloader.
+
+ :meta private:
+ """
+ if _main is None:
+ _main = sys.modules["__main__"]
+
+ if not path:
+ path = sys.argv[0]
+
+ # The value of __package__ indicates how Python was called. It may
+ # not exist if a setuptools script is installed as an egg. It may be
+ # set incorrectly for entry points created with pip on Windows.
+ # It is set to "" inside a Shiv or PEX zipapp.
+ if getattr(_main, "__package__", None) in {None, ""} or (
+ os.name == "nt"
+ and _main.__package__ == ""
+ and not os.path.exists(path)
+ and os.path.exists(f"{path}.exe")
+ ):
+ # Executed a file, like "python app.py".
+ return os.path.basename(path)
+
+ # Executed a module, like "python -m example".
+ # Rewritten by Python from "-m script" to "/path/to/script.py".
+ # Need to look at main module to determine how it was executed.
+ py_module = t.cast(str, _main.__package__)
+ name = os.path.splitext(os.path.basename(path))[0]
+
+ # A submodule like "example.cli".
+ if name != "__main__":
+ py_module = f"{py_module}.{name}"
+
+ return f"python -m {py_module.lstrip('.')}"
+
+
+def _expand_args(
+ args: cabc.Iterable[str],
+ *,
+ user: bool = True,
+ env: bool = True,
+ glob_recursive: bool = True,
+) -> list[str]:
+ """Simulate Unix shell expansion with Python functions.
+
+ See :func:`glob.glob`, :func:`os.path.expanduser`, and
+ :func:`os.path.expandvars`.
+
+ This is intended for use on Windows, where the shell does not do any
+ expansion. It may not exactly match what a Unix shell would do.
+
+ :param args: List of command line arguments to expand.
+ :param user: Expand user home directory.
+ :param env: Expand environment variables.
+ :param glob_recursive: ``**`` matches directories recursively.
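+
+ A rough sketch of the behavior (paths are illustrative; a pattern that
+ matches nothing passes through unchanged):
+
+ .. code-block:: python
+
+ _expand_args(["~/notes/*.txt", "$HOME", "no-match-*"])
+ # -> matches for ~/notes/*.txt, the home directory, and "no-match-*"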
+
+ .. versionchanged:: 8.1
+ Invalid glob patterns are treated as empty expansions rather
+ than raising an error.
+
+ .. versionadded:: 8.0
+
+ :meta private:
+ """
+ from glob import glob
+
+ out = []
+
+ for arg in args:
+ if user:
+ arg = os.path.expanduser(arg)
+
+ if env:
+ arg = os.path.expandvars(arg)
+
+ try:
+ matches = glob(arg, recursive=glob_recursive)
+ except re.error:
+ matches = []
+
+ if not matches:
+ out.append(arg)
+ else:
+ out.extend(matches)
+
+ return out
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography-41.0.7.dist-info/INSTALLER b/Backend/venv/lib/python3.12/site-packages/cryptography-41.0.7.dist-info/INSTALLER
new file mode 100644
index 00000000..a1b589e3
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography-41.0.7.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography-41.0.7.dist-info/LICENSE b/Backend/venv/lib/python3.12/site-packages/cryptography-41.0.7.dist-info/LICENSE
new file mode 100644
index 00000000..b11f379e
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography-41.0.7.dist-info/LICENSE
@@ -0,0 +1,3 @@
+This software is made available under the terms of *either* of the licenses
+found in LICENSE.APACHE or LICENSE.BSD. Contributions to cryptography are made
+under the terms of *both* these licenses.
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography-41.0.7.dist-info/LICENSE.APACHE b/Backend/venv/lib/python3.12/site-packages/cryptography-41.0.7.dist-info/LICENSE.APACHE
new file mode 100644
index 00000000..62589edd
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography-41.0.7.dist-info/LICENSE.APACHE
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ https://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ https://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography-41.0.7.dist-info/LICENSE.BSD b/Backend/venv/lib/python3.12/site-packages/cryptography-41.0.7.dist-info/LICENSE.BSD
new file mode 100644
index 00000000..ec1a29d3
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography-41.0.7.dist-info/LICENSE.BSD
@@ -0,0 +1,27 @@
+Copyright (c) Individual contributors.
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+ 1. Redistributions of source code must retain the above copyright notice,
+ this list of conditions and the following disclaimer.
+
+ 2. Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+
+ 3. Neither the name of PyCA Cryptography nor the names of its contributors
+ may be used to endorse or promote products derived from this software
+ without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography-41.0.7.dist-info/METADATA b/Backend/venv/lib/python3.12/site-packages/cryptography-41.0.7.dist-info/METADATA
new file mode 100644
index 00000000..2106e2e6
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography-41.0.7.dist-info/METADATA
@@ -0,0 +1,133 @@
+Metadata-Version: 2.1
+Name: cryptography
+Version: 41.0.7
+Summary: cryptography is a package which provides cryptographic recipes and primitives to Python developers.
+Author-email: The Python Cryptographic Authority and individual contributors
+License: Apache-2.0 OR BSD-3-Clause
+Project-URL: homepage, https://github.com/pyca/cryptography
+Project-URL: documentation, https://cryptography.io/
+Project-URL: source, https://github.com/pyca/cryptography/
+Project-URL: issues, https://github.com/pyca/cryptography/issues
+Project-URL: changelog, https://cryptography.io/en/latest/changelog/
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Natural Language :: English
+Classifier: Operating System :: MacOS :: MacOS X
+Classifier: Operating System :: POSIX
+Classifier: Operating System :: POSIX :: BSD
+Classifier: Operating System :: POSIX :: Linux
+Classifier: Operating System :: Microsoft :: Windows
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Topic :: Security :: Cryptography
+Requires-Python: >=3.7
+Description-Content-Type: text/x-rst
+License-File: LICENSE
+License-File: LICENSE.APACHE
+License-File: LICENSE.BSD
+Requires-Dist: cffi >=1.12
+Provides-Extra: docs
+Requires-Dist: sphinx >=5.3.0 ; extra == 'docs'
+Requires-Dist: sphinx-rtd-theme >=1.1.1 ; extra == 'docs'
+Provides-Extra: docstest
+Requires-Dist: pyenchant >=1.6.11 ; extra == 'docstest'
+Requires-Dist: twine >=1.12.0 ; extra == 'docstest'
+Requires-Dist: sphinxcontrib-spelling >=4.0.1 ; extra == 'docstest'
+Provides-Extra: nox
+Requires-Dist: nox ; extra == 'nox'
+Provides-Extra: pep8test
+Requires-Dist: black ; extra == 'pep8test'
+Requires-Dist: ruff ; extra == 'pep8test'
+Requires-Dist: mypy ; extra == 'pep8test'
+Requires-Dist: check-sdist ; extra == 'pep8test'
+Provides-Extra: sdist
+Requires-Dist: build ; extra == 'sdist'
+Provides-Extra: ssh
+Requires-Dist: bcrypt >=3.1.5 ; extra == 'ssh'
+Provides-Extra: test
+Requires-Dist: pytest >=6.2.0 ; extra == 'test'
+Requires-Dist: pytest-benchmark ; extra == 'test'
+Requires-Dist: pytest-cov ; extra == 'test'
+Requires-Dist: pytest-xdist ; extra == 'test'
+Requires-Dist: pretend ; extra == 'test'
+Provides-Extra: test-randomorder
+Requires-Dist: pytest-randomly ; extra == 'test-randomorder'
+
+pyca/cryptography
+=================
+
+.. image:: https://img.shields.io/pypi/v/cryptography.svg
+ :target: https://pypi.org/project/cryptography/
+ :alt: Latest Version
+
+.. image:: https://readthedocs.org/projects/cryptography/badge/?version=latest
+ :target: https://cryptography.io
+ :alt: Latest Docs
+
+.. image:: https://github.com/pyca/cryptography/workflows/CI/badge.svg?branch=main
+ :target: https://github.com/pyca/cryptography/actions?query=workflow%3ACI+branch%3Amain
+
+
+``cryptography`` is a package which provides cryptographic recipes and
+primitives to Python developers. Our goal is for it to be your "cryptographic
+standard library". It supports Python 3.7+ and PyPy3 7.3.10+.
+
+``cryptography`` includes both high level recipes and low level interfaces to
+common cryptographic algorithms such as symmetric ciphers, message digests, and
+key derivation functions. For example, to encrypt something with
+``cryptography``'s high level symmetric encryption recipe:
+
+.. code-block:: pycon
+
+ >>> from cryptography.fernet import Fernet
+ >>> # Put this somewhere safe!
+ >>> key = Fernet.generate_key()
+ >>> f = Fernet(key)
+ >>> token = f.encrypt(b"A really secret message. Not for prying eyes.")
+ >>> token
+ b'...'
+ >>> f.decrypt(token)
+ b'A really secret message. Not for prying eyes.'
+
+You can find more information in the `documentation`_.
+
+You can install ``cryptography`` with:
+
+.. code-block:: console
+
+ $ pip install cryptography
+
+For full details see `the installation documentation`_.
+
+Discussion
+~~~~~~~~~~
+
+If you run into bugs, you can file them in our `issue tracker`_.
+
+We maintain a `cryptography-dev`_ mailing list for development discussion.
+
+You can also join ``#pyca`` on ``irc.libera.chat`` to ask questions or get
+involved.
+
+Security
+~~~~~~~~
+
+Need to report a security issue? Please consult our `security reporting`_
+documentation.
+
+
+.. _`documentation`: https://cryptography.io/
+.. _`the installation documentation`: https://cryptography.io/en/latest/installation/
+.. _`issue tracker`: https://github.com/pyca/cryptography/issues
+.. _`cryptography-dev`: https://mail.python.org/mailman/listinfo/cryptography-dev
+.. _`security reporting`: https://cryptography.io/en/latest/security/
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography-41.0.7.dist-info/RECORD b/Backend/venv/lib/python3.12/site-packages/cryptography-41.0.7.dist-info/RECORD
new file mode 100644
index 00000000..77b014fe
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography-41.0.7.dist-info/RECORD
@@ -0,0 +1,173 @@
+cryptography-41.0.7.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+cryptography-41.0.7.dist-info/LICENSE,sha256=Pgx8CRqUi4JTO6mP18u0BDLW8amsv4X1ki0vmak65rs,197
+cryptography-41.0.7.dist-info/LICENSE.APACHE,sha256=qsc7MUj20dcRHbyjIJn2jSbGRMaBOuHk8F9leaomY_4,11360
+cryptography-41.0.7.dist-info/LICENSE.BSD,sha256=YCxMdILeZHndLpeTzaJ15eY9dz2s0eymiSMqtwCPtPs,1532
+cryptography-41.0.7.dist-info/METADATA,sha256=h4C2cL9sbR7ObF6jD7hUT7xOfSvzZBli6AmX-vngctA,5159
+cryptography-41.0.7.dist-info/RECORD,,
+cryptography-41.0.7.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+cryptography-41.0.7.dist-info/WHEEL,sha256=Bnup3_Y_tMShHsCuO2E9NdrjRJkTtSD1dYVt3WSGhpU,112
+cryptography-41.0.7.dist-info/top_level.txt,sha256=KNaT-Sn2K4uxNaEbe6mYdDn3qWDMlp4y-MtWfB73nJc,13
+cryptography/__about__.py,sha256=uPXMbbcptt7EzZ_jllGRx0pVdMn-NBsAM4L74hOv-b0,445
+cryptography/__init__.py,sha256=iVPlBlXWTJyiFeRedxcbMPhyHB34viOM10d72vGnWuE,364
+cryptography/__pycache__/__about__.cpython-312.pyc,,
+cryptography/__pycache__/__init__.cpython-312.pyc,,
+cryptography/__pycache__/exceptions.cpython-312.pyc,,
+cryptography/__pycache__/fernet.cpython-312.pyc,,
+cryptography/__pycache__/utils.cpython-312.pyc,,
+cryptography/exceptions.py,sha256=EHe7XM2_OtdOM1bZE0ci-4GUhtOlEQ6fQXhK2Igf0qA,1118
+cryptography/fernet.py,sha256=TVZy4Dtkpl7kWIpvuKcNldE95IEjTQ0MfHgRsLdnDSM,6886
+cryptography/hazmat/__init__.py,sha256=5IwrLWrVp0AjEr_4FdWG_V057NSJGY_W4egNNsuct0g,455
+cryptography/hazmat/__pycache__/__init__.cpython-312.pyc,,
+cryptography/hazmat/__pycache__/_oid.cpython-312.pyc,,
+cryptography/hazmat/_oid.py,sha256=gxhMHKpu9Xsi6uHCGZ_-soYMXj_izOIFaxjUKWbCPeE,14441
+cryptography/hazmat/backends/__init__.py,sha256=O5jvKFQdZnXhKeqJ-HtulaEL9Ni7mr1mDzZY5kHlYhI,361
+cryptography/hazmat/backends/__pycache__/__init__.cpython-312.pyc,,
+cryptography/hazmat/backends/openssl/__init__.py,sha256=p3jmJfnCag9iE5sdMrN6VvVEu55u46xaS_IjoI0SrmA,305
+cryptography/hazmat/backends/openssl/__pycache__/__init__.cpython-312.pyc,,
+cryptography/hazmat/backends/openssl/__pycache__/aead.cpython-312.pyc,,
+cryptography/hazmat/backends/openssl/__pycache__/backend.cpython-312.pyc,,
+cryptography/hazmat/backends/openssl/__pycache__/ciphers.cpython-312.pyc,,
+cryptography/hazmat/backends/openssl/__pycache__/cmac.cpython-312.pyc,,
+cryptography/hazmat/backends/openssl/__pycache__/decode_asn1.cpython-312.pyc,,
+cryptography/hazmat/backends/openssl/__pycache__/ec.cpython-312.pyc,,
+cryptography/hazmat/backends/openssl/__pycache__/rsa.cpython-312.pyc,,
+cryptography/hazmat/backends/openssl/__pycache__/utils.cpython-312.pyc,,
+cryptography/hazmat/backends/openssl/aead.py,sha256=s3zXcVQf0COIOuOzI8usebWpznGnyZ7GhnmlJYu7QXA,15967
+cryptography/hazmat/backends/openssl/backend.py,sha256=491FCrjeOG7S9bXskUosirXFP84ntwAQ-U0BxcibtqM,73321
+cryptography/hazmat/backends/openssl/ciphers.py,sha256=lxWrvnufudsDI2bpwNs2c8XLILbAE2j2rMSD1nhnPVg,10358
+cryptography/hazmat/backends/openssl/cmac.py,sha256=pHgQOIRfR4cIDa5ltcKFtgjqPTXbOLyRQmmqv9JlbUk,3035
+cryptography/hazmat/backends/openssl/decode_asn1.py,sha256=kz6gys8wuJhrx4QyU6enYx7UatNHr0LB3TI1jH3oQ54,1148
+cryptography/hazmat/backends/openssl/ec.py,sha256=GKzh3mZKvgsM1jqM88-4XikHHalpV-Efyskclt8yxYg,11474
+cryptography/hazmat/backends/openssl/rsa.py,sha256=P_ak-2zvA6VBt_P0ldzTSCUkcjo2GhYt_HLn8CVvWtE,21825
+cryptography/hazmat/backends/openssl/utils.py,sha256=UoguO26QzwN4lsMAltsIrgAlbi3SOeSrexZs1-QPNu8,2190
+cryptography/hazmat/bindings/__init__.py,sha256=s9oKCQ2ycFdXoERdS1imafueSkBsL9kvbyfghaauZ9Y,180
+cryptography/hazmat/bindings/__pycache__/__init__.cpython-312.pyc,,
+cryptography/hazmat/bindings/_rust.abi3.so,sha256=qkbrd72TN7vk0ivAz_VE-ZefNyDxCLwLSiAzhgMF8-Q,13787648
+cryptography/hazmat/bindings/_rust/__init__.pyi,sha256=IumK7zP9Ko3HjLLb5hwZiY2rbfmfsuyTZLLcHOMvSdk,981
+cryptography/hazmat/bindings/_rust/_openssl.pyi,sha256=mpNJLuYLbCVrd5i33FBTmWwL_55Dw7JPkSLlSX9Q7oI,230
+cryptography/hazmat/bindings/_rust/asn1.pyi,sha256=9CyI-grOsLQB_hfnhJPoG9dNOdJ7Zg6B0iUpzCowh44,592
+cryptography/hazmat/bindings/_rust/exceptions.pyi,sha256=exXr2xw_0pB1kk93cYbM3MohbzoUkjOms1ZMUi0uQZE,640
+cryptography/hazmat/bindings/_rust/ocsp.pyi,sha256=RzVaLkY0y9L8W8opAL_uVD8bySKxP23pSQtEbLOStXI,905
+cryptography/hazmat/bindings/_rust/openssl/__init__.pyi,sha256=j764U4RRBZbDuOfjQxRqU7rCf74kgM-3AnTIjLdRy3E,970
+cryptography/hazmat/bindings/_rust/openssl/dh.pyi,sha256=0FVY1t5qM9HV_ZKDIcdJI2a72i1fHKyTvYIJb5UnH4M,896
+cryptography/hazmat/bindings/_rust/openssl/dsa.pyi,sha256=43in4PCsm2kz_H7RQFLBKqhDsUmb4yWop6dpYeVDg-4,764
+cryptography/hazmat/bindings/_rust/openssl/ed25519.pyi,sha256=E2GXAgibfRGqKxskH8MfZI8gHFoMJJOTjG7Elg2gOww,629
+cryptography/hazmat/bindings/_rust/openssl/ed448.pyi,sha256=pk_kx5Biq8O53d2joOT-cXuwCrbFPicV7iaqYdeiIAI,603
+cryptography/hazmat/bindings/_rust/openssl/hashes.pyi,sha256=J8HoN0GdtPcjRAfNHr5Elva_nkmQfq63L75_z9dd8Uc,573
+cryptography/hazmat/bindings/_rust/openssl/hmac.pyi,sha256=ZmLJ73pmxcZFC1XosWEiXMRYtvJJor3ZLdCQOJu85Cw,662
+cryptography/hazmat/bindings/_rust/openssl/kdf.pyi,sha256=wPS5c7NLspM2632II0I4iH1RSxZvSRtBOVqmpyQATfk,544
+cryptography/hazmat/bindings/_rust/openssl/poly1305.pyi,sha256=9iogF7Q4i81IkOS-IMXp6HvxFF_3cNy_ucrAjVQnn14,540
+cryptography/hazmat/bindings/_rust/openssl/x25519.pyi,sha256=-1F5QDZfrdhmDLKTeSERuuDUHBTV-EhxIYk9mjpwcG4,616
+cryptography/hazmat/bindings/_rust/openssl/x448.pyi,sha256=SdL4blscYBEvuWY4SuNAY1s5zFaGj38eQ-bulVBZvFg,590
+cryptography/hazmat/bindings/_rust/pkcs7.pyi,sha256=VkTC78wjJgb_qrboOYIFPuFZ3W46zsr6zsxnlrOMwao,460
+cryptography/hazmat/bindings/_rust/x509.pyi,sha256=j6AbXBZSXeJHLSrXnaapbiPfle-znfk9uJUa_zqxgy4,1878
+cryptography/hazmat/bindings/openssl/__init__.py,sha256=s9oKCQ2ycFdXoERdS1imafueSkBsL9kvbyfghaauZ9Y,180
+cryptography/hazmat/bindings/openssl/__pycache__/__init__.cpython-312.pyc,,
+cryptography/hazmat/bindings/openssl/__pycache__/_conditional.cpython-312.pyc,,
+cryptography/hazmat/bindings/openssl/__pycache__/binding.cpython-312.pyc,,
+cryptography/hazmat/bindings/openssl/_conditional.py,sha256=DeECq7AKguhs390ZmxgItdqPLzyrKGJk-3KlHJMkXoY,9098
+cryptography/hazmat/bindings/openssl/binding.py,sha256=0x3kzvq2grHu4gbbgEIzEVrX6unp71EEs1hx0o-uuOM,6696
+cryptography/hazmat/primitives/__init__.py,sha256=s9oKCQ2ycFdXoERdS1imafueSkBsL9kvbyfghaauZ9Y,180
+cryptography/hazmat/primitives/__pycache__/__init__.cpython-312.pyc,,
+cryptography/hazmat/primitives/__pycache__/_asymmetric.cpython-312.pyc,,
+cryptography/hazmat/primitives/__pycache__/_cipheralgorithm.cpython-312.pyc,,
+cryptography/hazmat/primitives/__pycache__/_serialization.cpython-312.pyc,,
+cryptography/hazmat/primitives/__pycache__/cmac.cpython-312.pyc,,
+cryptography/hazmat/primitives/__pycache__/constant_time.cpython-312.pyc,,
+cryptography/hazmat/primitives/__pycache__/hashes.cpython-312.pyc,,
+cryptography/hazmat/primitives/__pycache__/hmac.cpython-312.pyc,,
+cryptography/hazmat/primitives/__pycache__/keywrap.cpython-312.pyc,,
+cryptography/hazmat/primitives/__pycache__/padding.cpython-312.pyc,,
+cryptography/hazmat/primitives/__pycache__/poly1305.cpython-312.pyc,,
+cryptography/hazmat/primitives/_asymmetric.py,sha256=RhgcouUB6HTiFDBrR1LxqkMjpUxIiNvQ1r_zJjRG6qQ,532
+cryptography/hazmat/primitives/_cipheralgorithm.py,sha256=7LPkpw-DrgyvmBMUjvXeBvojVZPtXhFgfelUftnxPGw,1093
+cryptography/hazmat/primitives/_serialization.py,sha256=U0DU0ZzOLJppCQsh9EJH6vGYoHotBolfNyRyx3wr1l0,5216
+cryptography/hazmat/primitives/asymmetric/__init__.py,sha256=s9oKCQ2ycFdXoERdS1imafueSkBsL9kvbyfghaauZ9Y,180
+cryptography/hazmat/primitives/asymmetric/__pycache__/__init__.cpython-312.pyc,,
+cryptography/hazmat/primitives/asymmetric/__pycache__/dh.cpython-312.pyc,,
+cryptography/hazmat/primitives/asymmetric/__pycache__/dsa.cpython-312.pyc,,
+cryptography/hazmat/primitives/asymmetric/__pycache__/ec.cpython-312.pyc,,
+cryptography/hazmat/primitives/asymmetric/__pycache__/ed25519.cpython-312.pyc,,
+cryptography/hazmat/primitives/asymmetric/__pycache__/ed448.cpython-312.pyc,,
+cryptography/hazmat/primitives/asymmetric/__pycache__/padding.cpython-312.pyc,,
+cryptography/hazmat/primitives/asymmetric/__pycache__/rsa.cpython-312.pyc,,
+cryptography/hazmat/primitives/asymmetric/__pycache__/types.cpython-312.pyc,,
+cryptography/hazmat/primitives/asymmetric/__pycache__/utils.cpython-312.pyc,,
+cryptography/hazmat/primitives/asymmetric/__pycache__/x25519.cpython-312.pyc,,
+cryptography/hazmat/primitives/asymmetric/__pycache__/x448.cpython-312.pyc,,
+cryptography/hazmat/primitives/asymmetric/dh.py,sha256=XsthqjvExWWOyePs0PxT4MestU9QeGuL-Hx7fWzTguQ,7013
+cryptography/hazmat/primitives/asymmetric/dsa.py,sha256=aaTY7EMLTzaWs-jhOMpMAfa2GnfhoqsCKZPKAs35L40,8263
+cryptography/hazmat/primitives/asymmetric/ec.py,sha256=L1WoWPYevJ6Pk2T1etbnHbvr6AeXFccckPNNiyUVoNM,12867
+cryptography/hazmat/primitives/asymmetric/ed25519.py,sha256=wl2NCCP4bZdUCqZGMkOOd6eaxjU1vXPAIwzUuFPE__w,3489
+cryptography/hazmat/primitives/asymmetric/ed448.py,sha256=2MCJ87qcyCCsjj0OvrfWFxPX8CgaC3d0mr78bt_vDIY,3440
+cryptography/hazmat/primitives/asymmetric/padding.py,sha256=6p8Ojiax_2tcm1aTnNOAkinriCJ67nSTxugg34f-hzk,2717
+cryptography/hazmat/primitives/asymmetric/rsa.py,sha256=vxvOryF00WL8mZQv9bs_-LlgobYLiPYfX246_j_ICtA,11623
+cryptography/hazmat/primitives/asymmetric/types.py,sha256=LnsOJym-wmPUJ7Knu_7bCNU3kIiELCd6krOaW_JU08I,2996
+cryptography/hazmat/primitives/asymmetric/utils.py,sha256=DPTs6T4F-UhwzFQTh-1fSEpQzazH2jf2xpIro3ItF4o,790
+cryptography/hazmat/primitives/asymmetric/x25519.py,sha256=8YJAIaU7w09jTnPU_cLwd98fMHIECgfA3R7P3Ktv-CA,3437
+cryptography/hazmat/primitives/asymmetric/x448.py,sha256=y-Yj-rgciiuH1g6FJLZftvAqgOnzT1on9gCisru7vBc,3358
+cryptography/hazmat/primitives/ciphers/__init__.py,sha256=kAyb9NSczqTrCWj0HEoVp3Cxo7AHW8ibPFQz-ZHsOtA,680
+cryptography/hazmat/primitives/ciphers/__pycache__/__init__.cpython-312.pyc,,
+cryptography/hazmat/primitives/ciphers/__pycache__/aead.cpython-312.pyc,,
+cryptography/hazmat/primitives/ciphers/__pycache__/algorithms.cpython-312.pyc,,
+cryptography/hazmat/primitives/ciphers/__pycache__/base.cpython-312.pyc,,
+cryptography/hazmat/primitives/ciphers/__pycache__/modes.cpython-312.pyc,,
+cryptography/hazmat/primitives/ciphers/aead.py,sha256=DY7qKmbt0bgB1GB7i-fQrbjEfwFG8wfUfVHvc7DA2YY,12067
+cryptography/hazmat/primitives/ciphers/algorithms.py,sha256=SCDskXc9xyzsz0NjND6tAX8t17jYTbUB2sww1ub9GuY,5000
+cryptography/hazmat/primitives/ciphers/base.py,sha256=PqNDltHdDxBhLhgtfO707H07sSOLA6ZVwjZlalOJTAo,8286
+cryptography/hazmat/primitives/ciphers/modes.py,sha256=YJQXi4PJGIIZ1rgchbMH47Ed-YiUcUSjLPEOuV8rgGE,8361
+cryptography/hazmat/primitives/cmac.py,sha256=YaeWksCYaqVoqf9zHRThAJ95ZvPUioAOfXwZUWiPzD8,2065
+cryptography/hazmat/primitives/constant_time.py,sha256=xdunWT0nf8OvKdcqUhhlFKayGp4_PgVJRU2W1wLSr_A,422
+cryptography/hazmat/primitives/hashes.py,sha256=VJpnbK2sQN2bEqwRTOoCB4nuxYx5CnqFiScMJNyhsrI,5115
+cryptography/hazmat/primitives/hmac.py,sha256=RpB3z9z5skirCQrm7zQbtnp9pLMnAjrlTUvKqF5aDDc,423
+cryptography/hazmat/primitives/kdf/__init__.py,sha256=4XibZnrYq4hh5xBjWiIXzaYW6FKx8hPbVaa_cB9zS64,750
+cryptography/hazmat/primitives/kdf/__pycache__/__init__.cpython-312.pyc,,
+cryptography/hazmat/primitives/kdf/__pycache__/concatkdf.cpython-312.pyc,,
+cryptography/hazmat/primitives/kdf/__pycache__/hkdf.cpython-312.pyc,,
+cryptography/hazmat/primitives/kdf/__pycache__/kbkdf.cpython-312.pyc,,
+cryptography/hazmat/primitives/kdf/__pycache__/pbkdf2.cpython-312.pyc,,
+cryptography/hazmat/primitives/kdf/__pycache__/scrypt.cpython-312.pyc,,
+cryptography/hazmat/primitives/kdf/__pycache__/x963kdf.cpython-312.pyc,,
+cryptography/hazmat/primitives/kdf/concatkdf.py,sha256=wGYWgILmxQWnCPkbAH1RpsCHrdKgmYrCEVrCvXVGCo8,3726
+cryptography/hazmat/primitives/kdf/hkdf.py,sha256=bBYr1yUIbOlJIEd6ZoLYcXm_yd-H54An9kNcFIJ3kbo,3045
+cryptography/hazmat/primitives/kdf/kbkdf.py,sha256=qPL6TmDUmkus6CW3ylTJfG8N8egZhjQOyXrSyLLpnak,9232
+cryptography/hazmat/primitives/kdf/pbkdf2.py,sha256=1CCH9Q5gXUpnZd3c8d8bCXgpJ3s2hZZGBnuG7FH1waM,2012
+cryptography/hazmat/primitives/kdf/scrypt.py,sha256=4QONhjxA_ZtuQtQ7QV3FnbB8ftrFnM52B4HPfV7hFys,2354
+cryptography/hazmat/primitives/kdf/x963kdf.py,sha256=S3B4Enk2Yxj9txpairotaXkavuZqQ6t6MB5a28U02ek,2002
+cryptography/hazmat/primitives/keywrap.py,sha256=Qb_N2V_E1Dti5VtDXnrtTYtJDZ8aMpur8BY5yxrXclg,5678
+cryptography/hazmat/primitives/padding.py,sha256=8pCeLaqwQPSGf51j06U5C_INvgYWVWPv3m9mxUERGmU,6242
+cryptography/hazmat/primitives/poly1305.py,sha256=P5EPQV-RB_FJPahpg01u0Ts4S_PnAmsroxIGXbGeRRo,355
+cryptography/hazmat/primitives/serialization/__init__.py,sha256=6ZlL3EicEzoGdMOat86w8y_XICCnlHdCjFI97rMxRDg,1653
+cryptography/hazmat/primitives/serialization/__pycache__/__init__.cpython-312.pyc,,
+cryptography/hazmat/primitives/serialization/__pycache__/base.cpython-312.pyc,,
+cryptography/hazmat/primitives/serialization/__pycache__/pkcs12.cpython-312.pyc,,
+cryptography/hazmat/primitives/serialization/__pycache__/pkcs7.cpython-312.pyc,,
+cryptography/hazmat/primitives/serialization/__pycache__/ssh.cpython-312.pyc,,
+cryptography/hazmat/primitives/serialization/base.py,sha256=VZjIIqnbb-x38qpg2Wf_IxZvqjsgcEzNQtQoeJiQfpw,1986
+cryptography/hazmat/primitives/serialization/pkcs12.py,sha256=NOzFxArlZhdjfgfugs8nERho1eyaxujXKGUKINchek4,6767
+cryptography/hazmat/primitives/serialization/pkcs7.py,sha256=BCvlPubXQOunb76emISK89PX9qXcBQI2CRPNe85VTZk,7392
+cryptography/hazmat/primitives/serialization/ssh.py,sha256=aLCYLPY3W1kerfCwadn5aYNzwcwIQl9c7RcsB8CKfuc,51027
+cryptography/hazmat/primitives/twofactor/__init__.py,sha256=tmMZGB-g4IU1r7lIFqASU019zr0uPp_wEBYcwdDCKCA,258
+cryptography/hazmat/primitives/twofactor/__pycache__/__init__.cpython-312.pyc,,
+cryptography/hazmat/primitives/twofactor/__pycache__/hotp.cpython-312.pyc,,
+cryptography/hazmat/primitives/twofactor/__pycache__/totp.cpython-312.pyc,,
+cryptography/hazmat/primitives/twofactor/hotp.py,sha256=uZ0PSKYDZOL0aAobiw1Zd2HD0W2Ei1niUNC2v7Tnpc8,3010
+cryptography/hazmat/primitives/twofactor/totp.py,sha256=cMbWlAapOM1SfezEx9MoMHpCW9ingNXCg6OsGv4T8jc,1473
+cryptography/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+cryptography/utils.py,sha256=DfdXc9M4kmAboE2a0pPiISt5LVnW-jhhXURy8nDHae0,4018
+cryptography/x509/__init__.py,sha256=DzZE8bR-3iiVi3Wrcq7-g5Pm64fCr5aqsTNyi_rjJu0,7870
+cryptography/x509/__pycache__/__init__.cpython-312.pyc,,
+cryptography/x509/__pycache__/base.cpython-312.pyc,,
+cryptography/x509/__pycache__/certificate_transparency.cpython-312.pyc,,
+cryptography/x509/__pycache__/extensions.cpython-312.pyc,,
+cryptography/x509/__pycache__/general_name.cpython-312.pyc,,
+cryptography/x509/__pycache__/name.cpython-312.pyc,,
+cryptography/x509/__pycache__/ocsp.cpython-312.pyc,,
+cryptography/x509/__pycache__/oid.cpython-312.pyc,,
+cryptography/x509/base.py,sha256=FbS6EFE3uJ3O-zbFPRjsO6DckrNSN5TJNZMJcnzUWFQ,35677
+cryptography/x509/certificate_transparency.py,sha256=6HvzAD0dlSQVxy6tnDhGj0-pisp1MaJ9bxQNRr92inI,2261
+cryptography/x509/extensions.py,sha256=rFEcfZiFvcONs1ot03d68dAMK2U75w0s3g9mhyWBRcI,68365
+cryptography/x509/general_name.py,sha256=zm8GxNgVJuLD6rN488c5zdHhxp5gUxeRzw8enZMWDQ0,7868
+cryptography/x509/name.py,sha256=aZ2dpsinhkza3eTxT1vNmWuFMQ7fmcA0hs4npgnkf9Q,14855
+cryptography/x509/ocsp.py,sha256=48iW7xbZ9mZLELSEl7Wwjb4vYhOQ3KcNtqgKsAb_UD0,18534
+cryptography/x509/oid.py,sha256=fFosjGsnIB_w_0YrzZv1ggkSVwZl7xmY0zofKZNZkDA,829
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography-41.0.7.dist-info/REQUESTED b/Backend/venv/lib/python3.12/site-packages/cryptography-41.0.7.dist-info/REQUESTED
new file mode 100644
index 00000000..e69de29b
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography-41.0.7.dist-info/WHEEL b/Backend/venv/lib/python3.12/site-packages/cryptography-41.0.7.dist-info/WHEEL
new file mode 100644
index 00000000..5869d67e
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography-41.0.7.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.42.0)
+Root-Is-Purelib: false
+Tag: cp37-abi3-manylinux_2_28_x86_64
+
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography-41.0.7.dist-info/top_level.txt b/Backend/venv/lib/python3.12/site-packages/cryptography-41.0.7.dist-info/top_level.txt
new file mode 100644
index 00000000..0d38bc5e
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography-41.0.7.dist-info/top_level.txt
@@ -0,0 +1 @@
+cryptography
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/__about__.py b/Backend/venv/lib/python3.12/site-packages/cryptography/__about__.py
new file mode 100644
index 00000000..014e0adb
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/__about__.py
@@ -0,0 +1,17 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+__all__ = [
+ "__version__",
+ "__author__",
+ "__copyright__",
+]
+
+__version__ = "41.0.7"
+
+
+__author__ = "The Python Cryptographic Authority and individual contributors"
+__copyright__ = f"Copyright 2013-2023 {__author__}"
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/__init__.py b/Backend/venv/lib/python3.12/site-packages/cryptography/__init__.py
new file mode 100644
index 00000000..86b9a257
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/__init__.py
@@ -0,0 +1,13 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+from cryptography.__about__ import __author__, __copyright__, __version__
+
+__all__ = [
+ "__version__",
+ "__author__",
+ "__copyright__",
+]
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/__pycache__/__about__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cryptography/__pycache__/__about__.cpython-312.pyc
new file mode 100644
index 00000000..16a74985
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cryptography/__pycache__/__about__.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cryptography/__pycache__/__init__.cpython-312.pyc
new file mode 100644
index 00000000..dea6f303
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cryptography/__pycache__/__init__.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/__pycache__/exceptions.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cryptography/__pycache__/exceptions.cpython-312.pyc
new file mode 100644
index 00000000..16c476e0
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cryptography/__pycache__/exceptions.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/__pycache__/fernet.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cryptography/__pycache__/fernet.cpython-312.pyc
new file mode 100644
index 00000000..0c0e986a
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cryptography/__pycache__/fernet.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/__pycache__/utils.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cryptography/__pycache__/utils.cpython-312.pyc
new file mode 100644
index 00000000..643a0e10
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cryptography/__pycache__/utils.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/exceptions.py b/Backend/venv/lib/python3.12/site-packages/cryptography/exceptions.py
new file mode 100644
index 00000000..47fdd18e
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/exceptions.py
@@ -0,0 +1,54 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import typing
+
+from cryptography.hazmat.bindings._rust import exceptions as rust_exceptions
+
+if typing.TYPE_CHECKING:
+ from cryptography.hazmat.bindings._rust import openssl as rust_openssl
+
+_Reasons = rust_exceptions._Reasons
+
+
+class UnsupportedAlgorithm(Exception):
+ def __init__(
+ self, message: str, reason: typing.Optional[_Reasons] = None
+ ) -> None:
+ super().__init__(message)
+ self._reason = reason
+
+
+class AlreadyFinalized(Exception):
+ pass
+
+
+class AlreadyUpdated(Exception):
+ pass
+
+
+class NotYetFinalized(Exception):
+ pass
+
+
+class InvalidTag(Exception):
+ pass
+
+
+class InvalidSignature(Exception):
+ pass
+
+
+class InternalError(Exception):
+ def __init__(
+ self, msg: str, err_code: typing.List[rust_openssl.OpenSSLError]
+ ) -> None:
+ super().__init__(msg)
+ self.err_code = err_code
+
+
+class InvalidKey(Exception):
+ pass
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/fernet.py b/Backend/venv/lib/python3.12/site-packages/cryptography/fernet.py
new file mode 100644
index 00000000..ad8fb40b
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/fernet.py
@@ -0,0 +1,221 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import base64
+import binascii
+import os
+import time
+import typing
+
+from cryptography import utils
+from cryptography.exceptions import InvalidSignature
+from cryptography.hazmat.primitives import hashes, padding
+from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
+from cryptography.hazmat.primitives.hmac import HMAC
+
+
+class InvalidToken(Exception):
+ pass
+
+
+_MAX_CLOCK_SKEW = 60
+
+
+class Fernet:
+ def __init__(
+ self,
+ key: typing.Union[bytes, str],
+ backend: typing.Any = None,
+ ) -> None:
+ try:
+ key = base64.urlsafe_b64decode(key)
+ except binascii.Error as exc:
+ raise ValueError(
+ "Fernet key must be 32 url-safe base64-encoded bytes."
+ ) from exc
+ if len(key) != 32:
+ raise ValueError(
+ "Fernet key must be 32 url-safe base64-encoded bytes."
+ )
+
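+ # A Fernet key is two 16-byte halves: an HMAC-SHA256 signing key
+ # followed by an AES-128-CBC encryption key.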
+ self._signing_key = key[:16]
+ self._encryption_key = key[16:]
+
+ @classmethod
+ def generate_key(cls) -> bytes:
+ return base64.urlsafe_b64encode(os.urandom(32))
+
+ def encrypt(self, data: bytes) -> bytes:
+ return self.encrypt_at_time(data, int(time.time()))
+
+ def encrypt_at_time(self, data: bytes, current_time: int) -> bytes:
+ iv = os.urandom(16)
+ return self._encrypt_from_parts(data, current_time, iv)
+
+ def _encrypt_from_parts(
+ self, data: bytes, current_time: int, iv: bytes
+ ) -> bytes:
+ utils._check_bytes("data", data)
+
+ padder = padding.PKCS7(algorithms.AES.block_size).padder()
+ padded_data = padder.update(data) + padder.finalize()
+ encryptor = Cipher(
+ algorithms.AES(self._encryption_key),
+ modes.CBC(iv),
+ ).encryptor()
+ ciphertext = encryptor.update(padded_data) + encryptor.finalize()
+
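+ # Token layout before base64 encoding: 0x80 version byte, 8-byte
+ # big-endian timestamp, 16-byte IV, ciphertext, then (appended below)
+ # a 32-byte HMAC-SHA256 over all preceding bytes.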
+ basic_parts = (
+ b"\x80"
+ + current_time.to_bytes(length=8, byteorder="big")
+ + iv
+ + ciphertext
+ )
+
+ h = HMAC(self._signing_key, hashes.SHA256())
+ h.update(basic_parts)
+ hmac = h.finalize()
+ return base64.urlsafe_b64encode(basic_parts + hmac)
+
+ def decrypt(
+ self, token: typing.Union[bytes, str], ttl: typing.Optional[int] = None
+ ) -> bytes:
+ timestamp, data = Fernet._get_unverified_token_data(token)
+ if ttl is None:
+ time_info = None
+ else:
+ time_info = (ttl, int(time.time()))
+ return self._decrypt_data(data, timestamp, time_info)
+
+ def decrypt_at_time(
+ self, token: typing.Union[bytes, str], ttl: int, current_time: int
+ ) -> bytes:
+ if ttl is None:
+ raise ValueError(
+ "decrypt_at_time() can only be used with a non-None ttl"
+ )
+ timestamp, data = Fernet._get_unverified_token_data(token)
+ return self._decrypt_data(data, timestamp, (ttl, current_time))
+
+ def extract_timestamp(self, token: typing.Union[bytes, str]) -> int:
+ timestamp, data = Fernet._get_unverified_token_data(token)
+ # Verify the token was not tampered with.
+ self._verify_signature(data)
+ return timestamp
+
+ @staticmethod
+ def _get_unverified_token_data(
+ token: typing.Union[bytes, str]
+ ) -> typing.Tuple[int, bytes]:
+ if not isinstance(token, (str, bytes)):
+ raise TypeError("token must be bytes or str")
+
+ try:
+ data = base64.urlsafe_b64decode(token)
+ except (TypeError, binascii.Error):
+ raise InvalidToken
+
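+ # All current Fernet tokens begin with the 0x80 version byte.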
+ if not data or data[0] != 0x80:
+ raise InvalidToken
+
+ if len(data) < 9:
+ raise InvalidToken
+
+ timestamp = int.from_bytes(data[1:9], byteorder="big")
+ return timestamp, data
+
+ def _verify_signature(self, data: bytes) -> None:
+ h = HMAC(self._signing_key, hashes.SHA256())
+ h.update(data[:-32])
+ try:
+ h.verify(data[-32:])
+ except InvalidSignature:
+ raise InvalidToken
+
+ def _decrypt_data(
+ self,
+ data: bytes,
+ timestamp: int,
+ time_info: typing.Optional[typing.Tuple[int, int]],
+ ) -> bytes:
+ if time_info is not None:
+ ttl, current_time = time_info
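+ # Reject tokens older than the ttl, and tokens whose timestamp lies
+ # more than _MAX_CLOCK_SKEW seconds in the future.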
+ if timestamp + ttl < current_time:
+ raise InvalidToken
+
+ if current_time + _MAX_CLOCK_SKEW < timestamp:
+ raise InvalidToken
+
+ self._verify_signature(data)
+
+ iv = data[9:25]
+ ciphertext = data[25:-32]
+ decryptor = Cipher(
+ algorithms.AES(self._encryption_key), modes.CBC(iv)
+ ).decryptor()
+ plaintext_padded = decryptor.update(ciphertext)
+ try:
+ plaintext_padded += decryptor.finalize()
+ except ValueError:
+ raise InvalidToken
+ unpadder = padding.PKCS7(algorithms.AES.block_size).unpadder()
+
+ unpadded = unpadder.update(plaintext_padded)
+ try:
+ unpadded += unpadder.finalize()
+ except ValueError:
+ raise InvalidToken
+ return unpadded
+
+
+class MultiFernet:
+ def __init__(self, fernets: typing.Iterable[Fernet]):
+ fernets = list(fernets)
+ if not fernets:
+ raise ValueError(
+ "MultiFernet requires at least one Fernet instance"
+ )
+ self._fernets = fernets
+
+ def encrypt(self, msg: bytes) -> bytes:
+ return self.encrypt_at_time(msg, int(time.time()))
+
+ def encrypt_at_time(self, msg: bytes, current_time: int) -> bytes:
+ return self._fernets[0].encrypt_at_time(msg, current_time)
+
+ def rotate(self, msg: typing.Union[bytes, str]) -> bytes:
+ timestamp, data = Fernet._get_unverified_token_data(msg)
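+ # Try each key in turn; the for/else raises InvalidToken only if no
+ # key can authenticate and decrypt the token.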
+ for f in self._fernets:
+ try:
+ p = f._decrypt_data(data, timestamp, None)
+ break
+ except InvalidToken:
+ pass
+ else:
+ raise InvalidToken
+
+ iv = os.urandom(16)
+ return self._fernets[0]._encrypt_from_parts(p, timestamp, iv)
+
+ def decrypt(
+ self, msg: typing.Union[bytes, str], ttl: typing.Optional[int] = None
+ ) -> bytes:
+ for f in self._fernets:
+ try:
+ return f.decrypt(msg, ttl)
+ except InvalidToken:
+ pass
+ raise InvalidToken
+
+ def decrypt_at_time(
+ self, msg: typing.Union[bytes, str], ttl: int, current_time: int
+ ) -> bytes:
+ for f in self._fernets:
+ try:
+ return f.decrypt_at_time(msg, ttl, current_time)
+ except InvalidToken:
+ pass
+ raise InvalidToken
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/__init__.py b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/__init__.py
new file mode 100644
index 00000000..b9f11870
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/__init__.py
@@ -0,0 +1,13 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+"""
+Hazardous Materials
+
+This is a "Hazardous Materials" module. You should ONLY use it if you're
+100% absolutely sure that you know what you're doing because this module
+is full of land mines, dragons, and dinosaurs with laser guns.
+"""
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/__pycache__/__init__.cpython-312.pyc
new file mode 100644
index 00000000..2c4887a1
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/__pycache__/__init__.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/__pycache__/_oid.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/__pycache__/_oid.cpython-312.pyc
new file mode 100644
index 00000000..23e9f2d9
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/__pycache__/_oid.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/_oid.py b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/_oid.py
new file mode 100644
index 00000000..01d4b340
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/_oid.py
@@ -0,0 +1,299 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import typing
+
+from cryptography.hazmat.bindings._rust import (
+ ObjectIdentifier as ObjectIdentifier,
+)
+from cryptography.hazmat.primitives import hashes
+
+
+class ExtensionOID:
+ SUBJECT_DIRECTORY_ATTRIBUTES = ObjectIdentifier("2.5.29.9")
+ SUBJECT_KEY_IDENTIFIER = ObjectIdentifier("2.5.29.14")
+ KEY_USAGE = ObjectIdentifier("2.5.29.15")
+ SUBJECT_ALTERNATIVE_NAME = ObjectIdentifier("2.5.29.17")
+ ISSUER_ALTERNATIVE_NAME = ObjectIdentifier("2.5.29.18")
+ BASIC_CONSTRAINTS = ObjectIdentifier("2.5.29.19")
+ NAME_CONSTRAINTS = ObjectIdentifier("2.5.29.30")
+ CRL_DISTRIBUTION_POINTS = ObjectIdentifier("2.5.29.31")
+ CERTIFICATE_POLICIES = ObjectIdentifier("2.5.29.32")
+ POLICY_MAPPINGS = ObjectIdentifier("2.5.29.33")
+ AUTHORITY_KEY_IDENTIFIER = ObjectIdentifier("2.5.29.35")
+ POLICY_CONSTRAINTS = ObjectIdentifier("2.5.29.36")
+ EXTENDED_KEY_USAGE = ObjectIdentifier("2.5.29.37")
+ FRESHEST_CRL = ObjectIdentifier("2.5.29.46")
+ INHIBIT_ANY_POLICY = ObjectIdentifier("2.5.29.54")
+ ISSUING_DISTRIBUTION_POINT = ObjectIdentifier("2.5.29.28")
+ AUTHORITY_INFORMATION_ACCESS = ObjectIdentifier("1.3.6.1.5.5.7.1.1")
+ SUBJECT_INFORMATION_ACCESS = ObjectIdentifier("1.3.6.1.5.5.7.1.11")
+ OCSP_NO_CHECK = ObjectIdentifier("1.3.6.1.5.5.7.48.1.5")
+ TLS_FEATURE = ObjectIdentifier("1.3.6.1.5.5.7.1.24")
+ CRL_NUMBER = ObjectIdentifier("2.5.29.20")
+ DELTA_CRL_INDICATOR = ObjectIdentifier("2.5.29.27")
+ PRECERT_SIGNED_CERTIFICATE_TIMESTAMPS = ObjectIdentifier(
+ "1.3.6.1.4.1.11129.2.4.2"
+ )
+ PRECERT_POISON = ObjectIdentifier("1.3.6.1.4.1.11129.2.4.3")
+ SIGNED_CERTIFICATE_TIMESTAMPS = ObjectIdentifier("1.3.6.1.4.1.11129.2.4.5")
+ MS_CERTIFICATE_TEMPLATE = ObjectIdentifier("1.3.6.1.4.1.311.21.7")
+
+
+class OCSPExtensionOID:
+ NONCE = ObjectIdentifier("1.3.6.1.5.5.7.48.1.2")
+ ACCEPTABLE_RESPONSES = ObjectIdentifier("1.3.6.1.5.5.7.48.1.4")
+
+
+class CRLEntryExtensionOID:
+ CERTIFICATE_ISSUER = ObjectIdentifier("2.5.29.29")
+ CRL_REASON = ObjectIdentifier("2.5.29.21")
+ INVALIDITY_DATE = ObjectIdentifier("2.5.29.24")
+
+
+class NameOID:
+ COMMON_NAME = ObjectIdentifier("2.5.4.3")
+ COUNTRY_NAME = ObjectIdentifier("2.5.4.6")
+ LOCALITY_NAME = ObjectIdentifier("2.5.4.7")
+ STATE_OR_PROVINCE_NAME = ObjectIdentifier("2.5.4.8")
+ STREET_ADDRESS = ObjectIdentifier("2.5.4.9")
+ ORGANIZATION_NAME = ObjectIdentifier("2.5.4.10")
+ ORGANIZATIONAL_UNIT_NAME = ObjectIdentifier("2.5.4.11")
+ SERIAL_NUMBER = ObjectIdentifier("2.5.4.5")
+ SURNAME = ObjectIdentifier("2.5.4.4")
+ GIVEN_NAME = ObjectIdentifier("2.5.4.42")
+ TITLE = ObjectIdentifier("2.5.4.12")
+ INITIALS = ObjectIdentifier("2.5.4.43")
+ GENERATION_QUALIFIER = ObjectIdentifier("2.5.4.44")
+ X500_UNIQUE_IDENTIFIER = ObjectIdentifier("2.5.4.45")
+ DN_QUALIFIER = ObjectIdentifier("2.5.4.46")
+ PSEUDONYM = ObjectIdentifier("2.5.4.65")
+ USER_ID = ObjectIdentifier("0.9.2342.19200300.100.1.1")
+ DOMAIN_COMPONENT = ObjectIdentifier("0.9.2342.19200300.100.1.25")
+ EMAIL_ADDRESS = ObjectIdentifier("1.2.840.113549.1.9.1")
+ JURISDICTION_COUNTRY_NAME = ObjectIdentifier("1.3.6.1.4.1.311.60.2.1.3")
+ JURISDICTION_LOCALITY_NAME = ObjectIdentifier("1.3.6.1.4.1.311.60.2.1.1")
+ JURISDICTION_STATE_OR_PROVINCE_NAME = ObjectIdentifier(
+ "1.3.6.1.4.1.311.60.2.1.2"
+ )
+ BUSINESS_CATEGORY = ObjectIdentifier("2.5.4.15")
+ POSTAL_ADDRESS = ObjectIdentifier("2.5.4.16")
+ POSTAL_CODE = ObjectIdentifier("2.5.4.17")
+ INN = ObjectIdentifier("1.2.643.3.131.1.1")
+ OGRN = ObjectIdentifier("1.2.643.100.1")
+ SNILS = ObjectIdentifier("1.2.643.100.3")
+ UNSTRUCTURED_NAME = ObjectIdentifier("1.2.840.113549.1.9.2")
+
+
+class SignatureAlgorithmOID:
+ RSA_WITH_MD5 = ObjectIdentifier("1.2.840.113549.1.1.4")
+ RSA_WITH_SHA1 = ObjectIdentifier("1.2.840.113549.1.1.5")
+ # This is an alternate OID for RSA with SHA1 that is occasionally seen
+ _RSA_WITH_SHA1 = ObjectIdentifier("1.3.14.3.2.29")
+ RSA_WITH_SHA224 = ObjectIdentifier("1.2.840.113549.1.1.14")
+ RSA_WITH_SHA256 = ObjectIdentifier("1.2.840.113549.1.1.11")
+ RSA_WITH_SHA384 = ObjectIdentifier("1.2.840.113549.1.1.12")
+ RSA_WITH_SHA512 = ObjectIdentifier("1.2.840.113549.1.1.13")
+ RSA_WITH_SHA3_224 = ObjectIdentifier("2.16.840.1.101.3.4.3.13")
+ RSA_WITH_SHA3_256 = ObjectIdentifier("2.16.840.1.101.3.4.3.14")
+ RSA_WITH_SHA3_384 = ObjectIdentifier("2.16.840.1.101.3.4.3.15")
+ RSA_WITH_SHA3_512 = ObjectIdentifier("2.16.840.1.101.3.4.3.16")
+ RSASSA_PSS = ObjectIdentifier("1.2.840.113549.1.1.10")
+ ECDSA_WITH_SHA1 = ObjectIdentifier("1.2.840.10045.4.1")
+ ECDSA_WITH_SHA224 = ObjectIdentifier("1.2.840.10045.4.3.1")
+ ECDSA_WITH_SHA256 = ObjectIdentifier("1.2.840.10045.4.3.2")
+ ECDSA_WITH_SHA384 = ObjectIdentifier("1.2.840.10045.4.3.3")
+ ECDSA_WITH_SHA512 = ObjectIdentifier("1.2.840.10045.4.3.4")
+ ECDSA_WITH_SHA3_224 = ObjectIdentifier("2.16.840.1.101.3.4.3.9")
+ ECDSA_WITH_SHA3_256 = ObjectIdentifier("2.16.840.1.101.3.4.3.10")
+ ECDSA_WITH_SHA3_384 = ObjectIdentifier("2.16.840.1.101.3.4.3.11")
+ ECDSA_WITH_SHA3_512 = ObjectIdentifier("2.16.840.1.101.3.4.3.12")
+ DSA_WITH_SHA1 = ObjectIdentifier("1.2.840.10040.4.3")
+ DSA_WITH_SHA224 = ObjectIdentifier("2.16.840.1.101.3.4.3.1")
+ DSA_WITH_SHA256 = ObjectIdentifier("2.16.840.1.101.3.4.3.2")
+ DSA_WITH_SHA384 = ObjectIdentifier("2.16.840.1.101.3.4.3.3")
+ DSA_WITH_SHA512 = ObjectIdentifier("2.16.840.1.101.3.4.3.4")
+ ED25519 = ObjectIdentifier("1.3.101.112")
+ ED448 = ObjectIdentifier("1.3.101.113")
+ GOSTR3411_94_WITH_3410_2001 = ObjectIdentifier("1.2.643.2.2.3")
+ GOSTR3410_2012_WITH_3411_2012_256 = ObjectIdentifier("1.2.643.7.1.1.3.2")
+ GOSTR3410_2012_WITH_3411_2012_512 = ObjectIdentifier("1.2.643.7.1.1.3.3")
+
+
+_SIG_OIDS_TO_HASH: typing.Dict[
+ ObjectIdentifier, typing.Optional[hashes.HashAlgorithm]
+] = {
+ SignatureAlgorithmOID.RSA_WITH_MD5: hashes.MD5(),
+ SignatureAlgorithmOID.RSA_WITH_SHA1: hashes.SHA1(),
+ SignatureAlgorithmOID._RSA_WITH_SHA1: hashes.SHA1(),
+ SignatureAlgorithmOID.RSA_WITH_SHA224: hashes.SHA224(),
+ SignatureAlgorithmOID.RSA_WITH_SHA256: hashes.SHA256(),
+ SignatureAlgorithmOID.RSA_WITH_SHA384: hashes.SHA384(),
+ SignatureAlgorithmOID.RSA_WITH_SHA512: hashes.SHA512(),
+ SignatureAlgorithmOID.RSA_WITH_SHA3_224: hashes.SHA3_224(),
+ SignatureAlgorithmOID.RSA_WITH_SHA3_256: hashes.SHA3_256(),
+ SignatureAlgorithmOID.RSA_WITH_SHA3_384: hashes.SHA3_384(),
+ SignatureAlgorithmOID.RSA_WITH_SHA3_512: hashes.SHA3_512(),
+ SignatureAlgorithmOID.ECDSA_WITH_SHA1: hashes.SHA1(),
+ SignatureAlgorithmOID.ECDSA_WITH_SHA224: hashes.SHA224(),
+ SignatureAlgorithmOID.ECDSA_WITH_SHA256: hashes.SHA256(),
+ SignatureAlgorithmOID.ECDSA_WITH_SHA384: hashes.SHA384(),
+ SignatureAlgorithmOID.ECDSA_WITH_SHA512: hashes.SHA512(),
+ SignatureAlgorithmOID.ECDSA_WITH_SHA3_224: hashes.SHA3_224(),
+ SignatureAlgorithmOID.ECDSA_WITH_SHA3_256: hashes.SHA3_256(),
+ SignatureAlgorithmOID.ECDSA_WITH_SHA3_384: hashes.SHA3_384(),
+ SignatureAlgorithmOID.ECDSA_WITH_SHA3_512: hashes.SHA3_512(),
+ SignatureAlgorithmOID.DSA_WITH_SHA1: hashes.SHA1(),
+ SignatureAlgorithmOID.DSA_WITH_SHA224: hashes.SHA224(),
+ SignatureAlgorithmOID.DSA_WITH_SHA256: hashes.SHA256(),
+ SignatureAlgorithmOID.ED25519: None,
+ SignatureAlgorithmOID.ED448: None,
+ SignatureAlgorithmOID.GOSTR3411_94_WITH_3410_2001: None,
+ SignatureAlgorithmOID.GOSTR3410_2012_WITH_3411_2012_256: None,
+ SignatureAlgorithmOID.GOSTR3410_2012_WITH_3411_2012_512: None,
+}
+
+
+class ExtendedKeyUsageOID:
+ SERVER_AUTH = ObjectIdentifier("1.3.6.1.5.5.7.3.1")
+ CLIENT_AUTH = ObjectIdentifier("1.3.6.1.5.5.7.3.2")
+ CODE_SIGNING = ObjectIdentifier("1.3.6.1.5.5.7.3.3")
+ EMAIL_PROTECTION = ObjectIdentifier("1.3.6.1.5.5.7.3.4")
+ TIME_STAMPING = ObjectIdentifier("1.3.6.1.5.5.7.3.8")
+ OCSP_SIGNING = ObjectIdentifier("1.3.6.1.5.5.7.3.9")
+ ANY_EXTENDED_KEY_USAGE = ObjectIdentifier("2.5.29.37.0")
+ SMARTCARD_LOGON = ObjectIdentifier("1.3.6.1.4.1.311.20.2.2")
+ KERBEROS_PKINIT_KDC = ObjectIdentifier("1.3.6.1.5.2.3.5")
+ IPSEC_IKE = ObjectIdentifier("1.3.6.1.5.5.7.3.17")
+ CERTIFICATE_TRANSPARENCY = ObjectIdentifier("1.3.6.1.4.1.11129.2.4.4")
+
+
+class AuthorityInformationAccessOID:
+ CA_ISSUERS = ObjectIdentifier("1.3.6.1.5.5.7.48.2")
+ OCSP = ObjectIdentifier("1.3.6.1.5.5.7.48.1")
+
+
+class SubjectInformationAccessOID:
+ CA_REPOSITORY = ObjectIdentifier("1.3.6.1.5.5.7.48.5")
+
+
+class CertificatePoliciesOID:
+ CPS_QUALIFIER = ObjectIdentifier("1.3.6.1.5.5.7.2.1")
+ CPS_USER_NOTICE = ObjectIdentifier("1.3.6.1.5.5.7.2.2")
+ ANY_POLICY = ObjectIdentifier("2.5.29.32.0")
+
+
+class AttributeOID:
+ CHALLENGE_PASSWORD = ObjectIdentifier("1.2.840.113549.1.9.7")
+ UNSTRUCTURED_NAME = ObjectIdentifier("1.2.840.113549.1.9.2")
+
+
+_OID_NAMES = {
+ NameOID.COMMON_NAME: "commonName",
+ NameOID.COUNTRY_NAME: "countryName",
+ NameOID.LOCALITY_NAME: "localityName",
+ NameOID.STATE_OR_PROVINCE_NAME: "stateOrProvinceName",
+ NameOID.STREET_ADDRESS: "streetAddress",
+ NameOID.ORGANIZATION_NAME: "organizationName",
+ NameOID.ORGANIZATIONAL_UNIT_NAME: "organizationalUnitName",
+ NameOID.SERIAL_NUMBER: "serialNumber",
+ NameOID.SURNAME: "surname",
+ NameOID.GIVEN_NAME: "givenName",
+ NameOID.TITLE: "title",
+ NameOID.GENERATION_QUALIFIER: "generationQualifier",
+ NameOID.X500_UNIQUE_IDENTIFIER: "x500UniqueIdentifier",
+ NameOID.DN_QUALIFIER: "dnQualifier",
+ NameOID.PSEUDONYM: "pseudonym",
+ NameOID.USER_ID: "userID",
+ NameOID.DOMAIN_COMPONENT: "domainComponent",
+ NameOID.EMAIL_ADDRESS: "emailAddress",
+ NameOID.JURISDICTION_COUNTRY_NAME: "jurisdictionCountryName",
+ NameOID.JURISDICTION_LOCALITY_NAME: "jurisdictionLocalityName",
+ NameOID.JURISDICTION_STATE_OR_PROVINCE_NAME: (
+ "jurisdictionStateOrProvinceName"
+ ),
+ NameOID.BUSINESS_CATEGORY: "businessCategory",
+ NameOID.POSTAL_ADDRESS: "postalAddress",
+ NameOID.POSTAL_CODE: "postalCode",
+ NameOID.INN: "INN",
+ NameOID.OGRN: "OGRN",
+ NameOID.SNILS: "SNILS",
+ NameOID.UNSTRUCTURED_NAME: "unstructuredName",
+ SignatureAlgorithmOID.RSA_WITH_MD5: "md5WithRSAEncryption",
+ SignatureAlgorithmOID.RSA_WITH_SHA1: "sha1WithRSAEncryption",
+ SignatureAlgorithmOID.RSA_WITH_SHA224: "sha224WithRSAEncryption",
+ SignatureAlgorithmOID.RSA_WITH_SHA256: "sha256WithRSAEncryption",
+ SignatureAlgorithmOID.RSA_WITH_SHA384: "sha384WithRSAEncryption",
+ SignatureAlgorithmOID.RSA_WITH_SHA512: "sha512WithRSAEncryption",
+ SignatureAlgorithmOID.RSASSA_PSS: "RSASSA-PSS",
+ SignatureAlgorithmOID.ECDSA_WITH_SHA1: "ecdsa-with-SHA1",
+ SignatureAlgorithmOID.ECDSA_WITH_SHA224: "ecdsa-with-SHA224",
+ SignatureAlgorithmOID.ECDSA_WITH_SHA256: "ecdsa-with-SHA256",
+ SignatureAlgorithmOID.ECDSA_WITH_SHA384: "ecdsa-with-SHA384",
+ SignatureAlgorithmOID.ECDSA_WITH_SHA512: "ecdsa-with-SHA512",
+ SignatureAlgorithmOID.DSA_WITH_SHA1: "dsa-with-sha1",
+ SignatureAlgorithmOID.DSA_WITH_SHA224: "dsa-with-sha224",
+ SignatureAlgorithmOID.DSA_WITH_SHA256: "dsa-with-sha256",
+ SignatureAlgorithmOID.ED25519: "ed25519",
+ SignatureAlgorithmOID.ED448: "ed448",
+ SignatureAlgorithmOID.GOSTR3411_94_WITH_3410_2001: (
+ "GOST R 34.11-94 with GOST R 34.10-2001"
+ ),
+ SignatureAlgorithmOID.GOSTR3410_2012_WITH_3411_2012_256: (
+ "GOST R 34.10-2012 with GOST R 34.11-2012 (256 bit)"
+ ),
+ SignatureAlgorithmOID.GOSTR3410_2012_WITH_3411_2012_512: (
+ "GOST R 34.10-2012 with GOST R 34.11-2012 (512 bit)"
+ ),
+ ExtendedKeyUsageOID.SERVER_AUTH: "serverAuth",
+ ExtendedKeyUsageOID.CLIENT_AUTH: "clientAuth",
+ ExtendedKeyUsageOID.CODE_SIGNING: "codeSigning",
+ ExtendedKeyUsageOID.EMAIL_PROTECTION: "emailProtection",
+ ExtendedKeyUsageOID.TIME_STAMPING: "timeStamping",
+ ExtendedKeyUsageOID.OCSP_SIGNING: "OCSPSigning",
+ ExtendedKeyUsageOID.SMARTCARD_LOGON: "msSmartcardLogin",
+ ExtendedKeyUsageOID.KERBEROS_PKINIT_KDC: "pkInitKDC",
+ ExtensionOID.SUBJECT_DIRECTORY_ATTRIBUTES: "subjectDirectoryAttributes",
+ ExtensionOID.SUBJECT_KEY_IDENTIFIER: "subjectKeyIdentifier",
+ ExtensionOID.KEY_USAGE: "keyUsage",
+ ExtensionOID.SUBJECT_ALTERNATIVE_NAME: "subjectAltName",
+ ExtensionOID.ISSUER_ALTERNATIVE_NAME: "issuerAltName",
+ ExtensionOID.BASIC_CONSTRAINTS: "basicConstraints",
+ ExtensionOID.PRECERT_SIGNED_CERTIFICATE_TIMESTAMPS: (
+ "signedCertificateTimestampList"
+ ),
+ ExtensionOID.SIGNED_CERTIFICATE_TIMESTAMPS: (
+ "signedCertificateTimestampList"
+ ),
+ ExtensionOID.PRECERT_POISON: "ctPoison",
+ ExtensionOID.MS_CERTIFICATE_TEMPLATE: "msCertificateTemplate",
+ CRLEntryExtensionOID.CRL_REASON: "cRLReason",
+ CRLEntryExtensionOID.INVALIDITY_DATE: "invalidityDate",
+ CRLEntryExtensionOID.CERTIFICATE_ISSUER: "certificateIssuer",
+ ExtensionOID.NAME_CONSTRAINTS: "nameConstraints",
+ ExtensionOID.CRL_DISTRIBUTION_POINTS: "cRLDistributionPoints",
+ ExtensionOID.CERTIFICATE_POLICIES: "certificatePolicies",
+ ExtensionOID.POLICY_MAPPINGS: "policyMappings",
+ ExtensionOID.AUTHORITY_KEY_IDENTIFIER: "authorityKeyIdentifier",
+ ExtensionOID.POLICY_CONSTRAINTS: "policyConstraints",
+ ExtensionOID.EXTENDED_KEY_USAGE: "extendedKeyUsage",
+ ExtensionOID.FRESHEST_CRL: "freshestCRL",
+ ExtensionOID.INHIBIT_ANY_POLICY: "inhibitAnyPolicy",
+    ExtensionOID.ISSUING_DISTRIBUTION_POINT: "issuingDistributionPoint",
+ ExtensionOID.AUTHORITY_INFORMATION_ACCESS: "authorityInfoAccess",
+ ExtensionOID.SUBJECT_INFORMATION_ACCESS: "subjectInfoAccess",
+ ExtensionOID.OCSP_NO_CHECK: "OCSPNoCheck",
+ ExtensionOID.CRL_NUMBER: "cRLNumber",
+ ExtensionOID.DELTA_CRL_INDICATOR: "deltaCRLIndicator",
+ ExtensionOID.TLS_FEATURE: "TLSFeature",
+ AuthorityInformationAccessOID.OCSP: "OCSP",
+ AuthorityInformationAccessOID.CA_ISSUERS: "caIssuers",
+ SubjectInformationAccessOID.CA_REPOSITORY: "caRepository",
+ CertificatePoliciesOID.CPS_QUALIFIER: "id-qt-cps",
+ CertificatePoliciesOID.CPS_USER_NOTICE: "id-qt-unotice",
+ OCSPExtensionOID.NONCE: "OCSPNonce",
+ AttributeOID.CHALLENGE_PASSWORD: "challengePassword",
+}
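+
+
+# Illustrative sketch (not part of the upstream module): the tables above let
+# a caller map a certificate's signature algorithm OID to a digest or to a
+# human-readable name. Assuming a parsed certificate object `cert`:
+#
+#     sig_hash = _SIG_OIDS_TO_HASH.get(cert.signature_algorithm_oid)
+#     # e.g. hashes.SHA256(); None for ed25519/ed448, which hash internally
+#     name = _OID_NAMES.get(cert.signature_algorithm_oid, "unknown OID")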
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/__init__.py b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/__init__.py
new file mode 100644
index 00000000..b4400aa0
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/__init__.py
@@ -0,0 +1,13 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+from typing import Any
+
+
+def default_backend() -> Any:
+ from cryptography.hazmat.backends.openssl.backend import backend
+
+ return backend
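+
+
+# Illustrative usage (an assumption about call sites, not part of this file):
+# default_backend() lazily imports and returns the OpenSSL Backend singleton.
+# Modern cryptography APIs no longer require passing a backend explicitly.
+#
+#     from cryptography.hazmat.backends import default_backend
+#     backend = default_backend()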
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/__pycache__/__init__.cpython-312.pyc
new file mode 100644
index 00000000..3d6578df
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/__pycache__/__init__.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/__init__.py b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/__init__.py
new file mode 100644
index 00000000..51b04476
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/__init__.py
@@ -0,0 +1,9 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+from cryptography.hazmat.backends.openssl.backend import backend
+
+__all__ = ["backend"]
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/__pycache__/__init__.cpython-312.pyc
new file mode 100644
index 00000000..be352723
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/__pycache__/__init__.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/__pycache__/aead.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/__pycache__/aead.cpython-312.pyc
new file mode 100644
index 00000000..ab26fdd8
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/__pycache__/aead.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/__pycache__/backend.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/__pycache__/backend.cpython-312.pyc
new file mode 100644
index 00000000..1282ae3b
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/__pycache__/backend.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/__pycache__/ciphers.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/__pycache__/ciphers.cpython-312.pyc
new file mode 100644
index 00000000..1be2e05b
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/__pycache__/ciphers.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/__pycache__/cmac.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/__pycache__/cmac.cpython-312.pyc
new file mode 100644
index 00000000..8266324c
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/__pycache__/cmac.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/__pycache__/decode_asn1.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/__pycache__/decode_asn1.cpython-312.pyc
new file mode 100644
index 00000000..ded76cda
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/__pycache__/decode_asn1.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/__pycache__/ec.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/__pycache__/ec.cpython-312.pyc
new file mode 100644
index 00000000..ce01d9ca
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/__pycache__/ec.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/__pycache__/rsa.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/__pycache__/rsa.cpython-312.pyc
new file mode 100644
index 00000000..f341cff7
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/__pycache__/rsa.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/__pycache__/utils.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/__pycache__/utils.cpython-312.pyc
new file mode 100644
index 00000000..947ff333
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/__pycache__/utils.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/aead.py b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/aead.py
new file mode 100644
index 00000000..b36f535f
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/aead.py
@@ -0,0 +1,527 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import typing
+
+from cryptography.exceptions import InvalidTag
+
+if typing.TYPE_CHECKING:
+ from cryptography.hazmat.backends.openssl.backend import Backend
+ from cryptography.hazmat.primitives.ciphers.aead import (
+ AESCCM,
+ AESGCM,
+ AESOCB3,
+ AESSIV,
+ ChaCha20Poly1305,
+ )
+
+ _AEADTypes = typing.Union[
+ AESCCM, AESGCM, AESOCB3, AESSIV, ChaCha20Poly1305
+ ]
+
+
+def _is_evp_aead_supported_cipher(
+ backend: Backend, cipher: _AEADTypes
+) -> bool:
+ """
+ Checks whether the given cipher is supported through
+ EVP_AEAD rather than the normal OpenSSL EVP_CIPHER API.
+ """
+ from cryptography.hazmat.primitives.ciphers.aead import ChaCha20Poly1305
+
+ return backend._lib.Cryptography_HAS_EVP_AEAD and isinstance(
+ cipher, ChaCha20Poly1305
+ )
+
+
+def _aead_cipher_supported(backend: Backend, cipher: _AEADTypes) -> bool:
+ if _is_evp_aead_supported_cipher(backend, cipher):
+ return True
+ else:
+ cipher_name = _evp_cipher_cipher_name(cipher)
+ if backend._fips_enabled and cipher_name not in backend._fips_aead:
+ return False
+        # SIV isn't loaded through get_cipherbyname; it uses a fetch API
+        # that is only available in 3.0+. But if we know we're on 3.0+ then
+        # we know it's supported.
+ if cipher_name.endswith(b"-siv"):
+ return backend._lib.CRYPTOGRAPHY_OPENSSL_300_OR_GREATER == 1
+ else:
+ return (
+ backend._lib.EVP_get_cipherbyname(cipher_name)
+ != backend._ffi.NULL
+ )
+
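+# Illustrative note (an assumption about build configuration): on bindings
+# compiled with EVP_AEAD support, ChaCha20Poly1305 takes the EVP_AEAD path
+# selected above, while the AES modes use EVP_CIPHER. Callers never see the
+# split; the public class behaves the same either way:
+#
+#     from cryptography.hazmat.primitives.ciphers.aead import ChaCha20Poly1305
+#     chacha = ChaCha20Poly1305(ChaCha20Poly1305.generate_key())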
+
+def _aead_create_ctx(
+ backend: Backend,
+ cipher: _AEADTypes,
+ key: bytes,
+):
+ if _is_evp_aead_supported_cipher(backend, cipher):
+ return _evp_aead_create_ctx(backend, cipher, key)
+ else:
+ return _evp_cipher_create_ctx(backend, cipher, key)
+
+
+def _encrypt(
+ backend: Backend,
+ cipher: _AEADTypes,
+ nonce: bytes,
+ data: bytes,
+ associated_data: typing.List[bytes],
+ tag_length: int,
+ ctx: typing.Any = None,
+) -> bytes:
+ if _is_evp_aead_supported_cipher(backend, cipher):
+ return _evp_aead_encrypt(
+ backend, cipher, nonce, data, associated_data, tag_length, ctx
+ )
+ else:
+ return _evp_cipher_encrypt(
+ backend, cipher, nonce, data, associated_data, tag_length, ctx
+ )
+
+
+def _decrypt(
+ backend: Backend,
+ cipher: _AEADTypes,
+ nonce: bytes,
+ data: bytes,
+ associated_data: typing.List[bytes],
+ tag_length: int,
+ ctx: typing.Any = None,
+) -> bytes:
+ if _is_evp_aead_supported_cipher(backend, cipher):
+ return _evp_aead_decrypt(
+ backend, cipher, nonce, data, associated_data, tag_length, ctx
+ )
+ else:
+ return _evp_cipher_decrypt(
+ backend, cipher, nonce, data, associated_data, tag_length, ctx
+ )
+
+
+def _evp_aead_create_ctx(
+ backend: Backend,
+ cipher: _AEADTypes,
+ key: bytes,
+ tag_len: typing.Optional[int] = None,
+):
+ aead_cipher = _evp_aead_get_cipher(backend, cipher)
+ assert aead_cipher is not None
+ key_ptr = backend._ffi.from_buffer(key)
+ tag_len = (
+ backend._lib.EVP_AEAD_DEFAULT_TAG_LENGTH
+ if tag_len is None
+ else tag_len
+ )
+ ctx = backend._lib.Cryptography_EVP_AEAD_CTX_new(
+ aead_cipher, key_ptr, len(key), tag_len
+ )
+ backend.openssl_assert(ctx != backend._ffi.NULL)
+ ctx = backend._ffi.gc(ctx, backend._lib.EVP_AEAD_CTX_free)
+ return ctx
+
+
+def _evp_aead_get_cipher(backend: Backend, cipher: _AEADTypes):
+ from cryptography.hazmat.primitives.ciphers.aead import (
+ ChaCha20Poly1305,
+ )
+
+ # Currently only ChaCha20-Poly1305 is supported using this API
+ assert isinstance(cipher, ChaCha20Poly1305)
+ return backend._lib.EVP_aead_chacha20_poly1305()
+
+
+def _evp_aead_encrypt(
+ backend: Backend,
+ cipher: _AEADTypes,
+ nonce: bytes,
+ data: bytes,
+ associated_data: typing.List[bytes],
+ tag_length: int,
+ ctx: typing.Any,
+) -> bytes:
+ assert ctx is not None
+
+ aead_cipher = _evp_aead_get_cipher(backend, cipher)
+ assert aead_cipher is not None
+
+ out_len = backend._ffi.new("size_t *")
+ # max_out_len should be in_len plus the result of
+ # EVP_AEAD_max_overhead.
+ max_out_len = len(data) + backend._lib.EVP_AEAD_max_overhead(aead_cipher)
+ out_buf = backend._ffi.new("uint8_t[]", max_out_len)
+ data_ptr = backend._ffi.from_buffer(data)
+ nonce_ptr = backend._ffi.from_buffer(nonce)
+ aad = b"".join(associated_data)
+ aad_ptr = backend._ffi.from_buffer(aad)
+
+ res = backend._lib.EVP_AEAD_CTX_seal(
+ ctx,
+ out_buf,
+ out_len,
+ max_out_len,
+ nonce_ptr,
+ len(nonce),
+ data_ptr,
+ len(data),
+ aad_ptr,
+ len(aad),
+ )
+ backend.openssl_assert(res == 1)
+ encrypted_data = backend._ffi.buffer(out_buf, out_len[0])[:]
+ return encrypted_data
+
+
+def _evp_aead_decrypt(
+ backend: Backend,
+ cipher: _AEADTypes,
+ nonce: bytes,
+ data: bytes,
+ associated_data: typing.List[bytes],
+ tag_length: int,
+ ctx: typing.Any,
+) -> bytes:
+ if len(data) < tag_length:
+ raise InvalidTag
+
+ assert ctx is not None
+
+ out_len = backend._ffi.new("size_t *")
+    # max_out_len should be at least in_len
+ max_out_len = len(data)
+ out_buf = backend._ffi.new("uint8_t[]", max_out_len)
+ data_ptr = backend._ffi.from_buffer(data)
+ nonce_ptr = backend._ffi.from_buffer(nonce)
+ aad = b"".join(associated_data)
+ aad_ptr = backend._ffi.from_buffer(aad)
+
+ res = backend._lib.EVP_AEAD_CTX_open(
+ ctx,
+ out_buf,
+ out_len,
+ max_out_len,
+ nonce_ptr,
+ len(nonce),
+ data_ptr,
+ len(data),
+ aad_ptr,
+ len(aad),
+ )
+
+ if res == 0:
+ backend._consume_errors()
+ raise InvalidTag
+
+ decrypted_data = backend._ffi.buffer(out_buf, out_len[0])[:]
+ return decrypted_data
+
+
+_ENCRYPT = 1
+_DECRYPT = 0
+
+
+def _evp_cipher_cipher_name(cipher: _AEADTypes) -> bytes:
+ from cryptography.hazmat.primitives.ciphers.aead import (
+ AESCCM,
+ AESGCM,
+ AESOCB3,
+ AESSIV,
+ ChaCha20Poly1305,
+ )
+
+ if isinstance(cipher, ChaCha20Poly1305):
+ return b"chacha20-poly1305"
+ elif isinstance(cipher, AESCCM):
+ return f"aes-{len(cipher._key) * 8}-ccm".encode("ascii")
+ elif isinstance(cipher, AESOCB3):
+ return f"aes-{len(cipher._key) * 8}-ocb".encode("ascii")
+ elif isinstance(cipher, AESSIV):
+ return f"aes-{len(cipher._key) * 8 // 2}-siv".encode("ascii")
+ else:
+ assert isinstance(cipher, AESGCM)
+ return f"aes-{len(cipher._key) * 8}-gcm".encode("ascii")
+
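+# Worked examples of the mapping above:
+#   AESGCM with a 32-byte key -> b"aes-256-gcm"
+#   AESCCM with a 16-byte key -> b"aes-128-ccm"
+#   AESSIV with a 64-byte key -> b"aes-256-siv" (SIV keys are double-length,
+#   hence the // 2)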
+
+def _evp_cipher(cipher_name: bytes, backend: Backend):
+ if cipher_name.endswith(b"-siv"):
+ evp_cipher = backend._lib.EVP_CIPHER_fetch(
+ backend._ffi.NULL,
+ cipher_name,
+ backend._ffi.NULL,
+ )
+ backend.openssl_assert(evp_cipher != backend._ffi.NULL)
+ evp_cipher = backend._ffi.gc(evp_cipher, backend._lib.EVP_CIPHER_free)
+ else:
+ evp_cipher = backend._lib.EVP_get_cipherbyname(cipher_name)
+ backend.openssl_assert(evp_cipher != backend._ffi.NULL)
+
+ return evp_cipher
+
+
+def _evp_cipher_create_ctx(
+ backend: Backend,
+ cipher: _AEADTypes,
+ key: bytes,
+):
+ ctx = backend._lib.EVP_CIPHER_CTX_new()
+ backend.openssl_assert(ctx != backend._ffi.NULL)
+ ctx = backend._ffi.gc(ctx, backend._lib.EVP_CIPHER_CTX_free)
+ cipher_name = _evp_cipher_cipher_name(cipher)
+ evp_cipher = _evp_cipher(cipher_name, backend)
+ key_ptr = backend._ffi.from_buffer(key)
+ res = backend._lib.EVP_CipherInit_ex(
+ ctx,
+ evp_cipher,
+ backend._ffi.NULL,
+ key_ptr,
+ backend._ffi.NULL,
+ 0,
+ )
+ backend.openssl_assert(res != 0)
+ return ctx
+
+
+def _evp_cipher_aead_setup(
+ backend: Backend,
+ cipher_name: bytes,
+ key: bytes,
+ nonce: bytes,
+ tag: typing.Optional[bytes],
+ tag_len: int,
+ operation: int,
+):
+ evp_cipher = _evp_cipher(cipher_name, backend)
+ ctx = backend._lib.EVP_CIPHER_CTX_new()
+ ctx = backend._ffi.gc(ctx, backend._lib.EVP_CIPHER_CTX_free)
+ res = backend._lib.EVP_CipherInit_ex(
+ ctx,
+ evp_cipher,
+ backend._ffi.NULL,
+ backend._ffi.NULL,
+ backend._ffi.NULL,
+ int(operation == _ENCRYPT),
+ )
+ backend.openssl_assert(res != 0)
+ # CCM requires the IVLEN to be set before calling SET_TAG on decrypt
+ res = backend._lib.EVP_CIPHER_CTX_ctrl(
+ ctx,
+ backend._lib.EVP_CTRL_AEAD_SET_IVLEN,
+ len(nonce),
+ backend._ffi.NULL,
+ )
+ backend.openssl_assert(res != 0)
+ if operation == _DECRYPT:
+ assert tag is not None
+ _evp_cipher_set_tag(backend, ctx, tag)
+ elif cipher_name.endswith(b"-ccm"):
+ res = backend._lib.EVP_CIPHER_CTX_ctrl(
+ ctx,
+ backend._lib.EVP_CTRL_AEAD_SET_TAG,
+ tag_len,
+ backend._ffi.NULL,
+ )
+ backend.openssl_assert(res != 0)
+
+ nonce_ptr = backend._ffi.from_buffer(nonce)
+ key_ptr = backend._ffi.from_buffer(key)
+ res = backend._lib.EVP_CipherInit_ex(
+ ctx,
+ backend._ffi.NULL,
+ backend._ffi.NULL,
+ key_ptr,
+ nonce_ptr,
+ int(operation == _ENCRYPT),
+ )
+ backend.openssl_assert(res != 0)
+ return ctx
+
+
+def _evp_cipher_set_tag(backend, ctx, tag: bytes) -> None:
+ tag_ptr = backend._ffi.from_buffer(tag)
+ res = backend._lib.EVP_CIPHER_CTX_ctrl(
+ ctx, backend._lib.EVP_CTRL_AEAD_SET_TAG, len(tag), tag_ptr
+ )
+ backend.openssl_assert(res != 0)
+
+
+def _evp_cipher_set_nonce_operation(
+ backend, ctx, nonce: bytes, operation: int
+) -> None:
+ nonce_ptr = backend._ffi.from_buffer(nonce)
+ res = backend._lib.EVP_CipherInit_ex(
+ ctx,
+ backend._ffi.NULL,
+ backend._ffi.NULL,
+ backend._ffi.NULL,
+ nonce_ptr,
+ int(operation == _ENCRYPT),
+ )
+ backend.openssl_assert(res != 0)
+
+
+def _evp_cipher_set_length(backend: Backend, ctx, data_len: int) -> None:
+ intptr = backend._ffi.new("int *")
+ res = backend._lib.EVP_CipherUpdate(
+ ctx, backend._ffi.NULL, intptr, backend._ffi.NULL, data_len
+ )
+ backend.openssl_assert(res != 0)
+
+
+def _evp_cipher_process_aad(
+ backend: Backend, ctx, associated_data: bytes
+) -> None:
+ outlen = backend._ffi.new("int *")
+ a_data_ptr = backend._ffi.from_buffer(associated_data)
+ res = backend._lib.EVP_CipherUpdate(
+ ctx, backend._ffi.NULL, outlen, a_data_ptr, len(associated_data)
+ )
+ backend.openssl_assert(res != 0)
+
+
+def _evp_cipher_process_data(backend: Backend, ctx, data: bytes) -> bytes:
+ outlen = backend._ffi.new("int *")
+ buf = backend._ffi.new("unsigned char[]", len(data))
+ data_ptr = backend._ffi.from_buffer(data)
+ res = backend._lib.EVP_CipherUpdate(ctx, buf, outlen, data_ptr, len(data))
+ if res == 0:
+ # AES SIV can error here if the data is invalid on decrypt
+ backend._consume_errors()
+ raise InvalidTag
+ return backend._ffi.buffer(buf, outlen[0])[:]
+
+
+def _evp_cipher_encrypt(
+ backend: Backend,
+ cipher: _AEADTypes,
+ nonce: bytes,
+ data: bytes,
+ associated_data: typing.List[bytes],
+ tag_length: int,
+ ctx: typing.Any = None,
+) -> bytes:
+ from cryptography.hazmat.primitives.ciphers.aead import AESCCM, AESSIV
+
+ if ctx is None:
+ cipher_name = _evp_cipher_cipher_name(cipher)
+ ctx = _evp_cipher_aead_setup(
+ backend,
+ cipher_name,
+ cipher._key,
+ nonce,
+ None,
+ tag_length,
+ _ENCRYPT,
+ )
+ else:
+ _evp_cipher_set_nonce_operation(backend, ctx, nonce, _ENCRYPT)
+
+ # CCM requires us to pass the length of the data before processing
+ # anything.
+    # However, calling this with any other AEAD results in an error.
+ if isinstance(cipher, AESCCM):
+ _evp_cipher_set_length(backend, ctx, len(data))
+
+ for ad in associated_data:
+ _evp_cipher_process_aad(backend, ctx, ad)
+ processed_data = _evp_cipher_process_data(backend, ctx, data)
+ outlen = backend._ffi.new("int *")
+    # All AEADs we support besides OCB are streaming, so they return nothing
+    # in finalization. OCB can return up to 15 bytes (a 16-byte block minus
+    # one), so we need a buffer here too.
+ buf = backend._ffi.new("unsigned char[]", 16)
+ res = backend._lib.EVP_CipherFinal_ex(ctx, buf, outlen)
+ backend.openssl_assert(res != 0)
+ processed_data += backend._ffi.buffer(buf, outlen[0])[:]
+ tag_buf = backend._ffi.new("unsigned char[]", tag_length)
+ res = backend._lib.EVP_CIPHER_CTX_ctrl(
+ ctx, backend._lib.EVP_CTRL_AEAD_GET_TAG, tag_length, tag_buf
+ )
+ backend.openssl_assert(res != 0)
+ tag = backend._ffi.buffer(tag_buf)[:]
+
+ if isinstance(cipher, AESSIV):
+ # RFC 5297 defines the output as IV || C, where the tag we generate
+ # is the "IV" and C is the ciphertext. This is the opposite of our
+ # other AEADs, which are Ciphertext || Tag
+ backend.openssl_assert(len(tag) == 16)
+ return tag + processed_data
+ else:
+ return processed_data + tag
+
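+# Illustrative check of the layout described above (public AEAD API; not part
+# of this module): for every AEAD except SIV the tag trails the ciphertext.
+#
+#     import os
+#     from cryptography.hazmat.primitives.ciphers.aead import AESGCM
+#     key = AESGCM.generate_key(bit_length=256)
+#     ct = AESGCM(key).encrypt(os.urandom(12), b"msg", None)
+#     assert len(ct) == len(b"msg") + 16  # ciphertext || 16-byte tag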
+
+def _evp_cipher_decrypt(
+ backend: Backend,
+ cipher: _AEADTypes,
+ nonce: bytes,
+ data: bytes,
+ associated_data: typing.List[bytes],
+ tag_length: int,
+ ctx: typing.Any = None,
+) -> bytes:
+ from cryptography.hazmat.primitives.ciphers.aead import AESCCM, AESSIV
+
+ if len(data) < tag_length:
+ raise InvalidTag
+
+ if isinstance(cipher, AESSIV):
+ # RFC 5297 defines the output as IV || C, where the tag we generate
+ # is the "IV" and C is the ciphertext. This is the opposite of our
+ # other AEADs, which are Ciphertext || Tag
+ tag = data[:tag_length]
+ data = data[tag_length:]
+ else:
+ tag = data[-tag_length:]
+ data = data[:-tag_length]
+ if ctx is None:
+ cipher_name = _evp_cipher_cipher_name(cipher)
+ ctx = _evp_cipher_aead_setup(
+ backend,
+ cipher_name,
+ cipher._key,
+ nonce,
+ tag,
+ tag_length,
+ _DECRYPT,
+ )
+ else:
+ _evp_cipher_set_nonce_operation(backend, ctx, nonce, _DECRYPT)
+ _evp_cipher_set_tag(backend, ctx, tag)
+
+ # CCM requires us to pass the length of the data before processing
+ # anything.
+    # However, calling this with any other AEAD results in an error.
+ if isinstance(cipher, AESCCM):
+ _evp_cipher_set_length(backend, ctx, len(data))
+
+ for ad in associated_data:
+ _evp_cipher_process_aad(backend, ctx, ad)
+ # CCM has a different error path if the tag doesn't match. Errors are
+ # raised in Update and Final is irrelevant.
+ if isinstance(cipher, AESCCM):
+ outlen = backend._ffi.new("int *")
+ buf = backend._ffi.new("unsigned char[]", len(data))
+ d_ptr = backend._ffi.from_buffer(data)
+ res = backend._lib.EVP_CipherUpdate(ctx, buf, outlen, d_ptr, len(data))
+ if res != 1:
+ backend._consume_errors()
+ raise InvalidTag
+
+ processed_data = backend._ffi.buffer(buf, outlen[0])[:]
+ else:
+ processed_data = _evp_cipher_process_data(backend, ctx, data)
+ outlen = backend._ffi.new("int *")
+    # OCB can return up to 15 bytes (a 16-byte block minus one) in
+    # finalization
+ buf = backend._ffi.new("unsigned char[]", 16)
+ res = backend._lib.EVP_CipherFinal_ex(ctx, buf, outlen)
+ processed_data += backend._ffi.buffer(buf, outlen[0])[:]
+ if res == 0:
+ backend._consume_errors()
+ raise InvalidTag
+
+ return processed_data
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/backend.py b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/backend.py
new file mode 100644
index 00000000..f1c79008
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/backend.py
@@ -0,0 +1,1938 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import collections
+import contextlib
+import itertools
+import typing
+from contextlib import contextmanager
+
+from cryptography import utils, x509
+from cryptography.exceptions import UnsupportedAlgorithm, _Reasons
+from cryptography.hazmat.backends.openssl import aead
+from cryptography.hazmat.backends.openssl.ciphers import _CipherContext
+from cryptography.hazmat.backends.openssl.cmac import _CMACContext
+from cryptography.hazmat.backends.openssl.ec import (
+ _EllipticCurvePrivateKey,
+ _EllipticCurvePublicKey,
+)
+from cryptography.hazmat.backends.openssl.rsa import (
+ _RSAPrivateKey,
+ _RSAPublicKey,
+)
+from cryptography.hazmat.bindings._rust import openssl as rust_openssl
+from cryptography.hazmat.bindings.openssl import binding
+from cryptography.hazmat.primitives import hashes, serialization
+from cryptography.hazmat.primitives._asymmetric import AsymmetricPadding
+from cryptography.hazmat.primitives.asymmetric import (
+ dh,
+ dsa,
+ ec,
+ ed448,
+ ed25519,
+ rsa,
+ x448,
+ x25519,
+)
+from cryptography.hazmat.primitives.asymmetric.padding import (
+ MGF1,
+ OAEP,
+ PSS,
+ PKCS1v15,
+)
+from cryptography.hazmat.primitives.asymmetric.types import (
+ PrivateKeyTypes,
+ PublicKeyTypes,
+)
+from cryptography.hazmat.primitives.ciphers import (
+ BlockCipherAlgorithm,
+ CipherAlgorithm,
+)
+from cryptography.hazmat.primitives.ciphers.algorithms import (
+ AES,
+ AES128,
+ AES256,
+ ARC4,
+ SM4,
+ Camellia,
+ ChaCha20,
+ TripleDES,
+ _BlowfishInternal,
+ _CAST5Internal,
+ _IDEAInternal,
+ _SEEDInternal,
+)
+from cryptography.hazmat.primitives.ciphers.modes import (
+ CBC,
+ CFB,
+ CFB8,
+ CTR,
+ ECB,
+ GCM,
+ OFB,
+ XTS,
+ Mode,
+)
+from cryptography.hazmat.primitives.serialization import ssh
+from cryptography.hazmat.primitives.serialization.pkcs12 import (
+ PBES,
+ PKCS12Certificate,
+ PKCS12KeyAndCertificates,
+ PKCS12PrivateKeyTypes,
+ _PKCS12CATypes,
+)
+
+_MemoryBIO = collections.namedtuple("_MemoryBIO", ["bio", "char_ptr"])
+
+
+# Not actually supported, just used as a marker for some serialization tests.
+class _RC2:
+ pass
+
+
+class Backend:
+ """
+ OpenSSL API binding interfaces.
+ """
+
+ name = "openssl"
+
+ # FIPS has opinions about acceptable algorithms and key sizes, but the
+ # disallowed algorithms are still present in OpenSSL. They just error if
+ # you try to use them. To avoid that we allowlist the algorithms in
+ # FIPS 140-3. This isn't ideal, but FIPS 140-3 is trash so here we are.
+ _fips_aead = {
+ b"aes-128-ccm",
+ b"aes-192-ccm",
+ b"aes-256-ccm",
+ b"aes-128-gcm",
+ b"aes-192-gcm",
+ b"aes-256-gcm",
+ }
+ # TripleDES encryption is disallowed/deprecated throughout 2023 in
+ # FIPS 140-3. To keep it simple we denylist any use of TripleDES (TDEA).
+ _fips_ciphers = (AES,)
+ # Sometimes SHA1 is still permissible. That logic is contained
+ # within the various *_supported methods.
+ _fips_hashes = (
+ hashes.SHA224,
+ hashes.SHA256,
+ hashes.SHA384,
+ hashes.SHA512,
+ hashes.SHA512_224,
+ hashes.SHA512_256,
+ hashes.SHA3_224,
+ hashes.SHA3_256,
+ hashes.SHA3_384,
+ hashes.SHA3_512,
+ hashes.SHAKE128,
+ hashes.SHAKE256,
+ )
+ _fips_ecdh_curves = (
+ ec.SECP224R1,
+ ec.SECP256R1,
+ ec.SECP384R1,
+ ec.SECP521R1,
+ )
+ _fips_rsa_min_key_size = 2048
+ _fips_rsa_min_public_exponent = 65537
+ _fips_dsa_min_modulus = 1 << 2048
+ _fips_dh_min_key_size = 2048
+ _fips_dh_min_modulus = 1 << _fips_dh_min_key_size
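+    # For example, given the minimums above, a FIPS-enabled backend rejects
+    # a 1024-bit RSA key or a public exponent of 3 wherever these values
+    # are consulted.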
+
+ def __init__(self) -> None:
+ self._binding = binding.Binding()
+ self._ffi = self._binding.ffi
+ self._lib = self._binding.lib
+ self._fips_enabled = rust_openssl.is_fips_enabled()
+
+ self._cipher_registry: typing.Dict[
+ typing.Tuple[typing.Type[CipherAlgorithm], typing.Type[Mode]],
+ typing.Callable,
+ ] = {}
+ self._register_default_ciphers()
+ self._dh_types = [self._lib.EVP_PKEY_DH]
+ if self._lib.Cryptography_HAS_EVP_PKEY_DHX:
+ self._dh_types.append(self._lib.EVP_PKEY_DHX)
+
+ def __repr__(self) -> str:
+ return "".format(
+ self.openssl_version_text(),
+ self._fips_enabled,
+ self._binding._legacy_provider_loaded,
+ )
+
+ def openssl_assert(
+ self,
+ ok: bool,
+ errors: typing.Optional[typing.List[rust_openssl.OpenSSLError]] = None,
+ ) -> None:
+ return binding._openssl_assert(self._lib, ok, errors=errors)
+
+ def _enable_fips(self) -> None:
+ # This function enables FIPS mode for OpenSSL 3.0.0 on installs that
+ # have the FIPS provider installed properly.
+ self._binding._enable_fips()
+ assert rust_openssl.is_fips_enabled()
+ self._fips_enabled = rust_openssl.is_fips_enabled()
+
+ def openssl_version_text(self) -> str:
+ """
+ Friendly string name of the loaded OpenSSL library. This is not
+ necessarily the same version as it was compiled against.
+
+ Example: OpenSSL 1.1.1d 10 Sep 2019
+ """
+ return self._ffi.string(
+ self._lib.OpenSSL_version(self._lib.OPENSSL_VERSION)
+ ).decode("ascii")
+
+ def openssl_version_number(self) -> int:
+ return self._lib.OpenSSL_version_num()
+
+ def _evp_md_from_algorithm(self, algorithm: hashes.HashAlgorithm):
+ if algorithm.name == "blake2b" or algorithm.name == "blake2s":
+ alg = "{}{}".format(
+ algorithm.name, algorithm.digest_size * 8
+ ).encode("ascii")
+ else:
+ alg = algorithm.name.encode("ascii")
+
+ evp_md = self._lib.EVP_get_digestbyname(alg)
+ return evp_md
+
+ def _evp_md_non_null_from_algorithm(self, algorithm: hashes.HashAlgorithm):
+ evp_md = self._evp_md_from_algorithm(algorithm)
+ self.openssl_assert(evp_md != self._ffi.NULL)
+ return evp_md
+
+ def hash_supported(self, algorithm: hashes.HashAlgorithm) -> bool:
+ if self._fips_enabled and not isinstance(algorithm, self._fips_hashes):
+ return False
+
+ evp_md = self._evp_md_from_algorithm(algorithm)
+ return evp_md != self._ffi.NULL
+
+ def signature_hash_supported(
+ self, algorithm: hashes.HashAlgorithm
+ ) -> bool:
+ # Dedicated check for hashing algorithm use in message digest for
+ # signatures, e.g. RSA PKCS#1 v1.5 SHA1 (sha1WithRSAEncryption).
+ if self._fips_enabled and isinstance(algorithm, hashes.SHA1):
+ return False
+ return self.hash_supported(algorithm)
+
+ def scrypt_supported(self) -> bool:
+ if self._fips_enabled:
+ return False
+ else:
+ return self._lib.Cryptography_HAS_SCRYPT == 1
+
+ def hmac_supported(self, algorithm: hashes.HashAlgorithm) -> bool:
+ # FIPS mode still allows SHA1 for HMAC
+ if self._fips_enabled and isinstance(algorithm, hashes.SHA1):
+ return True
+
+ return self.hash_supported(algorithm)
+
+ def cipher_supported(self, cipher: CipherAlgorithm, mode: Mode) -> bool:
+ if self._fips_enabled:
+ # FIPS mode requires AES. TripleDES is disallowed/deprecated in
+ # FIPS 140-3.
+ if not isinstance(cipher, self._fips_ciphers):
+ return False
+
+ try:
+ adapter = self._cipher_registry[type(cipher), type(mode)]
+ except KeyError:
+ return False
+ evp_cipher = adapter(self, cipher, mode)
+ return self._ffi.NULL != evp_cipher
+
+ def register_cipher_adapter(self, cipher_cls, mode_cls, adapter) -> None:
+ if (cipher_cls, mode_cls) in self._cipher_registry:
+ raise ValueError(
+ "Duplicate registration for: {} {}.".format(
+ cipher_cls, mode_cls
+ )
+ )
+ self._cipher_registry[cipher_cls, mode_cls] = adapter
+
+ def _register_default_ciphers(self) -> None:
+ for cipher_cls in [AES, AES128, AES256]:
+ for mode_cls in [CBC, CTR, ECB, OFB, CFB, CFB8, GCM]:
+ self.register_cipher_adapter(
+ cipher_cls,
+ mode_cls,
+ GetCipherByName(
+ "{cipher.name}-{cipher.key_size}-{mode.name}"
+ ),
+ )
+ for mode_cls in [CBC, CTR, ECB, OFB, CFB]:
+ self.register_cipher_adapter(
+ Camellia,
+ mode_cls,
+ GetCipherByName("{cipher.name}-{cipher.key_size}-{mode.name}"),
+ )
+ for mode_cls in [CBC, CFB, CFB8, OFB]:
+ self.register_cipher_adapter(
+ TripleDES, mode_cls, GetCipherByName("des-ede3-{mode.name}")
+ )
+ self.register_cipher_adapter(
+ TripleDES, ECB, GetCipherByName("des-ede3")
+ )
+ self.register_cipher_adapter(
+ ChaCha20, type(None), GetCipherByName("chacha20")
+ )
+ self.register_cipher_adapter(AES, XTS, _get_xts_cipher)
+ for mode_cls in [ECB, CBC, OFB, CFB, CTR]:
+ self.register_cipher_adapter(
+ SM4, mode_cls, GetCipherByName("sm4-{mode.name}")
+ )
+ # Don't register legacy ciphers if they're unavailable. Hypothetically
+ # this wouldn't be necessary because we test availability by seeing if
+ # we get an EVP_CIPHER * in the _CipherContext __init__, but OpenSSL 3
+ # will return a valid pointer even though the cipher is unavailable.
+ if (
+ self._binding._legacy_provider_loaded
+ or not self._lib.CRYPTOGRAPHY_OPENSSL_300_OR_GREATER
+ ):
+ for mode_cls in [CBC, CFB, OFB, ECB]:
+ self.register_cipher_adapter(
+ _BlowfishInternal,
+ mode_cls,
+ GetCipherByName("bf-{mode.name}"),
+ )
+ for mode_cls in [CBC, CFB, OFB, ECB]:
+ self.register_cipher_adapter(
+ _SEEDInternal,
+ mode_cls,
+ GetCipherByName("seed-{mode.name}"),
+ )
+ for cipher_cls, mode_cls in itertools.product(
+ [_CAST5Internal, _IDEAInternal],
+ [CBC, OFB, CFB, ECB],
+ ):
+ self.register_cipher_adapter(
+ cipher_cls,
+ mode_cls,
+ GetCipherByName("{cipher.name}-{mode.name}"),
+ )
+ self.register_cipher_adapter(
+ ARC4, type(None), GetCipherByName("rc4")
+ )
+ # We don't actually support RC2, this is just used by some tests.
+ self.register_cipher_adapter(
+ _RC2, type(None), GetCipherByName("rc2")
+ )
+
+ def create_symmetric_encryption_ctx(
+ self, cipher: CipherAlgorithm, mode: Mode
+ ) -> _CipherContext:
+ return _CipherContext(self, cipher, mode, _CipherContext._ENCRYPT)
+
+ def create_symmetric_decryption_ctx(
+ self, cipher: CipherAlgorithm, mode: Mode
+ ) -> _CipherContext:
+ return _CipherContext(self, cipher, mode, _CipherContext._DECRYPT)
+
+ def pbkdf2_hmac_supported(self, algorithm: hashes.HashAlgorithm) -> bool:
+ return self.hmac_supported(algorithm)
+
+ def _consume_errors(self) -> typing.List[rust_openssl.OpenSSLError]:
+ return rust_openssl.capture_error_stack()
+
+ def _bn_to_int(self, bn) -> int:
+ assert bn != self._ffi.NULL
+ self.openssl_assert(not self._lib.BN_is_negative(bn))
+
+ bn_num_bytes = self._lib.BN_num_bytes(bn)
+ bin_ptr = self._ffi.new("unsigned char[]", bn_num_bytes)
+ bin_len = self._lib.BN_bn2bin(bn, bin_ptr)
+ # A zero length means the BN has value 0
+ self.openssl_assert(bin_len >= 0)
+ val = int.from_bytes(self._ffi.buffer(bin_ptr)[:bin_len], "big")
+ return val
+
+ def _int_to_bn(self, num: int):
+ """
+ Converts a python integer to a BIGNUM. The returned BIGNUM will not
+ be garbage collected (to support adding them to structs that take
+ ownership of the object). Be sure to register it for GC if it will
+ be discarded after use.
+ """
+ binary = num.to_bytes(int(num.bit_length() / 8.0 + 1), "big")
+ bn_ptr = self._lib.BN_bin2bn(binary, len(binary), self._ffi.NULL)
+ self.openssl_assert(bn_ptr != self._ffi.NULL)
+ return bn_ptr
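+
+    # Worked example of the length computation above: num = 65537 has
+    # bit_length() == 17, so int(17 / 8.0 + 1) == 3 bytes (0x01 0x00 0x01).
+    # The formula may over-allocate one leading zero byte, which BN_bin2bn
+    # tolerates.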
+
+ def generate_rsa_private_key(
+ self, public_exponent: int, key_size: int
+ ) -> rsa.RSAPrivateKey:
+ rsa._verify_rsa_parameters(public_exponent, key_size)
+
+ rsa_cdata = self._lib.RSA_new()
+ self.openssl_assert(rsa_cdata != self._ffi.NULL)
+ rsa_cdata = self._ffi.gc(rsa_cdata, self._lib.RSA_free)
+
+ bn = self._int_to_bn(public_exponent)
+ bn = self._ffi.gc(bn, self._lib.BN_free)
+
+ res = self._lib.RSA_generate_key_ex(
+ rsa_cdata, key_size, bn, self._ffi.NULL
+ )
+ self.openssl_assert(res == 1)
+ evp_pkey = self._rsa_cdata_to_evp_pkey(rsa_cdata)
+
+ # We can skip RSA key validation here since we just generated the key
+ return _RSAPrivateKey(
+ self, rsa_cdata, evp_pkey, unsafe_skip_rsa_key_validation=True
+ )
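+
+    # Illustrative usage via the public API (an assumption about the call
+    # path; the rsa module delegates generation to the active backend):
+    #
+    #     from cryptography.hazmat.primitives.asymmetric import rsa
+    #     key = rsa.generate_private_key(public_exponent=65537, key_size=2048)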
+
+ def generate_rsa_parameters_supported(
+ self, public_exponent: int, key_size: int
+ ) -> bool:
+ return (
+ public_exponent >= 3
+ and public_exponent & 1 != 0
+ and key_size >= 512
+ )
+
+ def load_rsa_private_numbers(
+ self,
+ numbers: rsa.RSAPrivateNumbers,
+ unsafe_skip_rsa_key_validation: bool,
+ ) -> rsa.RSAPrivateKey:
+ rsa._check_private_key_components(
+ numbers.p,
+ numbers.q,
+ numbers.d,
+ numbers.dmp1,
+ numbers.dmq1,
+ numbers.iqmp,
+ numbers.public_numbers.e,
+ numbers.public_numbers.n,
+ )
+ rsa_cdata = self._lib.RSA_new()
+ self.openssl_assert(rsa_cdata != self._ffi.NULL)
+ rsa_cdata = self._ffi.gc(rsa_cdata, self._lib.RSA_free)
+ p = self._int_to_bn(numbers.p)
+ q = self._int_to_bn(numbers.q)
+ d = self._int_to_bn(numbers.d)
+ dmp1 = self._int_to_bn(numbers.dmp1)
+ dmq1 = self._int_to_bn(numbers.dmq1)
+ iqmp = self._int_to_bn(numbers.iqmp)
+ e = self._int_to_bn(numbers.public_numbers.e)
+ n = self._int_to_bn(numbers.public_numbers.n)
+ res = self._lib.RSA_set0_factors(rsa_cdata, p, q)
+ self.openssl_assert(res == 1)
+ res = self._lib.RSA_set0_key(rsa_cdata, n, e, d)
+ self.openssl_assert(res == 1)
+ res = self._lib.RSA_set0_crt_params(rsa_cdata, dmp1, dmq1, iqmp)
+ self.openssl_assert(res == 1)
+ evp_pkey = self._rsa_cdata_to_evp_pkey(rsa_cdata)
+
+ return _RSAPrivateKey(
+ self,
+ rsa_cdata,
+ evp_pkey,
+ unsafe_skip_rsa_key_validation=unsafe_skip_rsa_key_validation,
+ )
+
+ def load_rsa_public_numbers(
+ self, numbers: rsa.RSAPublicNumbers
+ ) -> rsa.RSAPublicKey:
+ rsa._check_public_key_components(numbers.e, numbers.n)
+ rsa_cdata = self._lib.RSA_new()
+ self.openssl_assert(rsa_cdata != self._ffi.NULL)
+ rsa_cdata = self._ffi.gc(rsa_cdata, self._lib.RSA_free)
+ e = self._int_to_bn(numbers.e)
+ n = self._int_to_bn(numbers.n)
+ res = self._lib.RSA_set0_key(rsa_cdata, n, e, self._ffi.NULL)
+ self.openssl_assert(res == 1)
+ evp_pkey = self._rsa_cdata_to_evp_pkey(rsa_cdata)
+
+ return _RSAPublicKey(self, rsa_cdata, evp_pkey)
+
+ def _create_evp_pkey_gc(self):
+ evp_pkey = self._lib.EVP_PKEY_new()
+ self.openssl_assert(evp_pkey != self._ffi.NULL)
+ evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free)
+ return evp_pkey
+
+ def _rsa_cdata_to_evp_pkey(self, rsa_cdata):
+ evp_pkey = self._create_evp_pkey_gc()
+ res = self._lib.EVP_PKEY_set1_RSA(evp_pkey, rsa_cdata)
+ self.openssl_assert(res == 1)
+ return evp_pkey
+
+ def _bytes_to_bio(self, data: bytes) -> _MemoryBIO:
+ """
+ Return a _MemoryBIO namedtuple of (BIO, char*).
+
+        The char* is the storage for the BIO, and it must stay alive until
+        the BIO is no longer in use.
+ """
+ data_ptr = self._ffi.from_buffer(data)
+ bio = self._lib.BIO_new_mem_buf(data_ptr, len(data))
+ self.openssl_assert(bio != self._ffi.NULL)
+
+ return _MemoryBIO(self._ffi.gc(bio, self._lib.BIO_free), data_ptr)
+
+ def _create_mem_bio_gc(self):
+ """
+ Creates an empty memory BIO.
+ """
+ bio_method = self._lib.BIO_s_mem()
+ self.openssl_assert(bio_method != self._ffi.NULL)
+ bio = self._lib.BIO_new(bio_method)
+ self.openssl_assert(bio != self._ffi.NULL)
+ bio = self._ffi.gc(bio, self._lib.BIO_free)
+ return bio
+
+ def _read_mem_bio(self, bio) -> bytes:
+ """
+ Reads a memory BIO. This only works on memory BIOs.
+ """
+ buf = self._ffi.new("char **")
+ buf_len = self._lib.BIO_get_mem_data(bio, buf)
+ self.openssl_assert(buf_len > 0)
+ self.openssl_assert(buf[0] != self._ffi.NULL)
+ bio_data = self._ffi.buffer(buf[0], buf_len)[:]
+ return bio_data
+
+ def _evp_pkey_to_private_key(
+ self, evp_pkey, unsafe_skip_rsa_key_validation: bool
+ ) -> PrivateKeyTypes:
+ """
+ Return the appropriate type of PrivateKey given an evp_pkey cdata
+ pointer.
+ """
+
+ key_type = self._lib.EVP_PKEY_id(evp_pkey)
+
+ if key_type == self._lib.EVP_PKEY_RSA:
+ rsa_cdata = self._lib.EVP_PKEY_get1_RSA(evp_pkey)
+ self.openssl_assert(rsa_cdata != self._ffi.NULL)
+ rsa_cdata = self._ffi.gc(rsa_cdata, self._lib.RSA_free)
+ return _RSAPrivateKey(
+ self,
+ rsa_cdata,
+ evp_pkey,
+ unsafe_skip_rsa_key_validation=unsafe_skip_rsa_key_validation,
+ )
+ elif (
+ key_type == self._lib.EVP_PKEY_RSA_PSS
+ and not self._lib.CRYPTOGRAPHY_IS_LIBRESSL
+ and not self._lib.CRYPTOGRAPHY_IS_BORINGSSL
+ and not self._lib.CRYPTOGRAPHY_OPENSSL_LESS_THAN_111E
+ ):
+ # At the moment the way we handle RSA PSS keys is to strip the
+            # PSS constraints from them and treat them as normal RSA keys.
+ # Unfortunately the RSA * itself tracks this data so we need to
+ # extract, serialize, and reload it without the constraints.
+ rsa_cdata = self._lib.EVP_PKEY_get1_RSA(evp_pkey)
+ self.openssl_assert(rsa_cdata != self._ffi.NULL)
+ rsa_cdata = self._ffi.gc(rsa_cdata, self._lib.RSA_free)
+ bio = self._create_mem_bio_gc()
+ res = self._lib.i2d_RSAPrivateKey_bio(bio, rsa_cdata)
+ self.openssl_assert(res == 1)
+ return self.load_der_private_key(
+ self._read_mem_bio(bio),
+ password=None,
+ unsafe_skip_rsa_key_validation=unsafe_skip_rsa_key_validation,
+ )
+ elif key_type == self._lib.EVP_PKEY_DSA:
+ return rust_openssl.dsa.private_key_from_ptr(
+ int(self._ffi.cast("uintptr_t", evp_pkey))
+ )
+ elif key_type == self._lib.EVP_PKEY_EC:
+ ec_cdata = self._lib.EVP_PKEY_get1_EC_KEY(evp_pkey)
+ self.openssl_assert(ec_cdata != self._ffi.NULL)
+ ec_cdata = self._ffi.gc(ec_cdata, self._lib.EC_KEY_free)
+ return _EllipticCurvePrivateKey(self, ec_cdata, evp_pkey)
+ elif key_type in self._dh_types:
+ return rust_openssl.dh.private_key_from_ptr(
+ int(self._ffi.cast("uintptr_t", evp_pkey))
+ )
+ elif key_type == getattr(self._lib, "EVP_PKEY_ED25519", None):
+ # EVP_PKEY_ED25519 is not present in CRYPTOGRAPHY_IS_LIBRESSL
+ return rust_openssl.ed25519.private_key_from_ptr(
+ int(self._ffi.cast("uintptr_t", evp_pkey))
+ )
+ elif key_type == getattr(self._lib, "EVP_PKEY_X448", None):
+ # EVP_PKEY_X448 is not present in CRYPTOGRAPHY_IS_LIBRESSL
+ return rust_openssl.x448.private_key_from_ptr(
+ int(self._ffi.cast("uintptr_t", evp_pkey))
+ )
+ elif key_type == self._lib.EVP_PKEY_X25519:
+ return rust_openssl.x25519.private_key_from_ptr(
+ int(self._ffi.cast("uintptr_t", evp_pkey))
+ )
+ elif key_type == getattr(self._lib, "EVP_PKEY_ED448", None):
+ # EVP_PKEY_ED448 is not present in CRYPTOGRAPHY_IS_LIBRESSL
+ return rust_openssl.ed448.private_key_from_ptr(
+ int(self._ffi.cast("uintptr_t", evp_pkey))
+ )
+ else:
+ raise UnsupportedAlgorithm("Unsupported key type.")
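+
+    # The dispatch above is reached through the load_* entry points; an
+    # illustrative round trip (public API; `pem_data` is assumed):
+    #
+    #     from cryptography.hazmat.primitives import serialization
+    #     key = serialization.load_pem_private_key(pem_data, password=None)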
+
+ def _evp_pkey_to_public_key(self, evp_pkey) -> PublicKeyTypes:
+ """
+ Return the appropriate type of PublicKey given an evp_pkey cdata
+ pointer.
+ """
+
+ key_type = self._lib.EVP_PKEY_id(evp_pkey)
+
+ if key_type == self._lib.EVP_PKEY_RSA:
+ rsa_cdata = self._lib.EVP_PKEY_get1_RSA(evp_pkey)
+ self.openssl_assert(rsa_cdata != self._ffi.NULL)
+ rsa_cdata = self._ffi.gc(rsa_cdata, self._lib.RSA_free)
+ return _RSAPublicKey(self, rsa_cdata, evp_pkey)
+ elif (
+ key_type == self._lib.EVP_PKEY_RSA_PSS
+ and not self._lib.CRYPTOGRAPHY_IS_LIBRESSL
+ and not self._lib.CRYPTOGRAPHY_IS_BORINGSSL
+ and not self._lib.CRYPTOGRAPHY_OPENSSL_LESS_THAN_111E
+ ):
+ rsa_cdata = self._lib.EVP_PKEY_get1_RSA(evp_pkey)
+ self.openssl_assert(rsa_cdata != self._ffi.NULL)
+ rsa_cdata = self._ffi.gc(rsa_cdata, self._lib.RSA_free)
+ bio = self._create_mem_bio_gc()
+ res = self._lib.i2d_RSAPublicKey_bio(bio, rsa_cdata)
+ self.openssl_assert(res == 1)
+ return self.load_der_public_key(self._read_mem_bio(bio))
+ elif key_type == self._lib.EVP_PKEY_DSA:
+ return rust_openssl.dsa.public_key_from_ptr(
+ int(self._ffi.cast("uintptr_t", evp_pkey))
+ )
+ elif key_type == self._lib.EVP_PKEY_EC:
+ ec_cdata = self._lib.EVP_PKEY_get1_EC_KEY(evp_pkey)
+ if ec_cdata == self._ffi.NULL:
+ errors = self._consume_errors()
+ raise ValueError("Unable to load EC key", errors)
+ ec_cdata = self._ffi.gc(ec_cdata, self._lib.EC_KEY_free)
+ return _EllipticCurvePublicKey(self, ec_cdata, evp_pkey)
+ elif key_type in self._dh_types:
+ return rust_openssl.dh.public_key_from_ptr(
+ int(self._ffi.cast("uintptr_t", evp_pkey))
+ )
+ elif key_type == getattr(self._lib, "EVP_PKEY_ED25519", None):
+ # EVP_PKEY_ED25519 is not present in CRYPTOGRAPHY_IS_LIBRESSL
+ return rust_openssl.ed25519.public_key_from_ptr(
+ int(self._ffi.cast("uintptr_t", evp_pkey))
+ )
+ elif key_type == getattr(self._lib, "EVP_PKEY_X448", None):
+ # EVP_PKEY_X448 is not present in CRYPTOGRAPHY_IS_LIBRESSL
+ return rust_openssl.x448.public_key_from_ptr(
+ int(self._ffi.cast("uintptr_t", evp_pkey))
+ )
+ elif key_type == self._lib.EVP_PKEY_X25519:
+ return rust_openssl.x25519.public_key_from_ptr(
+ int(self._ffi.cast("uintptr_t", evp_pkey))
+ )
+ elif key_type == getattr(self._lib, "EVP_PKEY_ED448", None):
+ # EVP_PKEY_ED448 is not present in CRYPTOGRAPHY_IS_LIBRESSL
+ return rust_openssl.ed448.public_key_from_ptr(
+ int(self._ffi.cast("uintptr_t", evp_pkey))
+ )
+ else:
+ raise UnsupportedAlgorithm("Unsupported key type.")
+
+ def _oaep_hash_supported(self, algorithm: hashes.HashAlgorithm) -> bool:
+ if self._fips_enabled and isinstance(algorithm, hashes.SHA1):
+ return False
+
+ return isinstance(
+ algorithm,
+ (
+ hashes.SHA1,
+ hashes.SHA224,
+ hashes.SHA256,
+ hashes.SHA384,
+ hashes.SHA512,
+ ),
+ )
+
+ def rsa_padding_supported(self, padding: AsymmetricPadding) -> bool:
+ if isinstance(padding, PKCS1v15):
+ return True
+ elif isinstance(padding, PSS) and isinstance(padding._mgf, MGF1):
+ # SHA1 is permissible in MGF1 in FIPS even when SHA1 is blocked
+ # as signature algorithm.
+ if self._fips_enabled and isinstance(
+ padding._mgf._algorithm, hashes.SHA1
+ ):
+ return True
+ else:
+ return self.hash_supported(padding._mgf._algorithm)
+ elif isinstance(padding, OAEP) and isinstance(padding._mgf, MGF1):
+ return self._oaep_hash_supported(
+ padding._mgf._algorithm
+ ) and self._oaep_hash_supported(padding._algorithm)
+ else:
+ return False
+
+ def rsa_encryption_supported(self, padding: AsymmetricPadding) -> bool:
+ if self._fips_enabled and isinstance(padding, PKCS1v15):
+ return False
+ else:
+ return self.rsa_padding_supported(padding)
+
+ def generate_dsa_parameters(self, key_size: int) -> dsa.DSAParameters:
+ if key_size not in (1024, 2048, 3072, 4096):
+ raise ValueError(
+ "Key size must be 1024, 2048, 3072, or 4096 bits."
+ )
+
+ return rust_openssl.dsa.generate_parameters(key_size)
+
+ def generate_dsa_private_key(
+ self, parameters: dsa.DSAParameters
+ ) -> dsa.DSAPrivateKey:
+ return parameters.generate_private_key()
+
+ def generate_dsa_private_key_and_parameters(
+ self, key_size: int
+ ) -> dsa.DSAPrivateKey:
+ parameters = self.generate_dsa_parameters(key_size)
+ return self.generate_dsa_private_key(parameters)
+
+ def load_dsa_private_numbers(
+ self, numbers: dsa.DSAPrivateNumbers
+ ) -> dsa.DSAPrivateKey:
+ dsa._check_dsa_private_numbers(numbers)
+ return rust_openssl.dsa.from_private_numbers(numbers)
+
+ def load_dsa_public_numbers(
+ self, numbers: dsa.DSAPublicNumbers
+ ) -> dsa.DSAPublicKey:
+ dsa._check_dsa_parameters(numbers.parameter_numbers)
+ return rust_openssl.dsa.from_public_numbers(numbers)
+
+ def load_dsa_parameter_numbers(
+ self, numbers: dsa.DSAParameterNumbers
+ ) -> dsa.DSAParameters:
+ dsa._check_dsa_parameters(numbers)
+ return rust_openssl.dsa.from_parameter_numbers(numbers)
+
+ def dsa_supported(self) -> bool:
+ return (
+ not self._lib.CRYPTOGRAPHY_IS_BORINGSSL and not self._fips_enabled
+ )
+
+ def dsa_hash_supported(self, algorithm: hashes.HashAlgorithm) -> bool:
+ if not self.dsa_supported():
+ return False
+ return self.signature_hash_supported(algorithm)
+
+ def cmac_algorithm_supported(self, algorithm) -> bool:
+ return self.cipher_supported(
+ algorithm, CBC(b"\x00" * algorithm.block_size)
+ )
+
+ def create_cmac_ctx(self, algorithm: BlockCipherAlgorithm) -> _CMACContext:
+ return _CMACContext(self, algorithm)
+
+ def load_pem_private_key(
+ self,
+ data: bytes,
+ password: typing.Optional[bytes],
+ unsafe_skip_rsa_key_validation: bool,
+ ) -> PrivateKeyTypes:
+ return self._load_key(
+ self._lib.PEM_read_bio_PrivateKey,
+ data,
+ password,
+ unsafe_skip_rsa_key_validation,
+ )
+
+ def load_pem_public_key(self, data: bytes) -> PublicKeyTypes:
+ mem_bio = self._bytes_to_bio(data)
+ # In OpenSSL 3.0.x the PEM_read_bio_PUBKEY function will invoke
+ # the default password callback if you pass an encrypted private
+ # key. This is very, very, very bad as the default callback can
+ # trigger an interactive console prompt, which will hang the
+ # Python process. We therefore provide our own callback to
+ # catch this and error out properly.
+ userdata = self._ffi.new("CRYPTOGRAPHY_PASSWORD_DATA *")
+ evp_pkey = self._lib.PEM_read_bio_PUBKEY(
+ mem_bio.bio,
+ self._ffi.NULL,
+ self._ffi.addressof(
+ self._lib._original_lib, "Cryptography_pem_password_cb"
+ ),
+ userdata,
+ )
+ if evp_pkey != self._ffi.NULL:
+ evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free)
+ return self._evp_pkey_to_public_key(evp_pkey)
+ else:
+ # It's not a (RSA/DSA/ECDSA) subjectPublicKeyInfo, but we still
+ # need to check to see if it is a pure PKCS1 RSA public key (not
+ # embedded in a subjectPublicKeyInfo)
+ self._consume_errors()
+ res = self._lib.BIO_reset(mem_bio.bio)
+ self.openssl_assert(res == 1)
+ rsa_cdata = self._lib.PEM_read_bio_RSAPublicKey(
+ mem_bio.bio,
+ self._ffi.NULL,
+ self._ffi.addressof(
+ self._lib._original_lib, "Cryptography_pem_password_cb"
+ ),
+ userdata,
+ )
+ if rsa_cdata != self._ffi.NULL:
+ rsa_cdata = self._ffi.gc(rsa_cdata, self._lib.RSA_free)
+ evp_pkey = self._rsa_cdata_to_evp_pkey(rsa_cdata)
+ return _RSAPublicKey(self, rsa_cdata, evp_pkey)
+ else:
+ self._handle_key_loading_error()
+
+ def load_pem_parameters(self, data: bytes) -> dh.DHParameters:
+ return rust_openssl.dh.from_pem_parameters(data)
+
+ def load_der_private_key(
+ self,
+ data: bytes,
+ password: typing.Optional[bytes],
+ unsafe_skip_rsa_key_validation: bool,
+ ) -> PrivateKeyTypes:
+ # OpenSSL has a function called d2i_AutoPrivateKey that in theory
+ # handles this automatically, however it doesn't handle encrypted
+ # private keys. Instead we try to load the key two different ways.
+ # First we'll try to load it as a traditional key.
+ bio_data = self._bytes_to_bio(data)
+ key = self._evp_pkey_from_der_traditional_key(bio_data, password)
+ if key:
+ return self._evp_pkey_to_private_key(
+ key, unsafe_skip_rsa_key_validation
+ )
+ else:
+ # Finally we try to load it with the method that handles encrypted
+ # PKCS8 properly.
+ return self._load_key(
+ self._lib.d2i_PKCS8PrivateKey_bio,
+ data,
+ password,
+ unsafe_skip_rsa_key_validation,
+ )
+
+ def _evp_pkey_from_der_traditional_key(self, bio_data, password):
+ key = self._lib.d2i_PrivateKey_bio(bio_data.bio, self._ffi.NULL)
+ if key != self._ffi.NULL:
+ key = self._ffi.gc(key, self._lib.EVP_PKEY_free)
+ if password is not None:
+ raise TypeError(
+ "Password was given but private key is not encrypted."
+ )
+
+ return key
+ else:
+ self._consume_errors()
+ return None
+
+ def load_der_public_key(self, data: bytes) -> PublicKeyTypes:
+ mem_bio = self._bytes_to_bio(data)
+ evp_pkey = self._lib.d2i_PUBKEY_bio(mem_bio.bio, self._ffi.NULL)
+ if evp_pkey != self._ffi.NULL:
+ evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free)
+ return self._evp_pkey_to_public_key(evp_pkey)
+ else:
+ # It's not a (RSA/DSA/ECDSA) subjectPublicKeyInfo, but we still
+ # need to check to see if it is a pure PKCS1 RSA public key (not
+ # embedded in a subjectPublicKeyInfo)
+ self._consume_errors()
+ res = self._lib.BIO_reset(mem_bio.bio)
+ self.openssl_assert(res == 1)
+ rsa_cdata = self._lib.d2i_RSAPublicKey_bio(
+ mem_bio.bio, self._ffi.NULL
+ )
+ if rsa_cdata != self._ffi.NULL:
+ rsa_cdata = self._ffi.gc(rsa_cdata, self._lib.RSA_free)
+ evp_pkey = self._rsa_cdata_to_evp_pkey(rsa_cdata)
+ return _RSAPublicKey(self, rsa_cdata, evp_pkey)
+ else:
+ self._handle_key_loading_error()
+
+ def load_der_parameters(self, data: bytes) -> dh.DHParameters:
+ return rust_openssl.dh.from_der_parameters(data)
+
+ def _cert2ossl(self, cert: x509.Certificate) -> typing.Any:
+ data = cert.public_bytes(serialization.Encoding.DER)
+ mem_bio = self._bytes_to_bio(data)
+ x509 = self._lib.d2i_X509_bio(mem_bio.bio, self._ffi.NULL)
+ self.openssl_assert(x509 != self._ffi.NULL)
+ x509 = self._ffi.gc(x509, self._lib.X509_free)
+ return x509
+
+ def _ossl2cert(self, x509_ptr: typing.Any) -> x509.Certificate:
+ bio = self._create_mem_bio_gc()
+ res = self._lib.i2d_X509_bio(bio, x509_ptr)
+ self.openssl_assert(res == 1)
+ return x509.load_der_x509_certificate(self._read_mem_bio(bio))
+
+ def _key2ossl(self, key: PKCS12PrivateKeyTypes) -> typing.Any:
+ data = key.private_bytes(
+ serialization.Encoding.DER,
+ serialization.PrivateFormat.PKCS8,
+ serialization.NoEncryption(),
+ )
+ mem_bio = self._bytes_to_bio(data)
+
+ evp_pkey = self._lib.d2i_PrivateKey_bio(
+ mem_bio.bio,
+ self._ffi.NULL,
+ )
+ self.openssl_assert(evp_pkey != self._ffi.NULL)
+ return self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free)
+
+ def _load_key(
+ self, openssl_read_func, data, password, unsafe_skip_rsa_key_validation
+ ) -> PrivateKeyTypes:
+ mem_bio = self._bytes_to_bio(data)
+
+ userdata = self._ffi.new("CRYPTOGRAPHY_PASSWORD_DATA *")
+ if password is not None:
+ utils._check_byteslike("password", password)
+ password_ptr = self._ffi.from_buffer(password)
+ userdata.password = password_ptr
+ userdata.length = len(password)
+
+ evp_pkey = openssl_read_func(
+ mem_bio.bio,
+ self._ffi.NULL,
+ self._ffi.addressof(
+ self._lib._original_lib, "Cryptography_pem_password_cb"
+ ),
+ userdata,
+ )
+
+ if evp_pkey == self._ffi.NULL:
+ if userdata.error != 0:
+ self._consume_errors()
+ if userdata.error == -1:
+ raise TypeError(
+ "Password was not given but private key is encrypted"
+ )
+ else:
+ assert userdata.error == -2
+ raise ValueError(
+ "Passwords longer than {} bytes are not supported "
+ "by this backend.".format(userdata.maxsize - 1)
+ )
+ else:
+ self._handle_key_loading_error()
+
+ evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free)
+
+ if password is not None and userdata.called == 0:
+ raise TypeError(
+ "Password was given but private key is not encrypted."
+ )
+
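+        # userdata.called counts invocations of the password callback:
+        # exactly one when a password was supplied and consumed, zero when
+        # the key was not encrypted.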
+ assert (
+ password is not None and userdata.called == 1
+ ) or password is None
+
+ return self._evp_pkey_to_private_key(
+ evp_pkey, unsafe_skip_rsa_key_validation
+ )
+
+ def _handle_key_loading_error(self) -> typing.NoReturn:
+ errors = self._consume_errors()
+
+ if not errors:
+ raise ValueError(
+ "Could not deserialize key data. The data may be in an "
+ "incorrect format or it may be encrypted with an unsupported "
+ "algorithm."
+ )
+
+ elif (
+ errors[0]._lib_reason_match(
+ self._lib.ERR_LIB_EVP, self._lib.EVP_R_BAD_DECRYPT
+ )
+ or errors[0]._lib_reason_match(
+ self._lib.ERR_LIB_PKCS12,
+ self._lib.PKCS12_R_PKCS12_CIPHERFINAL_ERROR,
+ )
+ or (
+ self._lib.Cryptography_HAS_PROVIDERS
+ and errors[0]._lib_reason_match(
+ self._lib.ERR_LIB_PROV,
+ self._lib.PROV_R_BAD_DECRYPT,
+ )
+ )
+ ):
+ raise ValueError("Bad decrypt. Incorrect password?")
+
+ elif any(
+ error._lib_reason_match(
+ self._lib.ERR_LIB_EVP,
+ self._lib.EVP_R_UNSUPPORTED_PRIVATE_KEY_ALGORITHM,
+ )
+ for error in errors
+ ):
+ raise ValueError("Unsupported public key algorithm.")
+
+ else:
+ raise ValueError(
+ "Could not deserialize key data. The data may be in an "
+ "incorrect format, it may be encrypted with an unsupported "
+ "algorithm, or it may be an unsupported key type (e.g. EC "
+ "curves with explicit parameters).",
+ errors,
+ )
+
+ def elliptic_curve_supported(self, curve: ec.EllipticCurve) -> bool:
+ try:
+ curve_nid = self._elliptic_curve_to_nid(curve)
+ except UnsupportedAlgorithm:
+ curve_nid = self._lib.NID_undef
+
+ group = self._lib.EC_GROUP_new_by_curve_name(curve_nid)
+
+ if group == self._ffi.NULL:
+ self._consume_errors()
+ return False
+ else:
+ self.openssl_assert(curve_nid != self._lib.NID_undef)
+ self._lib.EC_GROUP_free(group)
+ return True
+
+ def elliptic_curve_signature_algorithm_supported(
+ self,
+ signature_algorithm: ec.EllipticCurveSignatureAlgorithm,
+ curve: ec.EllipticCurve,
+ ) -> bool:
+ # We only support ECDSA right now.
+ if not isinstance(signature_algorithm, ec.ECDSA):
+ return False
+
+ return self.elliptic_curve_supported(curve)
+
+ def generate_elliptic_curve_private_key(
+ self, curve: ec.EllipticCurve
+ ) -> ec.EllipticCurvePrivateKey:
+ """
+ Generate a new private key on the named curve.
+ """
+
+ if self.elliptic_curve_supported(curve):
+ ec_cdata = self._ec_key_new_by_curve(curve)
+
+ res = self._lib.EC_KEY_generate_key(ec_cdata)
+ self.openssl_assert(res == 1)
+
+ evp_pkey = self._ec_cdata_to_evp_pkey(ec_cdata)
+
+ return _EllipticCurvePrivateKey(self, ec_cdata, evp_pkey)
+ else:
+ raise UnsupportedAlgorithm(
+ f"Backend object does not support {curve.name}.",
+ _Reasons.UNSUPPORTED_ELLIPTIC_CURVE,
+ )
+
+ def load_elliptic_curve_private_numbers(
+ self, numbers: ec.EllipticCurvePrivateNumbers
+ ) -> ec.EllipticCurvePrivateKey:
+ public = numbers.public_numbers
+
+ ec_cdata = self._ec_key_new_by_curve(public.curve)
+
+ private_value = self._ffi.gc(
+ self._int_to_bn(numbers.private_value), self._lib.BN_clear_free
+ )
+ res = self._lib.EC_KEY_set_private_key(ec_cdata, private_value)
+ if res != 1:
+ self._consume_errors()
+ raise ValueError("Invalid EC key.")
+
+ with self._tmp_bn_ctx() as bn_ctx:
+ self._ec_key_set_public_key_affine_coordinates(
+ ec_cdata, public.x, public.y, bn_ctx
+ )
+ # derive the expected public point and compare it to the one we
+ # just set based on the values we were given. If they don't match
+ # this isn't a valid key pair.
+ group = self._lib.EC_KEY_get0_group(ec_cdata)
+ self.openssl_assert(group != self._ffi.NULL)
+            set_point = self._lib.EC_KEY_get0_public_key(ec_cdata)
+ self.openssl_assert(set_point != self._ffi.NULL)
+ computed_point = self._lib.EC_POINT_new(group)
+ self.openssl_assert(computed_point != self._ffi.NULL)
+ computed_point = self._ffi.gc(
+ computed_point, self._lib.EC_POINT_free
+ )
+ res = self._lib.EC_POINT_mul(
+ group,
+ computed_point,
+ private_value,
+ self._ffi.NULL,
+ self._ffi.NULL,
+ bn_ctx,
+ )
+ self.openssl_assert(res == 1)
+ if (
+ self._lib.EC_POINT_cmp(
+ group, set_point, computed_point, bn_ctx
+ )
+ != 0
+ ):
+ raise ValueError("Invalid EC key.")
+
+ evp_pkey = self._ec_cdata_to_evp_pkey(ec_cdata)
+
+ return _EllipticCurvePrivateKey(self, ec_cdata, evp_pkey)
+
+ def load_elliptic_curve_public_numbers(
+ self, numbers: ec.EllipticCurvePublicNumbers
+ ) -> ec.EllipticCurvePublicKey:
+ ec_cdata = self._ec_key_new_by_curve(numbers.curve)
+ with self._tmp_bn_ctx() as bn_ctx:
+ self._ec_key_set_public_key_affine_coordinates(
+ ec_cdata, numbers.x, numbers.y, bn_ctx
+ )
+ evp_pkey = self._ec_cdata_to_evp_pkey(ec_cdata)
+
+ return _EllipticCurvePublicKey(self, ec_cdata, evp_pkey)
+
+ def load_elliptic_curve_public_bytes(
+ self, curve: ec.EllipticCurve, point_bytes: bytes
+ ) -> ec.EllipticCurvePublicKey:
+ ec_cdata = self._ec_key_new_by_curve(curve)
+ group = self._lib.EC_KEY_get0_group(ec_cdata)
+ self.openssl_assert(group != self._ffi.NULL)
+ point = self._lib.EC_POINT_new(group)
+ self.openssl_assert(point != self._ffi.NULL)
+ point = self._ffi.gc(point, self._lib.EC_POINT_free)
+ with self._tmp_bn_ctx() as bn_ctx:
+ res = self._lib.EC_POINT_oct2point(
+ group, point, point_bytes, len(point_bytes), bn_ctx
+ )
+ if res != 1:
+ self._consume_errors()
+ raise ValueError("Invalid public bytes for the given curve")
+
+ res = self._lib.EC_KEY_set_public_key(ec_cdata, point)
+ self.openssl_assert(res == 1)
+ evp_pkey = self._ec_cdata_to_evp_pkey(ec_cdata)
+ return _EllipticCurvePublicKey(self, ec_cdata, evp_pkey)
+
+ def derive_elliptic_curve_private_key(
+ self, private_value: int, curve: ec.EllipticCurve
+ ) -> ec.EllipticCurvePrivateKey:
+ ec_cdata = self._ec_key_new_by_curve(curve)
+
+ group = self._lib.EC_KEY_get0_group(ec_cdata)
+ self.openssl_assert(group != self._ffi.NULL)
+
+ point = self._lib.EC_POINT_new(group)
+ self.openssl_assert(point != self._ffi.NULL)
+ point = self._ffi.gc(point, self._lib.EC_POINT_free)
+
+ value = self._int_to_bn(private_value)
+ value = self._ffi.gc(value, self._lib.BN_clear_free)
+
+ with self._tmp_bn_ctx() as bn_ctx:
+ res = self._lib.EC_POINT_mul(
+ group, point, value, self._ffi.NULL, self._ffi.NULL, bn_ctx
+ )
+ self.openssl_assert(res == 1)
+
+ bn_x = self._lib.BN_CTX_get(bn_ctx)
+ bn_y = self._lib.BN_CTX_get(bn_ctx)
+
+ res = self._lib.EC_POINT_get_affine_coordinates(
+ group, point, bn_x, bn_y, bn_ctx
+ )
+ if res != 1:
+ self._consume_errors()
+ raise ValueError("Unable to derive key from private_value")
+
+ res = self._lib.EC_KEY_set_public_key(ec_cdata, point)
+ self.openssl_assert(res == 1)
+ private = self._int_to_bn(private_value)
+ private = self._ffi.gc(private, self._lib.BN_clear_free)
+ res = self._lib.EC_KEY_set_private_key(ec_cdata, private)
+ self.openssl_assert(res == 1)
+
+ evp_pkey = self._ec_cdata_to_evp_pkey(ec_cdata)
+
+ return _EllipticCurvePrivateKey(self, ec_cdata, evp_pkey)
+
+ def _ec_key_new_by_curve(self, curve: ec.EllipticCurve):
+ curve_nid = self._elliptic_curve_to_nid(curve)
+ return self._ec_key_new_by_curve_nid(curve_nid)
+
+ def _ec_key_new_by_curve_nid(self, curve_nid: int):
+ ec_cdata = self._lib.EC_KEY_new_by_curve_name(curve_nid)
+ self.openssl_assert(ec_cdata != self._ffi.NULL)
+ return self._ffi.gc(ec_cdata, self._lib.EC_KEY_free)
+
+ def elliptic_curve_exchange_algorithm_supported(
+ self, algorithm: ec.ECDH, curve: ec.EllipticCurve
+ ) -> bool:
+ if self._fips_enabled and not isinstance(
+ curve, self._fips_ecdh_curves
+ ):
+ return False
+
+ return self.elliptic_curve_supported(curve) and isinstance(
+ algorithm, ec.ECDH
+ )
+
+ def _ec_cdata_to_evp_pkey(self, ec_cdata):
+ evp_pkey = self._create_evp_pkey_gc()
+ res = self._lib.EVP_PKEY_set1_EC_KEY(evp_pkey, ec_cdata)
+ self.openssl_assert(res == 1)
+ return evp_pkey
+
+ def _elliptic_curve_to_nid(self, curve: ec.EllipticCurve) -> int:
+ """
+ Get the NID for a curve name.
+ """
+
+ curve_aliases = {"secp192r1": "prime192v1", "secp256r1": "prime256v1"}
+
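+        # For example, ec.SECP256R1 reports its name as "secp256r1", but
+        # OpenSSL registers that curve under the X9.62 short name
+        # "prime256v1"; the alias table bridges the two before the
+        # OBJ_sn2nid lookup below.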
+ curve_name = curve_aliases.get(curve.name, curve.name)
+
+ curve_nid = self._lib.OBJ_sn2nid(curve_name.encode())
+ if curve_nid == self._lib.NID_undef:
+ raise UnsupportedAlgorithm(
+ f"{curve.name} is not a supported elliptic curve",
+ _Reasons.UNSUPPORTED_ELLIPTIC_CURVE,
+ )
+ return curve_nid
+
+ @contextmanager
+ def _tmp_bn_ctx(self):
+ bn_ctx = self._lib.BN_CTX_new()
+ self.openssl_assert(bn_ctx != self._ffi.NULL)
+ bn_ctx = self._ffi.gc(bn_ctx, self._lib.BN_CTX_free)
+ self._lib.BN_CTX_start(bn_ctx)
+ try:
+ yield bn_ctx
+ finally:
+ self._lib.BN_CTX_end(bn_ctx)
+
+ def _ec_key_set_public_key_affine_coordinates(
+ self,
+ ec_cdata,
+ x: int,
+ y: int,
+ bn_ctx,
+ ) -> None:
+ """
+ Sets the public key point in the EC_KEY context to the affine x and y
+ values.
+ """
+
+ if x < 0 or y < 0:
+ raise ValueError(
+ "Invalid EC key. Both x and y must be non-negative."
+ )
+
+ x = self._ffi.gc(self._int_to_bn(x), self._lib.BN_free)
+ y = self._ffi.gc(self._int_to_bn(y), self._lib.BN_free)
+ group = self._lib.EC_KEY_get0_group(ec_cdata)
+ self.openssl_assert(group != self._ffi.NULL)
+ point = self._lib.EC_POINT_new(group)
+ self.openssl_assert(point != self._ffi.NULL)
+ point = self._ffi.gc(point, self._lib.EC_POINT_free)
+ res = self._lib.EC_POINT_set_affine_coordinates(
+ group, point, x, y, bn_ctx
+ )
+ if res != 1:
+ self._consume_errors()
+ raise ValueError("Invalid EC key.")
+ res = self._lib.EC_KEY_set_public_key(ec_cdata, point)
+ self.openssl_assert(res == 1)
+
+ def _private_key_bytes(
+ self,
+ encoding: serialization.Encoding,
+ format: serialization.PrivateFormat,
+ encryption_algorithm: serialization.KeySerializationEncryption,
+ key,
+ evp_pkey,
+ cdata,
+ ) -> bytes:
+ # validate argument types
+ if not isinstance(encoding, serialization.Encoding):
+ raise TypeError("encoding must be an item from the Encoding enum")
+ if not isinstance(format, serialization.PrivateFormat):
+ raise TypeError(
+ "format must be an item from the PrivateFormat enum"
+ )
+ if not isinstance(
+ encryption_algorithm, serialization.KeySerializationEncryption
+ ):
+ raise TypeError(
+ "Encryption algorithm must be a KeySerializationEncryption "
+ "instance"
+ )
+
+ # validate password
+ if isinstance(encryption_algorithm, serialization.NoEncryption):
+ password = b""
+ elif isinstance(
+ encryption_algorithm, serialization.BestAvailableEncryption
+ ):
+ password = encryption_algorithm.password
+ if len(password) > 1023:
+ raise ValueError(
+ "Passwords longer than 1023 bytes are not supported by "
+ "this backend"
+ )
+ elif (
+ isinstance(
+ encryption_algorithm, serialization._KeySerializationEncryption
+ )
+ and encryption_algorithm._format
+ is format
+ is serialization.PrivateFormat.OpenSSH
+ ):
+ password = encryption_algorithm.password
+ else:
+ raise ValueError("Unsupported encryption type")
+
+ # PKCS8 + PEM/DER
+ if format is serialization.PrivateFormat.PKCS8:
+ if encoding is serialization.Encoding.PEM:
+ write_bio = self._lib.PEM_write_bio_PKCS8PrivateKey
+ elif encoding is serialization.Encoding.DER:
+ write_bio = self._lib.i2d_PKCS8PrivateKey_bio
+ else:
+ raise ValueError("Unsupported encoding for PKCS8")
+ return self._private_key_bytes_via_bio(
+ write_bio, evp_pkey, password
+ )
+
+ # TraditionalOpenSSL + PEM/DER
+ if format is serialization.PrivateFormat.TraditionalOpenSSL:
+ if self._fips_enabled and not isinstance(
+ encryption_algorithm, serialization.NoEncryption
+ ):
+ raise ValueError(
+ "Encrypted traditional OpenSSL format is not "
+ "supported in FIPS mode."
+ )
+ key_type = self._lib.EVP_PKEY_id(evp_pkey)
+
+ if encoding is serialization.Encoding.PEM:
+ if key_type == self._lib.EVP_PKEY_RSA:
+ write_bio = self._lib.PEM_write_bio_RSAPrivateKey
+ else:
+ assert key_type == self._lib.EVP_PKEY_EC
+ write_bio = self._lib.PEM_write_bio_ECPrivateKey
+ return self._private_key_bytes_via_bio(
+ write_bio, cdata, password
+ )
+
+ if encoding is serialization.Encoding.DER:
+ if password:
+ raise ValueError(
+ "Encryption is not supported for DER encoded "
+ "traditional OpenSSL keys"
+ )
+ if key_type == self._lib.EVP_PKEY_RSA:
+ write_bio = self._lib.i2d_RSAPrivateKey_bio
+ else:
+ assert key_type == self._lib.EVP_PKEY_EC
+ write_bio = self._lib.i2d_ECPrivateKey_bio
+ return self._bio_func_output(write_bio, cdata)
+
+ raise ValueError("Unsupported encoding for TraditionalOpenSSL")
+
+ # OpenSSH + PEM
+ if format is serialization.PrivateFormat.OpenSSH:
+ if encoding is serialization.Encoding.PEM:
+ return ssh._serialize_ssh_private_key(
+ key, password, encryption_algorithm
+ )
+
+ raise ValueError(
+ "OpenSSH private key format can only be used"
+ " with PEM encoding"
+ )
+
+ # Anything that key-specific code was supposed to handle earlier,
+ # like Raw.
+ raise ValueError("format is invalid with this key")
+
+ def _private_key_bytes_via_bio(
+ self, write_bio, evp_pkey, password
+ ) -> bytes:
+ if not password:
+ evp_cipher = self._ffi.NULL
+ else:
+ # This is a curated value that we will update over time.
+ evp_cipher = self._lib.EVP_get_cipherbyname(b"aes-256-cbc")
+
+ return self._bio_func_output(
+ write_bio,
+ evp_pkey,
+ evp_cipher,
+ password,
+ len(password),
+ self._ffi.NULL,
+ self._ffi.NULL,
+ )
+
+ def _bio_func_output(self, write_bio, *args) -> bytes:
+ bio = self._create_mem_bio_gc()
+ res = write_bio(bio, *args)
+ self.openssl_assert(res == 1)
+ return self._read_mem_bio(bio)
+
+ def _public_key_bytes(
+ self,
+ encoding: serialization.Encoding,
+ format: serialization.PublicFormat,
+ key,
+ evp_pkey,
+ cdata,
+ ) -> bytes:
+ if not isinstance(encoding, serialization.Encoding):
+ raise TypeError("encoding must be an item from the Encoding enum")
+ if not isinstance(format, serialization.PublicFormat):
+ raise TypeError(
+ "format must be an item from the PublicFormat enum"
+ )
+
+ # SubjectPublicKeyInfo + PEM/DER
+ if format is serialization.PublicFormat.SubjectPublicKeyInfo:
+ if encoding is serialization.Encoding.PEM:
+ write_bio = self._lib.PEM_write_bio_PUBKEY
+ elif encoding is serialization.Encoding.DER:
+ write_bio = self._lib.i2d_PUBKEY_bio
+ else:
+ raise ValueError(
+ "SubjectPublicKeyInfo works only with PEM or DER encoding"
+ )
+ return self._bio_func_output(write_bio, evp_pkey)
+
+ # PKCS1 + PEM/DER
+ if format is serialization.PublicFormat.PKCS1:
+ # Only RSA is supported here.
+ key_type = self._lib.EVP_PKEY_id(evp_pkey)
+ if key_type != self._lib.EVP_PKEY_RSA:
+ raise ValueError("PKCS1 format is supported only for RSA keys")
+
+ if encoding is serialization.Encoding.PEM:
+ write_bio = self._lib.PEM_write_bio_RSAPublicKey
+ elif encoding is serialization.Encoding.DER:
+ write_bio = self._lib.i2d_RSAPublicKey_bio
+ else:
+ raise ValueError("PKCS1 works only with PEM or DER encoding")
+ return self._bio_func_output(write_bio, cdata)
+
+ # OpenSSH + OpenSSH
+ if format is serialization.PublicFormat.OpenSSH:
+ if encoding is serialization.Encoding.OpenSSH:
+ return ssh.serialize_ssh_public_key(key)
+
+ raise ValueError(
+ "OpenSSH format must be used with OpenSSH encoding"
+ )
+
+ # Anything that key-specific code was supposed to handle earlier,
+ # like Raw, CompressedPoint, UncompressedPoint
+ raise ValueError("format is invalid with this key")
+
+ def dh_supported(self) -> bool:
+ return not self._lib.CRYPTOGRAPHY_IS_BORINGSSL
+
+ def generate_dh_parameters(
+ self, generator: int, key_size: int
+ ) -> dh.DHParameters:
+ return rust_openssl.dh.generate_parameters(generator, key_size)
+
+ def generate_dh_private_key(
+ self, parameters: dh.DHParameters
+ ) -> dh.DHPrivateKey:
+ return parameters.generate_private_key()
+
+ def generate_dh_private_key_and_parameters(
+ self, generator: int, key_size: int
+ ) -> dh.DHPrivateKey:
+ return self.generate_dh_private_key(
+ self.generate_dh_parameters(generator, key_size)
+ )
+
+ def load_dh_private_numbers(
+ self, numbers: dh.DHPrivateNumbers
+ ) -> dh.DHPrivateKey:
+ return rust_openssl.dh.from_private_numbers(numbers)
+
+ def load_dh_public_numbers(
+ self, numbers: dh.DHPublicNumbers
+ ) -> dh.DHPublicKey:
+ return rust_openssl.dh.from_public_numbers(numbers)
+
+ def load_dh_parameter_numbers(
+ self, numbers: dh.DHParameterNumbers
+ ) -> dh.DHParameters:
+ return rust_openssl.dh.from_parameter_numbers(numbers)
+
+ def dh_parameters_supported(
+ self, p: int, g: int, q: typing.Optional[int] = None
+ ) -> bool:
+ try:
+ rust_openssl.dh.from_parameter_numbers(
+ dh.DHParameterNumbers(p=p, g=g, q=q)
+ )
+ except ValueError:
+ return False
+ else:
+ return True
+
+ def dh_x942_serialization_supported(self) -> bool:
+ return self._lib.Cryptography_HAS_EVP_PKEY_DHX == 1
+
+ def x25519_load_public_bytes(self, data: bytes) -> x25519.X25519PublicKey:
+ return rust_openssl.x25519.from_public_bytes(data)
+
+ def x25519_load_private_bytes(
+ self, data: bytes
+ ) -> x25519.X25519PrivateKey:
+ return rust_openssl.x25519.from_private_bytes(data)
+
+ def x25519_generate_key(self) -> x25519.X25519PrivateKey:
+ return rust_openssl.x25519.generate_key()
+
+ def x25519_supported(self) -> bool:
+ if self._fips_enabled:
+ return False
+ return not self._lib.CRYPTOGRAPHY_LIBRESSL_LESS_THAN_370
+
+ def x448_load_public_bytes(self, data: bytes) -> x448.X448PublicKey:
+ return rust_openssl.x448.from_public_bytes(data)
+
+ def x448_load_private_bytes(self, data: bytes) -> x448.X448PrivateKey:
+ return rust_openssl.x448.from_private_bytes(data)
+
+ def x448_generate_key(self) -> x448.X448PrivateKey:
+ return rust_openssl.x448.generate_key()
+
+ def x448_supported(self) -> bool:
+ if self._fips_enabled:
+ return False
+ return (
+ not self._lib.CRYPTOGRAPHY_IS_LIBRESSL
+ and not self._lib.CRYPTOGRAPHY_IS_BORINGSSL
+ )
+
+ def ed25519_supported(self) -> bool:
+ if self._fips_enabled:
+ return False
+ return self._lib.CRYPTOGRAPHY_HAS_WORKING_ED25519
+
+ def ed25519_load_public_bytes(
+ self, data: bytes
+ ) -> ed25519.Ed25519PublicKey:
+ return rust_openssl.ed25519.from_public_bytes(data)
+
+ def ed25519_load_private_bytes(
+ self, data: bytes
+ ) -> ed25519.Ed25519PrivateKey:
+ return rust_openssl.ed25519.from_private_bytes(data)
+
+ def ed25519_generate_key(self) -> ed25519.Ed25519PrivateKey:
+ return rust_openssl.ed25519.generate_key()
+
+ def ed448_supported(self) -> bool:
+ if self._fips_enabled:
+ return False
+ return (
+ not self._lib.CRYPTOGRAPHY_IS_LIBRESSL
+ and not self._lib.CRYPTOGRAPHY_IS_BORINGSSL
+ )
+
+ def ed448_load_public_bytes(self, data: bytes) -> ed448.Ed448PublicKey:
+ return rust_openssl.ed448.from_public_bytes(data)
+
+ def ed448_load_private_bytes(self, data: bytes) -> ed448.Ed448PrivateKey:
+ return rust_openssl.ed448.from_private_bytes(data)
+
+ def ed448_generate_key(self) -> ed448.Ed448PrivateKey:
+ return rust_openssl.ed448.generate_key()
+
+ def aead_cipher_supported(self, cipher) -> bool:
+ return aead._aead_cipher_supported(self, cipher)
+
+ def _zero_data(self, data, length: int) -> None:
+ # We clear things this way because at the moment we're not
+ # sure of a better way that can guarantee it overwrites the
+ # memory of a bytearray and doesn't just replace the underlying char *.
+ for i in range(length):
+ data[i] = 0
+
+ @contextlib.contextmanager
+ def _zeroed_null_terminated_buf(self, data):
+ """
+ This method takes bytes, which can be a bytestring or a mutable
+ buffer like a bytearray, and yields a null-terminated version of that
+ data. This is required because PKCS12_parse doesn't take a length with
+ its password char * and ffi.from_buffer doesn't provide null
+ termination. So, to support zeroing the data via bytearray we
+ need to build this ridiculous construct that copies the memory, but
+ zeroes it after use.
+ """
+ if data is None:
+ yield self._ffi.NULL
+ else:
+ data_len = len(data)
+ buf = self._ffi.new("char[]", data_len + 1)
+ self._ffi.memmove(buf, data, data_len)
+ try:
+ yield buf
+ finally:
+ # Cast to a uint8_t * so we can assign by integer
+ self._zero_data(self._ffi.cast("uint8_t *", buf), data_len)
+
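+    # Illustrative usage of the helper above (hypothetical password value):
+    #
+    #     with self._zeroed_null_terminated_buf(b"hunter2") as buf:
+    #         self._lib.PKCS12_parse(p12, buf, ...)
+    #
+    # On exit the copied buffer is zeroed even if the body raised.
+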
+ def load_key_and_certificates_from_pkcs12(
+ self, data: bytes, password: typing.Optional[bytes]
+ ) -> typing.Tuple[
+ typing.Optional[PrivateKeyTypes],
+ typing.Optional[x509.Certificate],
+ typing.List[x509.Certificate],
+ ]:
+ pkcs12 = self.load_pkcs12(data, password)
+ return (
+ pkcs12.key,
+ pkcs12.cert.certificate if pkcs12.cert else None,
+ [cert.certificate for cert in pkcs12.additional_certs],
+ )
+
+ def load_pkcs12(
+ self, data: bytes, password: typing.Optional[bytes]
+ ) -> PKCS12KeyAndCertificates:
+ if password is not None:
+ utils._check_byteslike("password", password)
+
+ bio = self._bytes_to_bio(data)
+ p12 = self._lib.d2i_PKCS12_bio(bio.bio, self._ffi.NULL)
+ if p12 == self._ffi.NULL:
+ self._consume_errors()
+ raise ValueError("Could not deserialize PKCS12 data")
+
+ p12 = self._ffi.gc(p12, self._lib.PKCS12_free)
+ evp_pkey_ptr = self._ffi.new("EVP_PKEY **")
+ x509_ptr = self._ffi.new("X509 **")
+ sk_x509_ptr = self._ffi.new("Cryptography_STACK_OF_X509 **")
+ with self._zeroed_null_terminated_buf(password) as password_buf:
+ res = self._lib.PKCS12_parse(
+ p12, password_buf, evp_pkey_ptr, x509_ptr, sk_x509_ptr
+ )
+ if res == 0:
+ self._consume_errors()
+ raise ValueError("Invalid password or PKCS12 data")
+
+ cert = None
+ key = None
+ additional_certificates = []
+
+ if evp_pkey_ptr[0] != self._ffi.NULL:
+ evp_pkey = self._ffi.gc(evp_pkey_ptr[0], self._lib.EVP_PKEY_free)
+ # We don't support turning off RSA key validation when loading
+ # PKCS12 keys
+ key = self._evp_pkey_to_private_key(
+ evp_pkey, unsafe_skip_rsa_key_validation=False
+ )
+
+ if x509_ptr[0] != self._ffi.NULL:
+ x509 = self._ffi.gc(x509_ptr[0], self._lib.X509_free)
+ cert_obj = self._ossl2cert(x509)
+ name = None
+ maybe_name = self._lib.X509_alias_get0(x509, self._ffi.NULL)
+ if maybe_name != self._ffi.NULL:
+ name = self._ffi.string(maybe_name)
+ cert = PKCS12Certificate(cert_obj, name)
+
+ if sk_x509_ptr[0] != self._ffi.NULL:
+ sk_x509 = self._ffi.gc(sk_x509_ptr[0], self._lib.sk_X509_free)
+ num = self._lib.sk_X509_num(sk_x509_ptr[0])
+
+ # In OpenSSL < 3.0.0 PKCS12 parsing reverses the order of the
+ # certificates.
+ indices: typing.Iterable[int]
+ if (
+ self._lib.CRYPTOGRAPHY_OPENSSL_300_OR_GREATER
+ or self._lib.CRYPTOGRAPHY_IS_BORINGSSL
+ ):
+ indices = range(num)
+ else:
+ indices = reversed(range(num))
+
+ for i in indices:
+ x509 = self._lib.sk_X509_value(sk_x509, i)
+ self.openssl_assert(x509 != self._ffi.NULL)
+ x509 = self._ffi.gc(x509, self._lib.X509_free)
+ addl_cert = self._ossl2cert(x509)
+ addl_name = None
+ maybe_name = self._lib.X509_alias_get0(x509, self._ffi.NULL)
+ if maybe_name != self._ffi.NULL:
+ addl_name = self._ffi.string(maybe_name)
+ additional_certificates.append(
+ PKCS12Certificate(addl_cert, addl_name)
+ )
+
+ return PKCS12KeyAndCertificates(key, cert, additional_certificates)
+
+ def serialize_key_and_certificates_to_pkcs12(
+ self,
+ name: typing.Optional[bytes],
+ key: typing.Optional[PKCS12PrivateKeyTypes],
+ cert: typing.Optional[x509.Certificate],
+ cas: typing.Optional[typing.List[_PKCS12CATypes]],
+ encryption_algorithm: serialization.KeySerializationEncryption,
+ ) -> bytes:
+ password = None
+ if name is not None:
+ utils._check_bytes("name", name)
+
+ if isinstance(encryption_algorithm, serialization.NoEncryption):
+ nid_cert = -1
+ nid_key = -1
+ pkcs12_iter = 0
+ mac_iter = 0
+ mac_alg = self._ffi.NULL
+ elif isinstance(
+ encryption_algorithm, serialization.BestAvailableEncryption
+ ):
+ # PKCS12 encryption is hopeless trash and can never be fixed.
+ # OpenSSL 3 supports PBESv2, but Libre and Boring do not, so
+ # we use PBESv1 with 3DES on the older paths.
+ if self._lib.CRYPTOGRAPHY_OPENSSL_300_OR_GREATER:
+ nid_cert = self._lib.NID_aes_256_cbc
+ nid_key = self._lib.NID_aes_256_cbc
+ else:
+ nid_cert = self._lib.NID_pbe_WithSHA1And3_Key_TripleDES_CBC
+ nid_key = self._lib.NID_pbe_WithSHA1And3_Key_TripleDES_CBC
+ # At least we can set this higher than OpenSSL's default
+ pkcs12_iter = 20000
+ # mac_iter chosen for compatibility reasons, see:
+ # https://www.openssl.org/docs/man1.1.1/man3/PKCS12_create.html
+ # Did we mention how lousy PKCS12 encryption is?
+ mac_iter = 1
+ # MAC algorithm can only be set on OpenSSL 3.0.0+
+ mac_alg = self._ffi.NULL
+ password = encryption_algorithm.password
+ elif (
+ isinstance(
+ encryption_algorithm, serialization._KeySerializationEncryption
+ )
+ and encryption_algorithm._format
+ is serialization.PrivateFormat.PKCS12
+ ):
+            # Default to OpenSSL's defaults. Behavior will vary based on the
+            # version of OpenSSL that cryptography is compiled against.
+ nid_cert = 0
+ nid_key = 0
+ # Use the default iters we use in best available
+ pkcs12_iter = 20000
+ # See the Best Available comment for why this is 1
+ mac_iter = 1
+ password = encryption_algorithm.password
+ keycertalg = encryption_algorithm._key_cert_algorithm
+ if keycertalg is PBES.PBESv1SHA1And3KeyTripleDESCBC:
+ nid_cert = self._lib.NID_pbe_WithSHA1And3_Key_TripleDES_CBC
+ nid_key = self._lib.NID_pbe_WithSHA1And3_Key_TripleDES_CBC
+ elif keycertalg is PBES.PBESv2SHA256AndAES256CBC:
+ if not self._lib.CRYPTOGRAPHY_OPENSSL_300_OR_GREATER:
+ raise UnsupportedAlgorithm(
+ "PBESv2 is not supported by this version of OpenSSL"
+ )
+ nid_cert = self._lib.NID_aes_256_cbc
+ nid_key = self._lib.NID_aes_256_cbc
+ else:
+ assert keycertalg is None
+ # We use OpenSSL's defaults
+
+ if encryption_algorithm._hmac_hash is not None:
+ if not self._lib.Cryptography_HAS_PKCS12_SET_MAC:
+ raise UnsupportedAlgorithm(
+ "Setting MAC algorithm is not supported by this "
+ "version of OpenSSL."
+ )
+ mac_alg = self._evp_md_non_null_from_algorithm(
+ encryption_algorithm._hmac_hash
+ )
+ self.openssl_assert(mac_alg != self._ffi.NULL)
+ else:
+ mac_alg = self._ffi.NULL
+
+ if encryption_algorithm._kdf_rounds is not None:
+ pkcs12_iter = encryption_algorithm._kdf_rounds
+
+ else:
+ raise ValueError("Unsupported key encryption type")
+
+ if cas is None or len(cas) == 0:
+ sk_x509 = self._ffi.NULL
+ else:
+ sk_x509 = self._lib.sk_X509_new_null()
+ sk_x509 = self._ffi.gc(sk_x509, self._lib.sk_X509_free)
+
+ # This list is to keep the x509 values alive until end of function
+ ossl_cas = []
+ for ca in cas:
+ if isinstance(ca, PKCS12Certificate):
+ ca_alias = ca.friendly_name
+ ossl_ca = self._cert2ossl(ca.certificate)
+ if ca_alias is None:
+ res = self._lib.X509_alias_set1(
+ ossl_ca, self._ffi.NULL, -1
+ )
+ else:
+ res = self._lib.X509_alias_set1(
+ ossl_ca, ca_alias, len(ca_alias)
+ )
+ self.openssl_assert(res == 1)
+ else:
+ ossl_ca = self._cert2ossl(ca)
+ ossl_cas.append(ossl_ca)
+ res = self._lib.sk_X509_push(sk_x509, ossl_ca)
+                self.openssl_assert(res >= 1)
+
+ with self._zeroed_null_terminated_buf(password) as password_buf:
+ with self._zeroed_null_terminated_buf(name) as name_buf:
+ ossl_cert = self._cert2ossl(cert) if cert else self._ffi.NULL
+ ossl_pkey = (
+ self._key2ossl(key) if key is not None else self._ffi.NULL
+ )
+
+ p12 = self._lib.PKCS12_create(
+ password_buf,
+ name_buf,
+ ossl_pkey,
+ ossl_cert,
+ sk_x509,
+ nid_key,
+ nid_cert,
+ pkcs12_iter,
+ mac_iter,
+ 0,
+ )
+
+ if (
+ self._lib.Cryptography_HAS_PKCS12_SET_MAC
+ and mac_alg != self._ffi.NULL
+ ):
+ self._lib.PKCS12_set_mac(
+ p12,
+ password_buf,
+ -1,
+ self._ffi.NULL,
+ 0,
+ mac_iter,
+ mac_alg,
+ )
+
+ self.openssl_assert(p12 != self._ffi.NULL)
+ p12 = self._ffi.gc(p12, self._lib.PKCS12_free)
+
+ bio = self._create_mem_bio_gc()
+ res = self._lib.i2d_PKCS12_bio(bio, p12)
+ self.openssl_assert(res > 0)
+ return self._read_mem_bio(bio)
+
+ def poly1305_supported(self) -> bool:
+ if self._fips_enabled:
+ return False
+ return self._lib.Cryptography_HAS_POLY1305 == 1
+
+ def pkcs7_supported(self) -> bool:
+ return not self._lib.CRYPTOGRAPHY_IS_BORINGSSL
+
+ def load_pem_pkcs7_certificates(
+ self, data: bytes
+ ) -> typing.List[x509.Certificate]:
+ utils._check_bytes("data", data)
+ bio = self._bytes_to_bio(data)
+ p7 = self._lib.PEM_read_bio_PKCS7(
+ bio.bio, self._ffi.NULL, self._ffi.NULL, self._ffi.NULL
+ )
+ if p7 == self._ffi.NULL:
+ self._consume_errors()
+ raise ValueError("Unable to parse PKCS7 data")
+
+ p7 = self._ffi.gc(p7, self._lib.PKCS7_free)
+ return self._load_pkcs7_certificates(p7)
+
+ def load_der_pkcs7_certificates(
+ self, data: bytes
+ ) -> typing.List[x509.Certificate]:
+ utils._check_bytes("data", data)
+ bio = self._bytes_to_bio(data)
+ p7 = self._lib.d2i_PKCS7_bio(bio.bio, self._ffi.NULL)
+ if p7 == self._ffi.NULL:
+ self._consume_errors()
+ raise ValueError("Unable to parse PKCS7 data")
+
+ p7 = self._ffi.gc(p7, self._lib.PKCS7_free)
+ return self._load_pkcs7_certificates(p7)
+
+ def _load_pkcs7_certificates(self, p7) -> typing.List[x509.Certificate]:
+ nid = self._lib.OBJ_obj2nid(p7.type)
+ self.openssl_assert(nid != self._lib.NID_undef)
+ if nid != self._lib.NID_pkcs7_signed:
+ raise UnsupportedAlgorithm(
+ "Only basic signed structures are currently supported. NID"
+ " for this data was {}".format(nid),
+ _Reasons.UNSUPPORTED_SERIALIZATION,
+ )
+
+        certs: typing.List[x509.Certificate] = []
+ if p7.d.sign == self._ffi.NULL:
+ return certs
+
+ sk_x509 = p7.d.sign.cert
+ num = self._lib.sk_X509_num(sk_x509)
+ for i in range(num):
+ x509 = self._lib.sk_X509_value(sk_x509, i)
+ self.openssl_assert(x509 != self._ffi.NULL)
+ cert = self._ossl2cert(x509)
+ certs.append(cert)
+
+ return certs
+
+
+class GetCipherByName:
+ def __init__(self, fmt: str):
+ self._fmt = fmt
+
+ def __call__(self, backend: Backend, cipher: CipherAlgorithm, mode: Mode):
+ cipher_name = self._fmt.format(cipher=cipher, mode=mode).lower()
+ evp_cipher = backend._lib.EVP_get_cipherbyname(
+ cipher_name.encode("ascii")
+ )
+
+ # try EVP_CIPHER_fetch if present
+ if (
+ evp_cipher == backend._ffi.NULL
+ and backend._lib.Cryptography_HAS_300_EVP_CIPHER
+ ):
+ evp_cipher = backend._lib.EVP_CIPHER_fetch(
+ backend._ffi.NULL,
+ cipher_name.encode("ascii"),
+ backend._ffi.NULL,
+ )
+
+ backend._consume_errors()
+ return evp_cipher
+
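+# Illustrative use of the adapter above (the format string is an assumption;
+# the real registry entries live elsewhere in this module):
+#
+#     GetCipherByName("{cipher.name}-{cipher.key_size}-{mode.name}")
+#
+# would resolve AES-128 in CBC mode to the OpenSSL name "aes-128-cbc".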
+
+def _get_xts_cipher(backend: Backend, cipher: AES, mode):
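+    # An XTS key is two concatenated AES keys, so a 512-bit key maps to the
+    # OpenSSL cipher name "aes-256-xts".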
+ cipher_name = f"aes-{cipher.key_size // 2}-xts"
+ return backend._lib.EVP_get_cipherbyname(cipher_name.encode("ascii"))
+
+
+backend = Backend()
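+
+# Minimal illustrative sketch (not part of upstream code): the module-level
+# singleton above can answer capability queries without any key material:
+#
+#     from cryptography.hazmat.backends.openssl.backend import backend
+#     backend.elliptic_curve_supported(ec.SECP256R1())  # True on most builds
+#     backend.x25519_supported()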
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/ciphers.py b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/ciphers.py
new file mode 100644
index 00000000..bc42adbd
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/ciphers.py
@@ -0,0 +1,281 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import typing
+
+from cryptography.exceptions import InvalidTag, UnsupportedAlgorithm, _Reasons
+from cryptography.hazmat.primitives import ciphers
+from cryptography.hazmat.primitives.ciphers import algorithms, modes
+
+if typing.TYPE_CHECKING:
+ from cryptography.hazmat.backends.openssl.backend import Backend
+
+
+class _CipherContext:
+ _ENCRYPT = 1
+ _DECRYPT = 0
+ _MAX_CHUNK_SIZE = 2**30 - 1
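+    # EVP_CipherUpdate takes a C int for its input length, so update_into
+    # feeds data in chunks comfortably below INT_MAX.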
+
+ def __init__(self, backend: Backend, cipher, mode, operation: int) -> None:
+ self._backend = backend
+ self._cipher = cipher
+ self._mode = mode
+ self._operation = operation
+ self._tag: typing.Optional[bytes] = None
+
+ if isinstance(self._cipher, ciphers.BlockCipherAlgorithm):
+ self._block_size_bytes = self._cipher.block_size // 8
+ else:
+ self._block_size_bytes = 1
+
+ ctx = self._backend._lib.EVP_CIPHER_CTX_new()
+ ctx = self._backend._ffi.gc(
+ ctx, self._backend._lib.EVP_CIPHER_CTX_free
+ )
+
+ registry = self._backend._cipher_registry
+ try:
+ adapter = registry[type(cipher), type(mode)]
+ except KeyError:
+ raise UnsupportedAlgorithm(
+ "cipher {} in {} mode is not supported "
+ "by this backend.".format(
+ cipher.name, mode.name if mode else mode
+ ),
+ _Reasons.UNSUPPORTED_CIPHER,
+ )
+
+ evp_cipher = adapter(self._backend, cipher, mode)
+ if evp_cipher == self._backend._ffi.NULL:
+ msg = f"cipher {cipher.name} "
+ if mode is not None:
+ msg += f"in {mode.name} mode "
+ msg += (
+ "is not supported by this backend (Your version of OpenSSL "
+ "may be too old. Current version: {}.)"
+ ).format(self._backend.openssl_version_text())
+ raise UnsupportedAlgorithm(msg, _Reasons.UNSUPPORTED_CIPHER)
+
+ if isinstance(mode, modes.ModeWithInitializationVector):
+ iv_nonce = self._backend._ffi.from_buffer(
+ mode.initialization_vector
+ )
+ elif isinstance(mode, modes.ModeWithTweak):
+ iv_nonce = self._backend._ffi.from_buffer(mode.tweak)
+ elif isinstance(mode, modes.ModeWithNonce):
+ iv_nonce = self._backend._ffi.from_buffer(mode.nonce)
+ elif isinstance(cipher, algorithms.ChaCha20):
+ iv_nonce = self._backend._ffi.from_buffer(cipher.nonce)
+ else:
+ iv_nonce = self._backend._ffi.NULL
+ # begin init with cipher and operation type
+ res = self._backend._lib.EVP_CipherInit_ex(
+ ctx,
+ evp_cipher,
+ self._backend._ffi.NULL,
+ self._backend._ffi.NULL,
+ self._backend._ffi.NULL,
+ operation,
+ )
+ self._backend.openssl_assert(res != 0)
+ # set the key length to handle variable key ciphers
+ res = self._backend._lib.EVP_CIPHER_CTX_set_key_length(
+ ctx, len(cipher.key)
+ )
+ self._backend.openssl_assert(res != 0)
+ if isinstance(mode, modes.GCM):
+ res = self._backend._lib.EVP_CIPHER_CTX_ctrl(
+ ctx,
+ self._backend._lib.EVP_CTRL_AEAD_SET_IVLEN,
+ len(iv_nonce),
+ self._backend._ffi.NULL,
+ )
+ self._backend.openssl_assert(res != 0)
+ if mode.tag is not None:
+ res = self._backend._lib.EVP_CIPHER_CTX_ctrl(
+ ctx,
+ self._backend._lib.EVP_CTRL_AEAD_SET_TAG,
+ len(mode.tag),
+ mode.tag,
+ )
+ self._backend.openssl_assert(res != 0)
+ self._tag = mode.tag
+
+ # pass key/iv
+ res = self._backend._lib.EVP_CipherInit_ex(
+ ctx,
+ self._backend._ffi.NULL,
+ self._backend._ffi.NULL,
+ self._backend._ffi.from_buffer(cipher.key),
+ iv_nonce,
+ operation,
+ )
+
+ # Check for XTS mode duplicate keys error
+ errors = self._backend._consume_errors()
+ lib = self._backend._lib
+ if res == 0 and (
+ (
+ not lib.CRYPTOGRAPHY_IS_LIBRESSL
+ and errors[0]._lib_reason_match(
+ lib.ERR_LIB_EVP, lib.EVP_R_XTS_DUPLICATED_KEYS
+ )
+ )
+ or (
+ lib.Cryptography_HAS_PROVIDERS
+ and errors[0]._lib_reason_match(
+ lib.ERR_LIB_PROV, lib.PROV_R_XTS_DUPLICATED_KEYS
+ )
+ )
+ ):
+ raise ValueError("In XTS mode duplicated keys are not allowed")
+
+ self._backend.openssl_assert(res != 0, errors=errors)
+
+ # We purposely disable padding here as it's handled higher up in the
+ # API.
+ self._backend._lib.EVP_CIPHER_CTX_set_padding(ctx, 0)
+ self._ctx = ctx
+
+ def update(self, data: bytes) -> bytes:
+ buf = bytearray(len(data) + self._block_size_bytes - 1)
+ n = self.update_into(data, buf)
+ return bytes(buf[:n])
+
+ def update_into(self, data: bytes, buf: bytes) -> int:
+ total_data_len = len(data)
+ if len(buf) < (total_data_len + self._block_size_bytes - 1):
+ raise ValueError(
+ "buffer must be at least {} bytes for this "
+ "payload".format(len(data) + self._block_size_bytes - 1)
+ )
+
+ data_processed = 0
+ total_out = 0
+ outlen = self._backend._ffi.new("int *")
+ baseoutbuf = self._backend._ffi.from_buffer(buf, require_writable=True)
+ baseinbuf = self._backend._ffi.from_buffer(data)
+
+ while data_processed != total_data_len:
+ outbuf = baseoutbuf + total_out
+ inbuf = baseinbuf + data_processed
+ inlen = min(self._MAX_CHUNK_SIZE, total_data_len - data_processed)
+
+ res = self._backend._lib.EVP_CipherUpdate(
+ self._ctx, outbuf, outlen, inbuf, inlen
+ )
+ if res == 0 and isinstance(self._mode, modes.XTS):
+ self._backend._consume_errors()
+ raise ValueError(
+ "In XTS mode you must supply at least a full block in the "
+ "first update call. For AES this is 16 bytes."
+ )
+ else:
+ self._backend.openssl_assert(res != 0)
+ data_processed += inlen
+ total_out += outlen[0]
+
+ return total_out
+
+ def finalize(self) -> bytes:
+ if (
+ self._operation == self._DECRYPT
+ and isinstance(self._mode, modes.ModeWithAuthenticationTag)
+ and self.tag is None
+ ):
+ raise ValueError(
+ "Authentication tag must be provided when decrypting."
+ )
+
+ buf = self._backend._ffi.new("unsigned char[]", self._block_size_bytes)
+ outlen = self._backend._ffi.new("int *")
+ res = self._backend._lib.EVP_CipherFinal_ex(self._ctx, buf, outlen)
+ if res == 0:
+ errors = self._backend._consume_errors()
+
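+            # In GCM a zero return with no queued OpenSSL errors indicates
+            # an authentication-tag mismatch rather than an API failure.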
+ if not errors and isinstance(self._mode, modes.GCM):
+ raise InvalidTag
+
+ lib = self._backend._lib
+ self._backend.openssl_assert(
+ errors[0]._lib_reason_match(
+ lib.ERR_LIB_EVP,
+ lib.EVP_R_DATA_NOT_MULTIPLE_OF_BLOCK_LENGTH,
+ )
+ or (
+ lib.Cryptography_HAS_PROVIDERS
+ and errors[0]._lib_reason_match(
+ lib.ERR_LIB_PROV,
+ lib.PROV_R_WRONG_FINAL_BLOCK_LENGTH,
+ )
+ )
+ or (
+ lib.CRYPTOGRAPHY_IS_BORINGSSL
+ and errors[0].reason
+ == lib.CIPHER_R_DATA_NOT_MULTIPLE_OF_BLOCK_LENGTH
+ ),
+ errors=errors,
+ )
+ raise ValueError(
+ "The length of the provided data is not a multiple of "
+ "the block length."
+ )
+
+ if (
+ isinstance(self._mode, modes.GCM)
+ and self._operation == self._ENCRYPT
+ ):
+ tag_buf = self._backend._ffi.new(
+ "unsigned char[]", self._block_size_bytes
+ )
+ res = self._backend._lib.EVP_CIPHER_CTX_ctrl(
+ self._ctx,
+ self._backend._lib.EVP_CTRL_AEAD_GET_TAG,
+ self._block_size_bytes,
+ tag_buf,
+ )
+ self._backend.openssl_assert(res != 0)
+ self._tag = self._backend._ffi.buffer(tag_buf)[:]
+
+ res = self._backend._lib.EVP_CIPHER_CTX_reset(self._ctx)
+ self._backend.openssl_assert(res == 1)
+ return self._backend._ffi.buffer(buf)[: outlen[0]]
+
+ def finalize_with_tag(self, tag: bytes) -> bytes:
+ tag_len = len(tag)
+ if tag_len < self._mode._min_tag_length:
+ raise ValueError(
+ "Authentication tag must be {} bytes or longer.".format(
+ self._mode._min_tag_length
+ )
+ )
+ elif tag_len > self._block_size_bytes:
+ raise ValueError(
+ "Authentication tag cannot be more than {} bytes.".format(
+ self._block_size_bytes
+ )
+ )
+ res = self._backend._lib.EVP_CIPHER_CTX_ctrl(
+ self._ctx, self._backend._lib.EVP_CTRL_AEAD_SET_TAG, len(tag), tag
+ )
+ self._backend.openssl_assert(res != 0)
+ self._tag = tag
+ return self.finalize()
+
+ def authenticate_additional_data(self, data: bytes) -> None:
+ outlen = self._backend._ffi.new("int *")
+ res = self._backend._lib.EVP_CipherUpdate(
+ self._ctx,
+ self._backend._ffi.NULL,
+ outlen,
+ self._backend._ffi.from_buffer(data),
+ len(data),
+ )
+ self._backend.openssl_assert(res != 0)
+
+ @property
+ def tag(self) -> typing.Optional[bytes]:
+ return self._tag
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/cmac.py b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/cmac.py
new file mode 100644
index 00000000..bdd7fec6
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/cmac.py
@@ -0,0 +1,89 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import typing
+
+from cryptography.exceptions import (
+ InvalidSignature,
+ UnsupportedAlgorithm,
+ _Reasons,
+)
+from cryptography.hazmat.primitives import constant_time
+from cryptography.hazmat.primitives.ciphers.modes import CBC
+
+if typing.TYPE_CHECKING:
+ from cryptography.hazmat.backends.openssl.backend import Backend
+ from cryptography.hazmat.primitives import ciphers
+
+
+class _CMACContext:
+ def __init__(
+ self,
+ backend: Backend,
+ algorithm: ciphers.BlockCipherAlgorithm,
+ ctx=None,
+ ) -> None:
+ if not backend.cmac_algorithm_supported(algorithm):
+ raise UnsupportedAlgorithm(
+ "This backend does not support CMAC.",
+ _Reasons.UNSUPPORTED_CIPHER,
+ )
+
+ self._backend = backend
+ self._key = algorithm.key
+ self._algorithm = algorithm
+ self._output_length = algorithm.block_size // 8
+
+ if ctx is None:
+ registry = self._backend._cipher_registry
+ adapter = registry[type(algorithm), CBC]
+
+ evp_cipher = adapter(self._backend, algorithm, CBC)
+
+ ctx = self._backend._lib.CMAC_CTX_new()
+
+ self._backend.openssl_assert(ctx != self._backend._ffi.NULL)
+ ctx = self._backend._ffi.gc(ctx, self._backend._lib.CMAC_CTX_free)
+
+ key_ptr = self._backend._ffi.from_buffer(self._key)
+ res = self._backend._lib.CMAC_Init(
+ ctx,
+ key_ptr,
+ len(self._key),
+ evp_cipher,
+ self._backend._ffi.NULL,
+ )
+ self._backend.openssl_assert(res == 1)
+
+ self._ctx = ctx
+
+ def update(self, data: bytes) -> None:
+ res = self._backend._lib.CMAC_Update(self._ctx, data, len(data))
+ self._backend.openssl_assert(res == 1)
+
+ def finalize(self) -> bytes:
+ buf = self._backend._ffi.new("unsigned char[]", self._output_length)
+ length = self._backend._ffi.new("size_t *", self._output_length)
+ res = self._backend._lib.CMAC_Final(self._ctx, buf, length)
+ self._backend.openssl_assert(res == 1)
+
+ self._ctx = None
+
+ return self._backend._ffi.buffer(buf)[:]
+
+ def copy(self) -> _CMACContext:
+ copied_ctx = self._backend._lib.CMAC_CTX_new()
+ copied_ctx = self._backend._ffi.gc(
+ copied_ctx, self._backend._lib.CMAC_CTX_free
+ )
+ res = self._backend._lib.CMAC_CTX_copy(copied_ctx, self._ctx)
+ self._backend.openssl_assert(res == 1)
+ return _CMACContext(self._backend, self._algorithm, ctx=copied_ctx)
+
+ def verify(self, signature: bytes) -> None:
+ digest = self.finalize()
+ if not constant_time.bytes_eq(digest, signature):
+ raise InvalidSignature("Signature did not match digest.")
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/decode_asn1.py b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/decode_asn1.py
new file mode 100644
index 00000000..bf123b62
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/decode_asn1.py
@@ -0,0 +1,32 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+from cryptography import x509
+
+# CRLReason ::= ENUMERATED {
+# unspecified (0),
+# keyCompromise (1),
+# cACompromise (2),
+# affiliationChanged (3),
+# superseded (4),
+# cessationOfOperation (5),
+# certificateHold (6),
+# -- value 7 is not used
+# removeFromCRL (8),
+# privilegeWithdrawn (9),
+# aACompromise (10) }
+_CRL_ENTRY_REASON_ENUM_TO_CODE = {
+ x509.ReasonFlags.unspecified: 0,
+ x509.ReasonFlags.key_compromise: 1,
+ x509.ReasonFlags.ca_compromise: 2,
+ x509.ReasonFlags.affiliation_changed: 3,
+ x509.ReasonFlags.superseded: 4,
+ x509.ReasonFlags.cessation_of_operation: 5,
+ x509.ReasonFlags.certificate_hold: 6,
+ x509.ReasonFlags.remove_from_crl: 8,
+ x509.ReasonFlags.privilege_withdrawn: 9,
+ x509.ReasonFlags.aa_compromise: 10,
+}
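+
+# For example, _CRL_ENTRY_REASON_ENUM_TO_CODE[x509.ReasonFlags.remove_from_crl]
+# is 8; code 7 is deliberately absent because RFC 5280 leaves it unused.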
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/ec.py b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/ec.py
new file mode 100644
index 00000000..9821bd19
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/ec.py
@@ -0,0 +1,328 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import typing
+
+from cryptography.exceptions import (
+ InvalidSignature,
+ UnsupportedAlgorithm,
+ _Reasons,
+)
+from cryptography.hazmat.backends.openssl.utils import (
+ _calculate_digest_and_algorithm,
+ _evp_pkey_derive,
+)
+from cryptography.hazmat.primitives import serialization
+from cryptography.hazmat.primitives.asymmetric import ec
+
+if typing.TYPE_CHECKING:
+ from cryptography.hazmat.backends.openssl.backend import Backend
+
+
+def _check_signature_algorithm(
+ signature_algorithm: ec.EllipticCurveSignatureAlgorithm,
+) -> None:
+ if not isinstance(signature_algorithm, ec.ECDSA):
+ raise UnsupportedAlgorithm(
+ "Unsupported elliptic curve signature algorithm.",
+ _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM,
+ )
+
+
+def _ec_key_curve_sn(backend: Backend, ec_key) -> str:
+ group = backend._lib.EC_KEY_get0_group(ec_key)
+ backend.openssl_assert(group != backend._ffi.NULL)
+
+ nid = backend._lib.EC_GROUP_get_curve_name(group)
+ # The following check is to find EC keys with unnamed curves and raise
+ # an error for now.
+ if nid == backend._lib.NID_undef:
+ raise ValueError(
+ "ECDSA keys with explicit parameters are unsupported at this time"
+ )
+
+ # This is like the above check, but it also catches the case where you
+ # explicitly encoded a curve with the same parameters as a named curve.
+ # Don't do that.
+ if (
+ not backend._lib.CRYPTOGRAPHY_IS_LIBRESSL
+ and backend._lib.EC_GROUP_get_asn1_flag(group) == 0
+ ):
+ raise ValueError(
+ "ECDSA keys with explicit parameters are unsupported at this time"
+ )
+
+ curve_name = backend._lib.OBJ_nid2sn(nid)
+ backend.openssl_assert(curve_name != backend._ffi.NULL)
+
+ sn = backend._ffi.string(curve_name).decode("ascii")
+ return sn
+
+
+def _mark_asn1_named_ec_curve(backend: Backend, ec_cdata):
+ """
+ Set the named curve flag on the EC_KEY. This causes OpenSSL to
+    serialize EC keys along with their curve OID, which makes
+ deserialization easier.
+ """
+
+ backend._lib.EC_KEY_set_asn1_flag(
+ ec_cdata, backend._lib.OPENSSL_EC_NAMED_CURVE
+ )
+
+
+def _check_key_infinity(backend: Backend, ec_cdata) -> None:
+ point = backend._lib.EC_KEY_get0_public_key(ec_cdata)
+ backend.openssl_assert(point != backend._ffi.NULL)
+ group = backend._lib.EC_KEY_get0_group(ec_cdata)
+ backend.openssl_assert(group != backend._ffi.NULL)
+ if backend._lib.EC_POINT_is_at_infinity(group, point):
+ raise ValueError(
+ "Cannot load an EC public key where the point is at infinity"
+ )
+
+
+def _sn_to_elliptic_curve(backend: Backend, sn: str) -> ec.EllipticCurve:
+ try:
+ return ec._CURVE_TYPES[sn]()
+ except KeyError:
+ raise UnsupportedAlgorithm(
+ f"{sn} is not a supported elliptic curve",
+ _Reasons.UNSUPPORTED_ELLIPTIC_CURVE,
+ )
+
+
+def _ecdsa_sig_sign(
+ backend: Backend, private_key: _EllipticCurvePrivateKey, data: bytes
+) -> bytes:
+ max_size = backend._lib.ECDSA_size(private_key._ec_key)
+ backend.openssl_assert(max_size > 0)
+
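+    # ECDSA_size is an upper bound on the DER-encoded signature length; the
+    # actual length comes back via siglen_ptr, hence the slice on return.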
+ sigbuf = backend._ffi.new("unsigned char[]", max_size)
+ siglen_ptr = backend._ffi.new("unsigned int[]", 1)
+ res = backend._lib.ECDSA_sign(
+ 0, data, len(data), sigbuf, siglen_ptr, private_key._ec_key
+ )
+ backend.openssl_assert(res == 1)
+ return backend._ffi.buffer(sigbuf)[: siglen_ptr[0]]
+
+
+def _ecdsa_sig_verify(
+ backend: Backend,
+ public_key: _EllipticCurvePublicKey,
+ signature: bytes,
+ data: bytes,
+) -> None:
+ res = backend._lib.ECDSA_verify(
+ 0, data, len(data), signature, len(signature), public_key._ec_key
+ )
+ if res != 1:
+ backend._consume_errors()
+ raise InvalidSignature
+
+
+class _EllipticCurvePrivateKey(ec.EllipticCurvePrivateKey):
+ def __init__(self, backend: Backend, ec_key_cdata, evp_pkey):
+ self._backend = backend
+ self._ec_key = ec_key_cdata
+ self._evp_pkey = evp_pkey
+
+ sn = _ec_key_curve_sn(backend, ec_key_cdata)
+ self._curve = _sn_to_elliptic_curve(backend, sn)
+ _mark_asn1_named_ec_curve(backend, ec_key_cdata)
+ _check_key_infinity(backend, ec_key_cdata)
+
+ @property
+ def curve(self) -> ec.EllipticCurve:
+ return self._curve
+
+ @property
+ def key_size(self) -> int:
+ return self.curve.key_size
+
+ def exchange(
+ self, algorithm: ec.ECDH, peer_public_key: ec.EllipticCurvePublicKey
+ ) -> bytes:
+ if not (
+ self._backend.elliptic_curve_exchange_algorithm_supported(
+ algorithm, self.curve
+ )
+ ):
+ raise UnsupportedAlgorithm(
+ "This backend does not support the ECDH algorithm.",
+ _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM,
+ )
+
+ if peer_public_key.curve.name != self.curve.name:
+ raise ValueError(
+ "peer_public_key and self are not on the same curve"
+ )
+
+ return _evp_pkey_derive(self._backend, self._evp_pkey, peer_public_key)
+
+ def public_key(self) -> ec.EllipticCurvePublicKey:
+ group = self._backend._lib.EC_KEY_get0_group(self._ec_key)
+ self._backend.openssl_assert(group != self._backend._ffi.NULL)
+
+ curve_nid = self._backend._lib.EC_GROUP_get_curve_name(group)
+ public_ec_key = self._backend._ec_key_new_by_curve_nid(curve_nid)
+
+ point = self._backend._lib.EC_KEY_get0_public_key(self._ec_key)
+ self._backend.openssl_assert(point != self._backend._ffi.NULL)
+
+ res = self._backend._lib.EC_KEY_set_public_key(public_ec_key, point)
+ self._backend.openssl_assert(res == 1)
+
+ evp_pkey = self._backend._ec_cdata_to_evp_pkey(public_ec_key)
+
+ return _EllipticCurvePublicKey(self._backend, public_ec_key, evp_pkey)
+
+ def private_numbers(self) -> ec.EllipticCurvePrivateNumbers:
+ bn = self._backend._lib.EC_KEY_get0_private_key(self._ec_key)
+ private_value = self._backend._bn_to_int(bn)
+ return ec.EllipticCurvePrivateNumbers(
+ private_value=private_value,
+ public_numbers=self.public_key().public_numbers(),
+ )
+
+ def private_bytes(
+ self,
+ encoding: serialization.Encoding,
+ format: serialization.PrivateFormat,
+ encryption_algorithm: serialization.KeySerializationEncryption,
+ ) -> bytes:
+ return self._backend._private_key_bytes(
+ encoding,
+ format,
+ encryption_algorithm,
+ self,
+ self._evp_pkey,
+ self._ec_key,
+ )
+
+ def sign(
+ self,
+ data: bytes,
+ signature_algorithm: ec.EllipticCurveSignatureAlgorithm,
+ ) -> bytes:
+ _check_signature_algorithm(signature_algorithm)
+ data, _ = _calculate_digest_and_algorithm(
+ data,
+ signature_algorithm.algorithm,
+ )
+ return _ecdsa_sig_sign(self._backend, self, data)
+
+
+class _EllipticCurvePublicKey(ec.EllipticCurvePublicKey):
+ def __init__(self, backend: Backend, ec_key_cdata, evp_pkey):
+ self._backend = backend
+ self._ec_key = ec_key_cdata
+ self._evp_pkey = evp_pkey
+
+ sn = _ec_key_curve_sn(backend, ec_key_cdata)
+ self._curve = _sn_to_elliptic_curve(backend, sn)
+ _mark_asn1_named_ec_curve(backend, ec_key_cdata)
+ _check_key_infinity(backend, ec_key_cdata)
+
+ @property
+ def curve(self) -> ec.EllipticCurve:
+ return self._curve
+
+ @property
+ def key_size(self) -> int:
+ return self.curve.key_size
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, _EllipticCurvePublicKey):
+ return NotImplemented
+
+ return (
+ self._backend._lib.EVP_PKEY_cmp(self._evp_pkey, other._evp_pkey)
+ == 1
+ )
+
+ def public_numbers(self) -> ec.EllipticCurvePublicNumbers:
+ group = self._backend._lib.EC_KEY_get0_group(self._ec_key)
+ self._backend.openssl_assert(group != self._backend._ffi.NULL)
+
+ point = self._backend._lib.EC_KEY_get0_public_key(self._ec_key)
+ self._backend.openssl_assert(point != self._backend._ffi.NULL)
+
+ with self._backend._tmp_bn_ctx() as bn_ctx:
+ bn_x = self._backend._lib.BN_CTX_get(bn_ctx)
+ bn_y = self._backend._lib.BN_CTX_get(bn_ctx)
+
+ res = self._backend._lib.EC_POINT_get_affine_coordinates(
+ group, point, bn_x, bn_y, bn_ctx
+ )
+ self._backend.openssl_assert(res == 1)
+
+ x = self._backend._bn_to_int(bn_x)
+ y = self._backend._bn_to_int(bn_y)
+
+ return ec.EllipticCurvePublicNumbers(x=x, y=y, curve=self._curve)
+
+ def _encode_point(self, format: serialization.PublicFormat) -> bytes:
+ if format is serialization.PublicFormat.CompressedPoint:
+ conversion = self._backend._lib.POINT_CONVERSION_COMPRESSED
+ else:
+ assert format is serialization.PublicFormat.UncompressedPoint
+ conversion = self._backend._lib.POINT_CONVERSION_UNCOMPRESSED
+
+ group = self._backend._lib.EC_KEY_get0_group(self._ec_key)
+ self._backend.openssl_assert(group != self._backend._ffi.NULL)
+ point = self._backend._lib.EC_KEY_get0_public_key(self._ec_key)
+ self._backend.openssl_assert(point != self._backend._ffi.NULL)
+ with self._backend._tmp_bn_ctx() as bn_ctx:
+ buflen = self._backend._lib.EC_POINT_point2oct(
+ group, point, conversion, self._backend._ffi.NULL, 0, bn_ctx
+ )
+ self._backend.openssl_assert(buflen > 0)
+ buf = self._backend._ffi.new("char[]", buflen)
+ res = self._backend._lib.EC_POINT_point2oct(
+ group, point, conversion, buf, buflen, bn_ctx
+ )
+ self._backend.openssl_assert(buflen == res)
+
+ return self._backend._ffi.buffer(buf)[:]
+
+ def public_bytes(
+ self,
+ encoding: serialization.Encoding,
+ format: serialization.PublicFormat,
+ ) -> bytes:
+ if (
+ encoding is serialization.Encoding.X962
+ or format is serialization.PublicFormat.CompressedPoint
+ or format is serialization.PublicFormat.UncompressedPoint
+ ):
+ if encoding is not serialization.Encoding.X962 or format not in (
+ serialization.PublicFormat.CompressedPoint,
+ serialization.PublicFormat.UncompressedPoint,
+ ):
+ raise ValueError(
+ "X962 encoding must be used with CompressedPoint or "
+ "UncompressedPoint format"
+ )
+
+ return self._encode_point(format)
+ else:
+ return self._backend._public_key_bytes(
+ encoding, format, self, self._evp_pkey, None
+ )
+
+ def verify(
+ self,
+ signature: bytes,
+ data: bytes,
+ signature_algorithm: ec.EllipticCurveSignatureAlgorithm,
+ ) -> None:
+ _check_signature_algorithm(signature_algorithm)
+ data, _ = _calculate_digest_and_algorithm(
+ data,
+ signature_algorithm.algorithm,
+ )
+ _ecdsa_sig_verify(self._backend, self, signature, data)
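+
+
+# Editor's sketch (not upstream code): how the private key classes above are
+# exercised through the public API. The helper below is hypothetical and is
+# never called; it assumes a recent `cryptography` release.
+def _example_ecdsa_usage() -> None:
+    from cryptography.hazmat.primitives import hashes, serialization
+    from cryptography.hazmat.primitives.asymmetric import ec
+
+    key = ec.generate_private_key(ec.SECP256R1())
+    signature = key.sign(b"message", ec.ECDSA(hashes.SHA256()))
+    key.public_key().verify(signature, b"message", ec.ECDSA(hashes.SHA256()))
+    # X962 must pair with CompressedPoint/UncompressedPoint, as enforced in
+    # public_bytes above.
+    point = key.public_key().public_bytes(
+        serialization.Encoding.X962,
+        serialization.PublicFormat.CompressedPoint,
+    )
+    assert point[0] in (2, 3)  # compressed points start with 0x02 or 0x03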
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/rsa.py b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/rsa.py
new file mode 100644
index 00000000..ef27d4ea
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/rsa.py
@@ -0,0 +1,599 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import threading
+import typing
+
+from cryptography.exceptions import (
+ InvalidSignature,
+ UnsupportedAlgorithm,
+ _Reasons,
+)
+from cryptography.hazmat.backends.openssl.utils import (
+ _calculate_digest_and_algorithm,
+)
+from cryptography.hazmat.primitives import hashes, serialization
+from cryptography.hazmat.primitives.asymmetric import utils as asym_utils
+from cryptography.hazmat.primitives.asymmetric.padding import (
+ MGF1,
+ OAEP,
+ PSS,
+ AsymmetricPadding,
+ PKCS1v15,
+ _Auto,
+ _DigestLength,
+ _MaxLength,
+ calculate_max_pss_salt_length,
+)
+from cryptography.hazmat.primitives.asymmetric.rsa import (
+ RSAPrivateKey,
+ RSAPrivateNumbers,
+ RSAPublicKey,
+ RSAPublicNumbers,
+)
+
+if typing.TYPE_CHECKING:
+ from cryptography.hazmat.backends.openssl.backend import Backend
+
+
+def _get_rsa_pss_salt_length(
+ backend: Backend,
+ pss: PSS,
+ key: typing.Union[RSAPrivateKey, RSAPublicKey],
+ hash_algorithm: hashes.HashAlgorithm,
+) -> int:
+ salt = pss._salt_length
+
+ if isinstance(salt, _MaxLength):
+ return calculate_max_pss_salt_length(key, hash_algorithm)
+ elif isinstance(salt, _DigestLength):
+ return hash_algorithm.digest_size
+ elif isinstance(salt, _Auto):
+ if isinstance(key, RSAPrivateKey):
+ raise ValueError(
+ "PSS salt length can only be set to AUTO when verifying"
+ )
+ return backend._lib.RSA_PSS_SALTLEN_AUTO
+ else:
+ return salt
+
+
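+# Editor's sketch (not upstream code): the salt-length resolution above maps
+# the public PSS sentinels onto concrete values. Hypothetical, never-called
+# helper; assumes a recent `cryptography` release.
+def _example_pss_salt_lengths() -> None:
+    from cryptography.hazmat.primitives import hashes
+    from cryptography.hazmat.primitives.asymmetric import padding, rsa
+
+    key = rsa.generate_private_key(public_exponent=65537, key_size=2048)
+    # PSS.MAX_LENGTH is the _MaxLength sentinel handled above.
+    pss_max = padding.PSS(
+        mgf=padding.MGF1(hashes.SHA256()), salt_length=padding.PSS.MAX_LENGTH
+    )
+    sig = key.sign(b"data", pss_max, hashes.SHA256())
+    # PSS.AUTO (_Auto) is only valid when verifying, as enforced above.
+    pss_auto = padding.PSS(
+        mgf=padding.MGF1(hashes.SHA256()), salt_length=padding.PSS.AUTO
+    )
+    key.public_key().verify(sig, b"data", pss_auto, hashes.SHA256())
+
+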
+def _enc_dec_rsa(
+ backend: Backend,
+ key: typing.Union[_RSAPrivateKey, _RSAPublicKey],
+ data: bytes,
+ padding: AsymmetricPadding,
+) -> bytes:
+ if not isinstance(padding, AsymmetricPadding):
+ raise TypeError("Padding must be an instance of AsymmetricPadding.")
+
+ if isinstance(padding, PKCS1v15):
+ padding_enum = backend._lib.RSA_PKCS1_PADDING
+ elif isinstance(padding, OAEP):
+ padding_enum = backend._lib.RSA_PKCS1_OAEP_PADDING
+
+ if not isinstance(padding._mgf, MGF1):
+ raise UnsupportedAlgorithm(
+ "Only MGF1 is supported by this backend.",
+ _Reasons.UNSUPPORTED_MGF,
+ )
+
+ if not backend.rsa_padding_supported(padding):
+ raise UnsupportedAlgorithm(
+ "This combination of padding and hash algorithm is not "
+ "supported by this backend.",
+ _Reasons.UNSUPPORTED_PADDING,
+ )
+
+ else:
+ raise UnsupportedAlgorithm(
+ f"{padding.name} is not supported by this backend.",
+ _Reasons.UNSUPPORTED_PADDING,
+ )
+
+ return _enc_dec_rsa_pkey_ctx(backend, key, data, padding_enum, padding)
+
+
+def _enc_dec_rsa_pkey_ctx(
+ backend: Backend,
+ key: typing.Union[_RSAPrivateKey, _RSAPublicKey],
+ data: bytes,
+ padding_enum: int,
+ padding: AsymmetricPadding,
+) -> bytes:
+ init: typing.Callable[[typing.Any], int]
+ crypt: typing.Callable[[typing.Any, typing.Any, int, bytes, int], int]
+ if isinstance(key, _RSAPublicKey):
+ init = backend._lib.EVP_PKEY_encrypt_init
+ crypt = backend._lib.EVP_PKEY_encrypt
+ else:
+ init = backend._lib.EVP_PKEY_decrypt_init
+ crypt = backend._lib.EVP_PKEY_decrypt
+
+ pkey_ctx = backend._lib.EVP_PKEY_CTX_new(key._evp_pkey, backend._ffi.NULL)
+ backend.openssl_assert(pkey_ctx != backend._ffi.NULL)
+ pkey_ctx = backend._ffi.gc(pkey_ctx, backend._lib.EVP_PKEY_CTX_free)
+ res = init(pkey_ctx)
+ backend.openssl_assert(res == 1)
+ res = backend._lib.EVP_PKEY_CTX_set_rsa_padding(pkey_ctx, padding_enum)
+ backend.openssl_assert(res > 0)
+ buf_size = backend._lib.EVP_PKEY_size(key._evp_pkey)
+ backend.openssl_assert(buf_size > 0)
+ if isinstance(padding, OAEP):
+ mgf1_md = backend._evp_md_non_null_from_algorithm(
+ padding._mgf._algorithm
+ )
+ res = backend._lib.EVP_PKEY_CTX_set_rsa_mgf1_md(pkey_ctx, mgf1_md)
+ backend.openssl_assert(res > 0)
+ oaep_md = backend._evp_md_non_null_from_algorithm(padding._algorithm)
+ res = backend._lib.EVP_PKEY_CTX_set_rsa_oaep_md(pkey_ctx, oaep_md)
+ backend.openssl_assert(res > 0)
+
+ if (
+ isinstance(padding, OAEP)
+ and padding._label is not None
+ and len(padding._label) > 0
+ ):
+ # set0_rsa_oaep_label takes ownership of the char * so we need to
+ # copy it into some new memory
+ labelptr = backend._lib.OPENSSL_malloc(len(padding._label))
+ backend.openssl_assert(labelptr != backend._ffi.NULL)
+ backend._ffi.memmove(labelptr, padding._label, len(padding._label))
+ res = backend._lib.EVP_PKEY_CTX_set0_rsa_oaep_label(
+ pkey_ctx, labelptr, len(padding._label)
+ )
+ backend.openssl_assert(res == 1)
+
+ outlen = backend._ffi.new("size_t *", buf_size)
+ buf = backend._ffi.new("unsigned char[]", buf_size)
+ # Everything from this line onwards is written with the goal of being as
+ # constant-time as is practical given the constraints of Python and our
+ # API. See Bleichenbacher's '98 attack on RSA, and its many many variants.
+ # As such, you should not attempt to change this (particularly to "clean it
+ # up") without understanding why it was written this way (see
+ # Chesterton's Fence), and without measuring to verify you have not
+ # introduced observable time differences.
+ res = crypt(pkey_ctx, buf, outlen, data, len(data))
+ resbuf = backend._ffi.buffer(buf)[: outlen[0]]
+ backend._lib.ERR_clear_error()
+ if res <= 0:
+ raise ValueError("Encryption/decryption failed.")
+ return resbuf
+
+
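+# Editor's sketch (not upstream code): the OAEP branch above, driven through
+# the public API. Hypothetical, never-called helper.
+def _example_oaep_roundtrip() -> None:
+    from cryptography.hazmat.primitives import hashes
+    from cryptography.hazmat.primitives.asymmetric import padding, rsa
+
+    key = rsa.generate_private_key(public_exponent=65537, key_size=2048)
+    oaep = padding.OAEP(
+        mgf=padding.MGF1(algorithm=hashes.SHA256()),
+        algorithm=hashes.SHA256(),
+        label=None,
+    )
+    ciphertext = key.public_key().encrypt(b"secret message", oaep)
+    assert key.decrypt(ciphertext, oaep) == b"secret message"
+
+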
+def _rsa_sig_determine_padding(
+ backend: Backend,
+ key: typing.Union[_RSAPrivateKey, _RSAPublicKey],
+ padding: AsymmetricPadding,
+ algorithm: typing.Optional[hashes.HashAlgorithm],
+) -> int:
+ if not isinstance(padding, AsymmetricPadding):
+ raise TypeError("Expected provider of AsymmetricPadding.")
+
+ pkey_size = backend._lib.EVP_PKEY_size(key._evp_pkey)
+ backend.openssl_assert(pkey_size > 0)
+
+ if isinstance(padding, PKCS1v15):
+        # The hash algorithm is ignored for PKCS1v15 padding and may be None.
+ padding_enum = backend._lib.RSA_PKCS1_PADDING
+ elif isinstance(padding, PSS):
+ if not isinstance(padding._mgf, MGF1):
+ raise UnsupportedAlgorithm(
+ "Only MGF1 is supported by this backend.",
+ _Reasons.UNSUPPORTED_MGF,
+ )
+
+ # PSS padding requires a hash algorithm
+ if not isinstance(algorithm, hashes.HashAlgorithm):
+ raise TypeError("Expected instance of hashes.HashAlgorithm.")
+
+        # The key size in bytes, minus 2, is the maximum
+        # PSS signature length (the salt length is checked later)
+ if pkey_size - algorithm.digest_size - 2 < 0:
+ raise ValueError(
+ "Digest too large for key size. Use a larger "
+ "key or different digest."
+ )
+
+ padding_enum = backend._lib.RSA_PKCS1_PSS_PADDING
+ else:
+ raise UnsupportedAlgorithm(
+ f"{padding.name} is not supported by this backend.",
+ _Reasons.UNSUPPORTED_PADDING,
+ )
+
+ return padding_enum
+
+
+# Hash algorithm can be absent (None) to initialize the context without setting
+# any message digest algorithm. This is currently only valid for the PKCS1v15
+# padding type, where it means that the signature data is encoded/decoded
+# as provided, without being wrapped in a DigestInfo structure.
+def _rsa_sig_setup(
+ backend: Backend,
+ padding: AsymmetricPadding,
+ algorithm: typing.Optional[hashes.HashAlgorithm],
+ key: typing.Union[_RSAPublicKey, _RSAPrivateKey],
+ init_func: typing.Callable[[typing.Any], int],
+):
+ padding_enum = _rsa_sig_determine_padding(backend, key, padding, algorithm)
+ pkey_ctx = backend._lib.EVP_PKEY_CTX_new(key._evp_pkey, backend._ffi.NULL)
+ backend.openssl_assert(pkey_ctx != backend._ffi.NULL)
+ pkey_ctx = backend._ffi.gc(pkey_ctx, backend._lib.EVP_PKEY_CTX_free)
+ res = init_func(pkey_ctx)
+ if res != 1:
+ errors = backend._consume_errors()
+ raise ValueError("Unable to sign/verify with this key", errors)
+
+ if algorithm is not None:
+ evp_md = backend._evp_md_non_null_from_algorithm(algorithm)
+ res = backend._lib.EVP_PKEY_CTX_set_signature_md(pkey_ctx, evp_md)
+ if res <= 0:
+ backend._consume_errors()
+ raise UnsupportedAlgorithm(
+ "{} is not supported by this backend for RSA signing.".format(
+ algorithm.name
+ ),
+ _Reasons.UNSUPPORTED_HASH,
+ )
+ res = backend._lib.EVP_PKEY_CTX_set_rsa_padding(pkey_ctx, padding_enum)
+ if res <= 0:
+ backend._consume_errors()
+ raise UnsupportedAlgorithm(
+ "{} is not supported for the RSA signature operation.".format(
+ padding.name
+ ),
+ _Reasons.UNSUPPORTED_PADDING,
+ )
+ if isinstance(padding, PSS):
+ assert isinstance(algorithm, hashes.HashAlgorithm)
+ res = backend._lib.EVP_PKEY_CTX_set_rsa_pss_saltlen(
+ pkey_ctx,
+ _get_rsa_pss_salt_length(backend, padding, key, algorithm),
+ )
+ backend.openssl_assert(res > 0)
+
+ mgf1_md = backend._evp_md_non_null_from_algorithm(
+ padding._mgf._algorithm
+ )
+ res = backend._lib.EVP_PKEY_CTX_set_rsa_mgf1_md(pkey_ctx, mgf1_md)
+ backend.openssl_assert(res > 0)
+
+ return pkey_ctx
+
+
+def _rsa_sig_sign(
+ backend: Backend,
+ padding: AsymmetricPadding,
+ algorithm: hashes.HashAlgorithm,
+ private_key: _RSAPrivateKey,
+ data: bytes,
+) -> bytes:
+ pkey_ctx = _rsa_sig_setup(
+ backend,
+ padding,
+ algorithm,
+ private_key,
+ backend._lib.EVP_PKEY_sign_init,
+ )
+ buflen = backend._ffi.new("size_t *")
+ res = backend._lib.EVP_PKEY_sign(
+ pkey_ctx, backend._ffi.NULL, buflen, data, len(data)
+ )
+ backend.openssl_assert(res == 1)
+ buf = backend._ffi.new("unsigned char[]", buflen[0])
+ res = backend._lib.EVP_PKEY_sign(pkey_ctx, buf, buflen, data, len(data))
+ if res != 1:
+ errors = backend._consume_errors()
+ raise ValueError(
+ "Digest or salt length too long for key size. Use a larger key "
+ "or shorter salt length if you are specifying a PSS salt",
+ errors,
+ )
+
+ return backend._ffi.buffer(buf)[:]
+
+
+def _rsa_sig_verify(
+ backend: Backend,
+ padding: AsymmetricPadding,
+ algorithm: hashes.HashAlgorithm,
+ public_key: _RSAPublicKey,
+ signature: bytes,
+ data: bytes,
+) -> None:
+ pkey_ctx = _rsa_sig_setup(
+ backend,
+ padding,
+ algorithm,
+ public_key,
+ backend._lib.EVP_PKEY_verify_init,
+ )
+ res = backend._lib.EVP_PKEY_verify(
+ pkey_ctx, signature, len(signature), data, len(data)
+ )
+    # The previous call can return negative numbers in the event of an
+    # error. This is not a signature failure, but we still need to fail
+    # if it occurs.
+ backend.openssl_assert(res >= 0)
+ if res == 0:
+ backend._consume_errors()
+ raise InvalidSignature
+
+
+def _rsa_sig_recover(
+ backend: Backend,
+ padding: AsymmetricPadding,
+ algorithm: typing.Optional[hashes.HashAlgorithm],
+ public_key: _RSAPublicKey,
+ signature: bytes,
+) -> bytes:
+ pkey_ctx = _rsa_sig_setup(
+ backend,
+ padding,
+ algorithm,
+ public_key,
+ backend._lib.EVP_PKEY_verify_recover_init,
+ )
+
+    # Attempt to keep the rest of the code in this function as constant-time
+ # as possible. See the comment in _enc_dec_rsa_pkey_ctx. Note that the
+ # buflen parameter is used even though its value may be undefined in the
+ # error case. Due to the tolerant nature of Python slicing this does not
+ # trigger any exceptions.
+ maxlen = backend._lib.EVP_PKEY_size(public_key._evp_pkey)
+ backend.openssl_assert(maxlen > 0)
+ buf = backend._ffi.new("unsigned char[]", maxlen)
+ buflen = backend._ffi.new("size_t *", maxlen)
+ res = backend._lib.EVP_PKEY_verify_recover(
+ pkey_ctx, buf, buflen, signature, len(signature)
+ )
+ resbuf = backend._ffi.buffer(buf)[: buflen[0]]
+ backend._lib.ERR_clear_error()
+ # Assume that all parameter errors are handled during the setup phase and
+ # any error here is due to invalid signature.
+ if res != 1:
+ raise InvalidSignature
+ return resbuf
+
+
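+# Editor's sketch (not upstream code): signature recovery as wired up above.
+# Hypothetical, never-called helper.
+def _example_signature_recovery() -> None:
+    from cryptography.hazmat.primitives import hashes
+    from cryptography.hazmat.primitives.asymmetric import padding, rsa
+
+    key = rsa.generate_private_key(public_exponent=65537, key_size=2048)
+    sig = key.sign(b"data", padding.PKCS1v15(), hashes.SHA256())
+    # With a hash algorithm the DigestInfo wrapper is removed and the digest
+    # is returned; with algorithm=None the encoded payload comes back as-is
+    # (see the comment above _rsa_sig_setup).
+    digest = key.public_key().recover_data_from_signature(
+        sig, padding.PKCS1v15(), hashes.SHA256()
+    )
+    expected = hashes.Hash(hashes.SHA256())
+    expected.update(b"data")
+    assert digest == expected.finalize()
+
+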
+class _RSAPrivateKey(RSAPrivateKey):
+ _evp_pkey: object
+ _rsa_cdata: object
+ _key_size: int
+
+ def __init__(
+ self,
+ backend: Backend,
+ rsa_cdata,
+ evp_pkey,
+ *,
+ unsafe_skip_rsa_key_validation: bool,
+ ):
+ res: int
+ # RSA_check_key is slower in OpenSSL 3.0.0 due to improved
+ # primality checking. In normal use this is unlikely to be a problem
+ # since users don't load new keys constantly, but for TESTING we've
+ # added an init arg that allows skipping the checks. You should not
+ # use this in production code unless you understand the consequences.
+ if not unsafe_skip_rsa_key_validation:
+ res = backend._lib.RSA_check_key(rsa_cdata)
+ if res != 1:
+ errors = backend._consume_errors()
+ raise ValueError("Invalid private key", errors)
+ # 2 is prime and passes an RSA key check, so we also check
+ # if p and q are odd just to be safe.
+ p = backend._ffi.new("BIGNUM **")
+ q = backend._ffi.new("BIGNUM **")
+ backend._lib.RSA_get0_factors(rsa_cdata, p, q)
+ backend.openssl_assert(p[0] != backend._ffi.NULL)
+ backend.openssl_assert(q[0] != backend._ffi.NULL)
+ p_odd = backend._lib.BN_is_odd(p[0])
+ q_odd = backend._lib.BN_is_odd(q[0])
+ if p_odd != 1 or q_odd != 1:
+ errors = backend._consume_errors()
+ raise ValueError("Invalid private key", errors)
+
+ self._backend = backend
+ self._rsa_cdata = rsa_cdata
+ self._evp_pkey = evp_pkey
+ # Used for lazy blinding
+ self._blinded = False
+ self._blinding_lock = threading.Lock()
+
+ n = self._backend._ffi.new("BIGNUM **")
+ self._backend._lib.RSA_get0_key(
+ self._rsa_cdata,
+ n,
+ self._backend._ffi.NULL,
+ self._backend._ffi.NULL,
+ )
+ self._backend.openssl_assert(n[0] != self._backend._ffi.NULL)
+ self._key_size = self._backend._lib.BN_num_bits(n[0])
+
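+    # Editor's note (illustrative, not upstream): the validation above can be
+    # bypassed for trusted test fixtures via the public loaders, e.g.
+    #
+    #   from cryptography.hazmat.primitives import serialization
+    #   key = serialization.load_pem_private_key(
+    #       pem_data, password=None, unsafe_skip_rsa_key_validation=True
+    #   )
+    #
+    # which forwards the flag to this constructor (`pem_data` is hypothetical).
+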
+ def _enable_blinding(self) -> None:
+ # If you call blind on an already blinded RSA key OpenSSL will turn
+ # it off and back on, which is a performance hit we want to avoid.
+ if not self._blinded:
+ with self._blinding_lock:
+ self._non_threadsafe_enable_blinding()
+
+ def _non_threadsafe_enable_blinding(self) -> None:
+ # This is only a separate function to allow for testing to cover both
+ # branches. It should never be invoked except through _enable_blinding.
+ # Check if it's not True again in case another thread raced past the
+ # first non-locked check.
+ if not self._blinded:
+ res = self._backend._lib.RSA_blinding_on(
+ self._rsa_cdata, self._backend._ffi.NULL
+ )
+ self._backend.openssl_assert(res == 1)
+ self._blinded = True
+
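+    # Editor's note (illustrative, not upstream): the two methods above form a
+    # double-checked locking pattern; generically:
+    #
+    #   if not self._flag:              # fast path, no lock taken
+    #       with self._lock:
+    #           if not self._flag:      # re-check under the lock
+    #               one_time_setup()    # hypothetical setup step
+    #               self._flag = True
+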
+ @property
+ def key_size(self) -> int:
+ return self._key_size
+
+ def decrypt(self, ciphertext: bytes, padding: AsymmetricPadding) -> bytes:
+ self._enable_blinding()
+ key_size_bytes = (self.key_size + 7) // 8
+ if key_size_bytes != len(ciphertext):
+ raise ValueError("Ciphertext length must be equal to key size.")
+
+ return _enc_dec_rsa(self._backend, self, ciphertext, padding)
+
+ def public_key(self) -> RSAPublicKey:
+ ctx = self._backend._lib.RSAPublicKey_dup(self._rsa_cdata)
+ self._backend.openssl_assert(ctx != self._backend._ffi.NULL)
+ ctx = self._backend._ffi.gc(ctx, self._backend._lib.RSA_free)
+ evp_pkey = self._backend._rsa_cdata_to_evp_pkey(ctx)
+ return _RSAPublicKey(self._backend, ctx, evp_pkey)
+
+ def private_numbers(self) -> RSAPrivateNumbers:
+ n = self._backend._ffi.new("BIGNUM **")
+ e = self._backend._ffi.new("BIGNUM **")
+ d = self._backend._ffi.new("BIGNUM **")
+ p = self._backend._ffi.new("BIGNUM **")
+ q = self._backend._ffi.new("BIGNUM **")
+ dmp1 = self._backend._ffi.new("BIGNUM **")
+ dmq1 = self._backend._ffi.new("BIGNUM **")
+ iqmp = self._backend._ffi.new("BIGNUM **")
+ self._backend._lib.RSA_get0_key(self._rsa_cdata, n, e, d)
+ self._backend.openssl_assert(n[0] != self._backend._ffi.NULL)
+ self._backend.openssl_assert(e[0] != self._backend._ffi.NULL)
+ self._backend.openssl_assert(d[0] != self._backend._ffi.NULL)
+ self._backend._lib.RSA_get0_factors(self._rsa_cdata, p, q)
+ self._backend.openssl_assert(p[0] != self._backend._ffi.NULL)
+ self._backend.openssl_assert(q[0] != self._backend._ffi.NULL)
+ self._backend._lib.RSA_get0_crt_params(
+ self._rsa_cdata, dmp1, dmq1, iqmp
+ )
+ self._backend.openssl_assert(dmp1[0] != self._backend._ffi.NULL)
+ self._backend.openssl_assert(dmq1[0] != self._backend._ffi.NULL)
+ self._backend.openssl_assert(iqmp[0] != self._backend._ffi.NULL)
+ return RSAPrivateNumbers(
+ p=self._backend._bn_to_int(p[0]),
+ q=self._backend._bn_to_int(q[0]),
+ d=self._backend._bn_to_int(d[0]),
+ dmp1=self._backend._bn_to_int(dmp1[0]),
+ dmq1=self._backend._bn_to_int(dmq1[0]),
+ iqmp=self._backend._bn_to_int(iqmp[0]),
+ public_numbers=RSAPublicNumbers(
+ e=self._backend._bn_to_int(e[0]),
+ n=self._backend._bn_to_int(n[0]),
+ ),
+ )
+
+ def private_bytes(
+ self,
+ encoding: serialization.Encoding,
+ format: serialization.PrivateFormat,
+ encryption_algorithm: serialization.KeySerializationEncryption,
+ ) -> bytes:
+ return self._backend._private_key_bytes(
+ encoding,
+ format,
+ encryption_algorithm,
+ self,
+ self._evp_pkey,
+ self._rsa_cdata,
+ )
+
+ def sign(
+ self,
+ data: bytes,
+ padding: AsymmetricPadding,
+ algorithm: typing.Union[asym_utils.Prehashed, hashes.HashAlgorithm],
+ ) -> bytes:
+ self._enable_blinding()
+ data, algorithm = _calculate_digest_and_algorithm(data, algorithm)
+ return _rsa_sig_sign(self._backend, padding, algorithm, self, data)
+
+
+class _RSAPublicKey(RSAPublicKey):
+ _evp_pkey: object
+ _rsa_cdata: object
+ _key_size: int
+
+ def __init__(self, backend: Backend, rsa_cdata, evp_pkey):
+ self._backend = backend
+ self._rsa_cdata = rsa_cdata
+ self._evp_pkey = evp_pkey
+
+ n = self._backend._ffi.new("BIGNUM **")
+ self._backend._lib.RSA_get0_key(
+ self._rsa_cdata,
+ n,
+ self._backend._ffi.NULL,
+ self._backend._ffi.NULL,
+ )
+ self._backend.openssl_assert(n[0] != self._backend._ffi.NULL)
+ self._key_size = self._backend._lib.BN_num_bits(n[0])
+
+ @property
+ def key_size(self) -> int:
+ return self._key_size
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, _RSAPublicKey):
+ return NotImplemented
+
+ return (
+ self._backend._lib.EVP_PKEY_cmp(self._evp_pkey, other._evp_pkey)
+ == 1
+ )
+
+ def encrypt(self, plaintext: bytes, padding: AsymmetricPadding) -> bytes:
+ return _enc_dec_rsa(self._backend, self, plaintext, padding)
+
+ def public_numbers(self) -> RSAPublicNumbers:
+ n = self._backend._ffi.new("BIGNUM **")
+ e = self._backend._ffi.new("BIGNUM **")
+ self._backend._lib.RSA_get0_key(
+ self._rsa_cdata, n, e, self._backend._ffi.NULL
+ )
+ self._backend.openssl_assert(n[0] != self._backend._ffi.NULL)
+ self._backend.openssl_assert(e[0] != self._backend._ffi.NULL)
+ return RSAPublicNumbers(
+ e=self._backend._bn_to_int(e[0]),
+ n=self._backend._bn_to_int(n[0]),
+ )
+
+ def public_bytes(
+ self,
+ encoding: serialization.Encoding,
+ format: serialization.PublicFormat,
+ ) -> bytes:
+ return self._backend._public_key_bytes(
+ encoding, format, self, self._evp_pkey, self._rsa_cdata
+ )
+
+ def verify(
+ self,
+ signature: bytes,
+ data: bytes,
+ padding: AsymmetricPadding,
+ algorithm: typing.Union[asym_utils.Prehashed, hashes.HashAlgorithm],
+ ) -> None:
+ data, algorithm = _calculate_digest_and_algorithm(data, algorithm)
+ _rsa_sig_verify(
+ self._backend, padding, algorithm, self, signature, data
+ )
+
+ def recover_data_from_signature(
+ self,
+ signature: bytes,
+ padding: AsymmetricPadding,
+ algorithm: typing.Optional[hashes.HashAlgorithm],
+ ) -> bytes:
+ if isinstance(algorithm, asym_utils.Prehashed):
+ raise TypeError(
+ "Prehashed is only supported in the sign and verify methods. "
+ "It cannot be used with recover_data_from_signature."
+ )
+ return _rsa_sig_recover(
+ self._backend, padding, algorithm, self, signature
+ )
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/utils.py b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/utils.py
new file mode 100644
index 00000000..5b404def
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/utils.py
@@ -0,0 +1,63 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import typing
+
+from cryptography.hazmat.primitives import hashes
+from cryptography.hazmat.primitives.asymmetric.utils import Prehashed
+
+if typing.TYPE_CHECKING:
+ from cryptography.hazmat.backends.openssl.backend import Backend
+
+
+def _evp_pkey_derive(backend: Backend, evp_pkey, peer_public_key) -> bytes:
+ ctx = backend._lib.EVP_PKEY_CTX_new(evp_pkey, backend._ffi.NULL)
+ backend.openssl_assert(ctx != backend._ffi.NULL)
+ ctx = backend._ffi.gc(ctx, backend._lib.EVP_PKEY_CTX_free)
+ res = backend._lib.EVP_PKEY_derive_init(ctx)
+ backend.openssl_assert(res == 1)
+
+ if backend._lib.Cryptography_HAS_EVP_PKEY_SET_PEER_EX:
+ res = backend._lib.EVP_PKEY_derive_set_peer_ex(
+ ctx, peer_public_key._evp_pkey, 0
+ )
+ else:
+ res = backend._lib.EVP_PKEY_derive_set_peer(
+ ctx, peer_public_key._evp_pkey
+ )
+ backend.openssl_assert(res == 1)
+
+ keylen = backend._ffi.new("size_t *")
+ res = backend._lib.EVP_PKEY_derive(ctx, backend._ffi.NULL, keylen)
+ backend.openssl_assert(res == 1)
+ backend.openssl_assert(keylen[0] > 0)
+ buf = backend._ffi.new("unsigned char[]", keylen[0])
+ res = backend._lib.EVP_PKEY_derive(ctx, buf, keylen)
+ if res != 1:
+ errors = backend._consume_errors()
+ raise ValueError("Error computing shared key.", errors)
+
+ return backend._ffi.buffer(buf, keylen[0])[:]
+
+
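+# Editor's sketch (not upstream code): _evp_pkey_derive backs key exchange,
+# e.g. ECDH through the public API. Hypothetical, never-called helper.
+def _example_ecdh_exchange() -> None:
+    from cryptography.hazmat.primitives.asymmetric import ec
+
+    ours = ec.generate_private_key(ec.SECP384R1())
+    theirs = ec.generate_private_key(ec.SECP384R1())
+    shared = ours.exchange(ec.ECDH(), theirs.public_key())
+    # Both sides derive the same shared secret.
+    assert shared == theirs.exchange(ec.ECDH(), ours.public_key())
+
+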
+def _calculate_digest_and_algorithm(
+ data: bytes,
+ algorithm: typing.Union[Prehashed, hashes.HashAlgorithm],
+) -> typing.Tuple[bytes, hashes.HashAlgorithm]:
+ if not isinstance(algorithm, Prehashed):
+ hash_ctx = hashes.Hash(algorithm)
+ hash_ctx.update(data)
+ data = hash_ctx.finalize()
+ else:
+ algorithm = algorithm._algorithm
+
+ if len(data) != algorithm.digest_size:
+ raise ValueError(
+ "The provided data must be the same length as the hash "
+ "algorithm's digest size."
+ )
+
+ return (data, algorithm)
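+
+
+# Editor's sketch (not upstream code): the Prehashed branch above, driven
+# through the public API. Hypothetical, never-called helper.
+def _example_prehashed_sign() -> None:
+    from cryptography.hazmat.primitives import hashes
+    from cryptography.hazmat.primitives.asymmetric import padding, rsa, utils
+
+    key = rsa.generate_private_key(public_exponent=65537, key_size=2048)
+    ctx = hashes.Hash(hashes.SHA256())
+    ctx.update(b"a large message hashed elsewhere")
+    digest = ctx.finalize()
+    # Prehashed skips re-hashing; the digest length is still validated
+    # against the algorithm's digest_size above.
+    key.sign(digest, padding.PKCS1v15(), utils.Prehashed(hashes.SHA256()))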
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/__init__.py b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/__init__.py
new file mode 100644
index 00000000..b5093362
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/__init__.py
@@ -0,0 +1,3 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/__pycache__/__init__.cpython-312.pyc
new file mode 100644
index 00000000..b018cd61
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/__pycache__/__init__.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust.abi3.so b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust.abi3.so
new file mode 100755
index 00000000..de9d24d4
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust.abi3.so differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/__init__.pyi b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/__init__.pyi
new file mode 100644
index 00000000..94a37a20
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/__init__.pyi
@@ -0,0 +1,34 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import types
+import typing
+
+def check_pkcs7_padding(data: bytes) -> bool: ...
+def check_ansix923_padding(data: bytes) -> bool: ...
+
+class ObjectIdentifier:
+ def __init__(self, val: str) -> None: ...
+ @property
+ def dotted_string(self) -> str: ...
+ @property
+ def _name(self) -> str: ...
+
+T = typing.TypeVar("T")
+
+class FixedPool(typing.Generic[T]):
+ def __init__(
+ self,
+ create: typing.Callable[[], T],
+ ) -> None: ...
+ def acquire(self) -> PoolAcquisition[T]: ...
+
+class PoolAcquisition(typing.Generic[T]):
+ def __enter__(self) -> T: ...
+ def __exit__(
+ self,
+ exc_type: typing.Optional[typing.Type[BaseException]],
+ exc_value: typing.Optional[BaseException],
+ exc_tb: typing.Optional[types.TracebackType],
+ ) -> None: ...
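+
+# Editor's note (illustrative): intended use of the pool stubs above:
+#
+#   pool = FixedPool(make_resource)   # `make_resource` is hypothetical
+#   with pool.acquire() as resource:
+#       ...                           # resource returns to the pool on exit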
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/_openssl.pyi b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/_openssl.pyi
new file mode 100644
index 00000000..80100082
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/_openssl.pyi
@@ -0,0 +1,8 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import typing
+
+lib = typing.Any
+ffi = typing.Any
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/asn1.pyi b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/asn1.pyi
new file mode 100644
index 00000000..a8369ba8
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/asn1.pyi
@@ -0,0 +1,16 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import typing
+
+class TestCertificate:
+ not_after_tag: int
+ not_before_tag: int
+ issuer_value_tags: typing.List[int]
+ subject_value_tags: typing.List[int]
+
+def decode_dss_signature(signature: bytes) -> typing.Tuple[int, int]: ...
+def encode_dss_signature(r: int, s: int) -> bytes: ...
+def parse_spki_for_data(data: bytes) -> bytes: ...
+def test_parse_certificate(data: bytes) -> TestCertificate: ...
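+
+# Editor's note (illustrative): decode/encode_dss_signature are surfaced
+# publicly via cryptography.hazmat.primitives.asymmetric.utils, e.g.
+#
+#   r, s = decode_dss_signature(der_sig)   # `der_sig` is hypothetical
+#   assert encode_dss_signature(r, s) == der_sig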
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/exceptions.pyi b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/exceptions.pyi
new file mode 100644
index 00000000..09f46b1e
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/exceptions.pyi
@@ -0,0 +1,17 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+class _Reasons:
+ BACKEND_MISSING_INTERFACE: _Reasons
+ UNSUPPORTED_HASH: _Reasons
+ UNSUPPORTED_CIPHER: _Reasons
+ UNSUPPORTED_PADDING: _Reasons
+ UNSUPPORTED_MGF: _Reasons
+ UNSUPPORTED_PUBLIC_KEY_ALGORITHM: _Reasons
+ UNSUPPORTED_ELLIPTIC_CURVE: _Reasons
+ UNSUPPORTED_SERIALIZATION: _Reasons
+ UNSUPPORTED_X509: _Reasons
+ UNSUPPORTED_EXCHANGE_ALGORITHM: _Reasons
+ UNSUPPORTED_DIFFIE_HELLMAN: _Reasons
+ UNSUPPORTED_MAC: _Reasons
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/ocsp.pyi b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/ocsp.pyi
new file mode 100644
index 00000000..4671eb9b
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/ocsp.pyi
@@ -0,0 +1,25 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import typing
+
+from cryptography.hazmat.primitives import hashes
+from cryptography.hazmat.primitives.asymmetric.types import PrivateKeyTypes
+from cryptography.x509.ocsp import (
+ OCSPRequest,
+ OCSPRequestBuilder,
+ OCSPResponse,
+ OCSPResponseBuilder,
+ OCSPResponseStatus,
+)
+
+def load_der_ocsp_request(data: bytes) -> OCSPRequest: ...
+def load_der_ocsp_response(data: bytes) -> OCSPResponse: ...
+def create_ocsp_request(builder: OCSPRequestBuilder) -> OCSPRequest: ...
+def create_ocsp_response(
+ status: OCSPResponseStatus,
+ builder: typing.Optional[OCSPResponseBuilder],
+ private_key: typing.Optional[PrivateKeyTypes],
+ hash_algorithm: typing.Optional[hashes.HashAlgorithm],
+) -> OCSPResponse: ...
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/__init__.pyi b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/__init__.pyi
new file mode 100644
index 00000000..82f30d20
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/__init__.pyi
@@ -0,0 +1,47 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import typing
+
+from cryptography.hazmat.bindings._rust.openssl import (
+ dh,
+ dsa,
+ ed448,
+ ed25519,
+ hashes,
+ hmac,
+ kdf,
+ poly1305,
+ x448,
+ x25519,
+)
+
+__all__ = [
+ "openssl_version",
+ "raise_openssl_error",
+ "dh",
+ "dsa",
+ "hashes",
+ "hmac",
+ "kdf",
+ "ed448",
+ "ed25519",
+ "poly1305",
+ "x448",
+ "x25519",
+]
+
+def openssl_version() -> int: ...
+def raise_openssl_error() -> typing.NoReturn: ...
+def capture_error_stack() -> typing.List[OpenSSLError]: ...
+def is_fips_enabled() -> bool: ...
+
+class OpenSSLError:
+ @property
+ def lib(self) -> int: ...
+ @property
+ def reason(self) -> int: ...
+ @property
+ def reason_text(self) -> bytes: ...
+ def _lib_reason_match(self, lib: int, reason: int) -> bool: ...
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/dh.pyi b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/dh.pyi
new file mode 100644
index 00000000..bfd005d9
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/dh.pyi
@@ -0,0 +1,22 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from cryptography.hazmat.primitives.asymmetric import dh
+
+MIN_MODULUS_SIZE: int
+
+class DHPrivateKey: ...
+class DHPublicKey: ...
+class DHParameters: ...
+
+def generate_parameters(generator: int, key_size: int) -> dh.DHParameters: ...
+def private_key_from_ptr(ptr: int) -> dh.DHPrivateKey: ...
+def public_key_from_ptr(ptr: int) -> dh.DHPublicKey: ...
+def from_pem_parameters(data: bytes) -> dh.DHParameters: ...
+def from_der_parameters(data: bytes) -> dh.DHParameters: ...
+def from_private_numbers(numbers: dh.DHPrivateNumbers) -> dh.DHPrivateKey: ...
+def from_public_numbers(numbers: dh.DHPublicNumbers) -> dh.DHPublicKey: ...
+def from_parameter_numbers(
+ numbers: dh.DHParameterNumbers,
+) -> dh.DHParameters: ...
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/dsa.pyi b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/dsa.pyi
new file mode 100644
index 00000000..5a56f256
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/dsa.pyi
@@ -0,0 +1,20 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from cryptography.hazmat.primitives.asymmetric import dsa
+
+class DSAPrivateKey: ...
+class DSAPublicKey: ...
+class DSAParameters: ...
+
+def generate_parameters(key_size: int) -> dsa.DSAParameters: ...
+def private_key_from_ptr(ptr: int) -> dsa.DSAPrivateKey: ...
+def public_key_from_ptr(ptr: int) -> dsa.DSAPublicKey: ...
+def from_private_numbers(
+ numbers: dsa.DSAPrivateNumbers,
+) -> dsa.DSAPrivateKey: ...
+def from_public_numbers(numbers: dsa.DSAPublicNumbers) -> dsa.DSAPublicKey: ...
+def from_parameter_numbers(
+ numbers: dsa.DSAParameterNumbers,
+) -> dsa.DSAParameters: ...
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/ed25519.pyi b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/ed25519.pyi
new file mode 100644
index 00000000..c7f127f0
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/ed25519.pyi
@@ -0,0 +1,14 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from cryptography.hazmat.primitives.asymmetric import ed25519
+
+class Ed25519PrivateKey: ...
+class Ed25519PublicKey: ...
+
+def generate_key() -> ed25519.Ed25519PrivateKey: ...
+def private_key_from_ptr(ptr: int) -> ed25519.Ed25519PrivateKey: ...
+def public_key_from_ptr(ptr: int) -> ed25519.Ed25519PublicKey: ...
+def from_private_bytes(data: bytes) -> ed25519.Ed25519PrivateKey: ...
+def from_public_bytes(data: bytes) -> ed25519.Ed25519PublicKey: ...
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/ed448.pyi b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/ed448.pyi
new file mode 100644
index 00000000..1cf5f177
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/ed448.pyi
@@ -0,0 +1,14 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from cryptography.hazmat.primitives.asymmetric import ed448
+
+class Ed448PrivateKey: ...
+class Ed448PublicKey: ...
+
+def generate_key() -> ed448.Ed448PrivateKey: ...
+def private_key_from_ptr(ptr: int) -> ed448.Ed448PrivateKey: ...
+def public_key_from_ptr(ptr: int) -> ed448.Ed448PublicKey: ...
+def from_private_bytes(data: bytes) -> ed448.Ed448PrivateKey: ...
+def from_public_bytes(data: bytes) -> ed448.Ed448PublicKey: ...
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/hashes.pyi b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/hashes.pyi
new file mode 100644
index 00000000..ca5f42a0
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/hashes.pyi
@@ -0,0 +1,17 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import typing
+
+from cryptography.hazmat.primitives import hashes
+
+class Hash(hashes.HashContext):
+ def __init__(
+ self, algorithm: hashes.HashAlgorithm, backend: typing.Any = None
+ ) -> None: ...
+ @property
+ def algorithm(self) -> hashes.HashAlgorithm: ...
+ def update(self, data: bytes) -> None: ...
+ def finalize(self) -> bytes: ...
+ def copy(self) -> Hash: ...
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/hmac.pyi b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/hmac.pyi
new file mode 100644
index 00000000..e38d9b54
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/hmac.pyi
@@ -0,0 +1,21 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import typing
+
+from cryptography.hazmat.primitives import hashes
+
+class HMAC(hashes.HashContext):
+ def __init__(
+ self,
+ key: bytes,
+ algorithm: hashes.HashAlgorithm,
+ backend: typing.Any = None,
+ ) -> None: ...
+ @property
+ def algorithm(self) -> hashes.HashAlgorithm: ...
+ def update(self, data: bytes) -> None: ...
+ def finalize(self) -> bytes: ...
+ def verify(self, signature: bytes) -> None: ...
+ def copy(self) -> HMAC: ...
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/kdf.pyi b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/kdf.pyi
new file mode 100644
index 00000000..034a8fed
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/kdf.pyi
@@ -0,0 +1,22 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from cryptography.hazmat.primitives.hashes import HashAlgorithm
+
+def derive_pbkdf2_hmac(
+ key_material: bytes,
+ algorithm: HashAlgorithm,
+ salt: bytes,
+ iterations: int,
+ length: int,
+) -> bytes: ...
+def derive_scrypt(
+ key_material: bytes,
+ salt: bytes,
+ n: int,
+ r: int,
+ p: int,
+ max_mem: int,
+ length: int,
+) -> bytes: ...
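+
+# Editor's note (illustrative): these functions back the public KDF classes;
+# typical PBKDF2 use looks like:
+#
+#   import os
+#   from cryptography.hazmat.primitives import hashes
+#   from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC
+#
+#   kdf = PBKDF2HMAC(
+#       algorithm=hashes.SHA256(), length=32, salt=os.urandom(16),
+#       iterations=480_000,
+#   )
+#   key = kdf.derive(b"password")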
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/poly1305.pyi b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/poly1305.pyi
new file mode 100644
index 00000000..2e9b0a9e
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/poly1305.pyi
@@ -0,0 +1,13 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+class Poly1305:
+ def __init__(self, key: bytes) -> None: ...
+ @staticmethod
+ def generate_tag(key: bytes, data: bytes) -> bytes: ...
+ @staticmethod
+ def verify_tag(key: bytes, data: bytes, tag: bytes) -> None: ...
+ def update(self, data: bytes) -> None: ...
+ def finalize(self) -> bytes: ...
+ def verify(self, tag: bytes) -> None: ...
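+
+# Editor's note (illustrative): mirrored by the public class
+# cryptography.hazmat.primitives.poly1305.Poly1305; keys are exactly 32
+# bytes and a bad tag raises InvalidSignature:
+#
+#   import os
+#   from cryptography.hazmat.primitives.poly1305 import Poly1305
+#
+#   key = os.urandom(32)
+#   tag = Poly1305.generate_tag(key, b"message")
+#   Poly1305.verify_tag(key, b"message", tag)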
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/x25519.pyi b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/x25519.pyi
new file mode 100644
index 00000000..90f7cbdd
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/x25519.pyi
@@ -0,0 +1,14 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from cryptography.hazmat.primitives.asymmetric import x25519
+
+class X25519PrivateKey: ...
+class X25519PublicKey: ...
+
+def generate_key() -> x25519.X25519PrivateKey: ...
+def private_key_from_ptr(ptr: int) -> x25519.X25519PrivateKey: ...
+def public_key_from_ptr(ptr: int) -> x25519.X25519PublicKey: ...
+def from_private_bytes(data: bytes) -> x25519.X25519PrivateKey: ...
+def from_public_bytes(data: bytes) -> x25519.X25519PublicKey: ...
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/x448.pyi b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/x448.pyi
new file mode 100644
index 00000000..d326c8d2
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/openssl/x448.pyi
@@ -0,0 +1,14 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from cryptography.hazmat.primitives.asymmetric import x448
+
+class X448PrivateKey: ...
+class X448PublicKey: ...
+
+def generate_key() -> x448.X448PrivateKey: ...
+def private_key_from_ptr(ptr: int) -> x448.X448PrivateKey: ...
+def public_key_from_ptr(ptr: int) -> x448.X448PublicKey: ...
+def from_private_bytes(data: bytes) -> x448.X448PrivateKey: ...
+def from_public_bytes(data: bytes) -> x448.X448PublicKey: ...
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/pkcs7.pyi b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/pkcs7.pyi
new file mode 100644
index 00000000..66bd8509
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/pkcs7.pyi
@@ -0,0 +1,15 @@
+import typing
+
+from cryptography import x509
+from cryptography.hazmat.primitives import serialization
+from cryptography.hazmat.primitives.serialization import pkcs7
+
+def serialize_certificates(
+ certs: typing.List[x509.Certificate],
+ encoding: serialization.Encoding,
+) -> bytes: ...
+def sign_and_serialize(
+ builder: pkcs7.PKCS7SignatureBuilder,
+ encoding: serialization.Encoding,
+ options: typing.Iterable[pkcs7.PKCS7Options],
+) -> bytes: ...
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/x509.pyi b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/x509.pyi
new file mode 100644
index 00000000..24b2f5e3
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/_rust/x509.pyi
@@ -0,0 +1,44 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import typing
+
+from cryptography import x509
+from cryptography.hazmat.primitives import hashes
+from cryptography.hazmat.primitives.asymmetric.padding import PSS, PKCS1v15
+from cryptography.hazmat.primitives.asymmetric.types import PrivateKeyTypes
+
+def load_pem_x509_certificate(data: bytes) -> x509.Certificate: ...
+def load_pem_x509_certificates(
+ data: bytes,
+) -> typing.List[x509.Certificate]: ...
+def load_der_x509_certificate(data: bytes) -> x509.Certificate: ...
+def load_pem_x509_crl(data: bytes) -> x509.CertificateRevocationList: ...
+def load_der_x509_crl(data: bytes) -> x509.CertificateRevocationList: ...
+def load_pem_x509_csr(data: bytes) -> x509.CertificateSigningRequest: ...
+def load_der_x509_csr(data: bytes) -> x509.CertificateSigningRequest: ...
+def encode_name_bytes(name: x509.Name) -> bytes: ...
+def encode_extension_value(extension: x509.ExtensionType) -> bytes: ...
+def create_x509_certificate(
+ builder: x509.CertificateBuilder,
+ private_key: PrivateKeyTypes,
+ hash_algorithm: typing.Optional[hashes.HashAlgorithm],
+ padding: typing.Optional[typing.Union[PKCS1v15, PSS]],
+) -> x509.Certificate: ...
+def create_x509_csr(
+ builder: x509.CertificateSigningRequestBuilder,
+ private_key: PrivateKeyTypes,
+ hash_algorithm: typing.Optional[hashes.HashAlgorithm],
+) -> x509.CertificateSigningRequest: ...
+def create_x509_crl(
+ builder: x509.CertificateRevocationListBuilder,
+ private_key: PrivateKeyTypes,
+ hash_algorithm: typing.Optional[hashes.HashAlgorithm],
+) -> x509.CertificateRevocationList: ...
+
+class Sct: ...
+class Certificate: ...
+class RevokedCertificate: ...
+class CertificateRevocationList: ...
+class CertificateSigningRequest: ...
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/openssl/__init__.py b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/openssl/__init__.py
new file mode 100644
index 00000000..b5093362
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/openssl/__init__.py
@@ -0,0 +1,3 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/__init__.cpython-312.pyc
new file mode 100644
index 00000000..72db8cd8
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/__init__.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/_conditional.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/_conditional.cpython-312.pyc
new file mode 100644
index 00000000..ced5cc3c
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/_conditional.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/binding.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/binding.cpython-312.pyc
new file mode 100644
index 00000000..086ac340
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/binding.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/openssl/_conditional.py b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/openssl/_conditional.py
new file mode 100644
index 00000000..5e8ecd04
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/openssl/_conditional.py
@@ -0,0 +1,329 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import typing
+
+
+def cryptography_has_set_cert_cb() -> typing.List[str]:
+ return [
+ "SSL_CTX_set_cert_cb",
+ "SSL_set_cert_cb",
+ ]
+
+
+def cryptography_has_ssl_st() -> typing.List[str]:
+ return [
+ "SSL_ST_BEFORE",
+ "SSL_ST_OK",
+ "SSL_ST_INIT",
+ "SSL_ST_RENEGOTIATE",
+ ]
+
+
+def cryptography_has_tls_st() -> typing.List[str]:
+ return [
+ "TLS_ST_BEFORE",
+ "TLS_ST_OK",
+ ]
+
+
+def cryptography_has_evp_pkey_dhx() -> typing.List[str]:
+ return [
+ "EVP_PKEY_DHX",
+ ]
+
+
+def cryptography_has_mem_functions() -> typing.List[str]:
+ return [
+ "Cryptography_CRYPTO_set_mem_functions",
+ ]
+
+
+def cryptography_has_x509_store_ctx_get_issuer() -> typing.List[str]:
+ return [
+ "X509_STORE_set_get_issuer",
+ ]
+
+
+def cryptography_has_ed448() -> typing.List[str]:
+ return [
+ "EVP_PKEY_ED448",
+ "NID_ED448",
+ ]
+
+
+def cryptography_has_ed25519() -> typing.List[str]:
+ return [
+ "NID_ED25519",
+ "EVP_PKEY_ED25519",
+ ]
+
+
+def cryptography_has_poly1305() -> typing.List[str]:
+ return [
+ "NID_poly1305",
+ "EVP_PKEY_POLY1305",
+ ]
+
+
+def cryptography_has_evp_digestfinal_xof() -> typing.List[str]:
+ return [
+ "EVP_DigestFinalXOF",
+ ]
+
+
+def cryptography_has_fips() -> typing.List[str]:
+ return [
+ "FIPS_mode_set",
+ "FIPS_mode",
+ ]
+
+
+def cryptography_has_ssl_sigalgs() -> typing.List[str]:
+ return [
+ "SSL_CTX_set1_sigalgs_list",
+ ]
+
+
+def cryptography_has_psk() -> typing.List[str]:
+ return [
+ "SSL_CTX_use_psk_identity_hint",
+ "SSL_CTX_set_psk_server_callback",
+ "SSL_CTX_set_psk_client_callback",
+ ]
+
+
+def cryptography_has_psk_tlsv13() -> typing.List[str]:
+ return [
+ "SSL_CTX_set_psk_find_session_callback",
+ "SSL_CTX_set_psk_use_session_callback",
+ "Cryptography_SSL_SESSION_new",
+ "SSL_CIPHER_find",
+ "SSL_SESSION_set1_master_key",
+ "SSL_SESSION_set_cipher",
+ "SSL_SESSION_set_protocol_version",
+ ]
+
+
+def cryptography_has_custom_ext() -> typing.List[str]:
+ return [
+ "SSL_CTX_add_client_custom_ext",
+ "SSL_CTX_add_server_custom_ext",
+ "SSL_extension_supported",
+ ]
+
+
+def cryptography_has_tlsv13_functions() -> typing.List[str]:
+ return [
+ "SSL_VERIFY_POST_HANDSHAKE",
+ "SSL_CTX_set_ciphersuites",
+ "SSL_verify_client_post_handshake",
+ "SSL_CTX_set_post_handshake_auth",
+ "SSL_set_post_handshake_auth",
+ "SSL_SESSION_get_max_early_data",
+ "SSL_write_early_data",
+ "SSL_read_early_data",
+ "SSL_CTX_set_max_early_data",
+ ]
+
+
+def cryptography_has_raw_key() -> typing.List[str]:
+ return [
+ "EVP_PKEY_new_raw_private_key",
+ "EVP_PKEY_new_raw_public_key",
+ "EVP_PKEY_get_raw_private_key",
+ "EVP_PKEY_get_raw_public_key",
+ ]
+
+
+def cryptography_has_engine() -> typing.List[str]:
+ return [
+ "ENGINE_by_id",
+ "ENGINE_init",
+ "ENGINE_finish",
+ "ENGINE_get_default_RAND",
+ "ENGINE_set_default_RAND",
+ "ENGINE_unregister_RAND",
+ "ENGINE_ctrl_cmd",
+ "ENGINE_free",
+ "ENGINE_get_name",
+ "ENGINE_ctrl_cmd_string",
+ "ENGINE_load_builtin_engines",
+ "ENGINE_load_private_key",
+ "ENGINE_load_public_key",
+ "SSL_CTX_set_client_cert_engine",
+ ]
+
+
+def cryptography_has_verified_chain() -> typing.List[str]:
+ return [
+ "SSL_get0_verified_chain",
+ ]
+
+
+def cryptography_has_srtp() -> typing.List[str]:
+ return [
+ "SSL_CTX_set_tlsext_use_srtp",
+ "SSL_set_tlsext_use_srtp",
+ "SSL_get_selected_srtp_profile",
+ ]
+
+
+def cryptography_has_providers() -> typing.List[str]:
+ return [
+ "OSSL_PROVIDER_load",
+ "OSSL_PROVIDER_unload",
+ "ERR_LIB_PROV",
+ "PROV_R_WRONG_FINAL_BLOCK_LENGTH",
+ "PROV_R_BAD_DECRYPT",
+ ]
+
+
+def cryptography_has_op_no_renegotiation() -> typing.List[str]:
+ return [
+ "SSL_OP_NO_RENEGOTIATION",
+ ]
+
+
+def cryptography_has_dtls_get_data_mtu() -> typing.List[str]:
+ return [
+ "DTLS_get_data_mtu",
+ ]
+
+
+def cryptography_has_300_fips() -> typing.List[str]:
+ return [
+ "EVP_default_properties_is_fips_enabled",
+ "EVP_default_properties_enable_fips",
+ ]
+
+
+def cryptography_has_ssl_cookie() -> typing.List[str]:
+ return [
+ "SSL_OP_COOKIE_EXCHANGE",
+ "DTLSv1_listen",
+ "SSL_CTX_set_cookie_generate_cb",
+ "SSL_CTX_set_cookie_verify_cb",
+ ]
+
+
+def cryptography_has_pkcs7_funcs() -> typing.List[str]:
+ return [
+ "SMIME_write_PKCS7",
+ "PEM_write_bio_PKCS7_stream",
+ "PKCS7_sign_add_signer",
+ "PKCS7_final",
+ "PKCS7_verify",
+ "SMIME_read_PKCS7",
+ "PKCS7_get0_signers",
+ ]
+
+
+def cryptography_has_bn_flags() -> typing.List[str]:
+ return [
+ "BN_FLG_CONSTTIME",
+ "BN_set_flags",
+ "BN_prime_checks_for_size",
+ ]
+
+
+def cryptography_has_evp_pkey_dh() -> typing.List[str]:
+ return [
+ "EVP_PKEY_set1_DH",
+ ]
+
+
+def cryptography_has_300_evp_cipher() -> typing.List[str]:
+ return ["EVP_CIPHER_fetch", "EVP_CIPHER_free"]
+
+
+def cryptography_has_unexpected_eof_while_reading() -> typing.List[str]:
+ return ["SSL_R_UNEXPECTED_EOF_WHILE_READING"]
+
+
+def cryptography_has_pkcs12_set_mac() -> typing.List[str]:
+ return ["PKCS12_set_mac"]
+
+
+def cryptography_has_ssl_op_ignore_unexpected_eof() -> typing.List[str]:
+ return [
+ "SSL_OP_IGNORE_UNEXPECTED_EOF",
+ ]
+
+
+def cryptography_has_get_extms_support() -> typing.List[str]:
+ return ["SSL_get_extms_support"]
+
+
+def cryptography_has_evp_pkey_set_peer_ex() -> typing.List[str]:
+ return ["EVP_PKEY_derive_set_peer_ex"]
+
+
+def cryptography_has_evp_aead() -> typing.List[str]:
+ return [
+ "EVP_aead_chacha20_poly1305",
+ "EVP_AEAD_CTX_free",
+ "EVP_AEAD_CTX_seal",
+ "EVP_AEAD_CTX_open",
+ "EVP_AEAD_max_overhead",
+ "Cryptography_EVP_AEAD_CTX_new",
+ ]
+
+
+# This is a mapping of
+# {condition: function-returning-names-dependent-on-that-condition} so we can
+# loop over them and delete unsupported names at runtime. It will be removed
+# when cffi supports #if in cdef. We use functions instead of just a dict of
+# lists so we can use coverage to measure which are used.
+CONDITIONAL_NAMES = {
+ "Cryptography_HAS_SET_CERT_CB": cryptography_has_set_cert_cb,
+ "Cryptography_HAS_SSL_ST": cryptography_has_ssl_st,
+ "Cryptography_HAS_TLS_ST": cryptography_has_tls_st,
+ "Cryptography_HAS_EVP_PKEY_DHX": cryptography_has_evp_pkey_dhx,
+ "Cryptography_HAS_MEM_FUNCTIONS": cryptography_has_mem_functions,
+ "Cryptography_HAS_X509_STORE_CTX_GET_ISSUER": (
+ cryptography_has_x509_store_ctx_get_issuer
+ ),
+ "Cryptography_HAS_ED448": cryptography_has_ed448,
+ "Cryptography_HAS_ED25519": cryptography_has_ed25519,
+ "Cryptography_HAS_POLY1305": cryptography_has_poly1305,
+ "Cryptography_HAS_FIPS": cryptography_has_fips,
+ "Cryptography_HAS_SIGALGS": cryptography_has_ssl_sigalgs,
+ "Cryptography_HAS_PSK": cryptography_has_psk,
+ "Cryptography_HAS_PSK_TLSv1_3": cryptography_has_psk_tlsv13,
+ "Cryptography_HAS_CUSTOM_EXT": cryptography_has_custom_ext,
+ "Cryptography_HAS_TLSv1_3_FUNCTIONS": cryptography_has_tlsv13_functions,
+ "Cryptography_HAS_RAW_KEY": cryptography_has_raw_key,
+ "Cryptography_HAS_EVP_DIGESTFINAL_XOF": (
+ cryptography_has_evp_digestfinal_xof
+ ),
+ "Cryptography_HAS_ENGINE": cryptography_has_engine,
+ "Cryptography_HAS_VERIFIED_CHAIN": cryptography_has_verified_chain,
+ "Cryptography_HAS_SRTP": cryptography_has_srtp,
+ "Cryptography_HAS_PROVIDERS": cryptography_has_providers,
+ "Cryptography_HAS_OP_NO_RENEGOTIATION": (
+ cryptography_has_op_no_renegotiation
+ ),
+ "Cryptography_HAS_DTLS_GET_DATA_MTU": cryptography_has_dtls_get_data_mtu,
+ "Cryptography_HAS_300_FIPS": cryptography_has_300_fips,
+ "Cryptography_HAS_SSL_COOKIE": cryptography_has_ssl_cookie,
+ "Cryptography_HAS_PKCS7_FUNCS": cryptography_has_pkcs7_funcs,
+ "Cryptography_HAS_BN_FLAGS": cryptography_has_bn_flags,
+ "Cryptography_HAS_EVP_PKEY_DH": cryptography_has_evp_pkey_dh,
+ "Cryptography_HAS_300_EVP_CIPHER": cryptography_has_300_evp_cipher,
+ "Cryptography_HAS_UNEXPECTED_EOF_WHILE_READING": (
+ cryptography_has_unexpected_eof_while_reading
+ ),
+ "Cryptography_HAS_PKCS12_SET_MAC": cryptography_has_pkcs12_set_mac,
+ "Cryptography_HAS_SSL_OP_IGNORE_UNEXPECTED_EOF": (
+ cryptography_has_ssl_op_ignore_unexpected_eof
+ ),
+ "Cryptography_HAS_GET_EXTMS_SUPPORT": cryptography_has_get_extms_support,
+ "Cryptography_HAS_EVP_PKEY_SET_PEER_EX": (
+ cryptography_has_evp_pkey_set_peer_ex
+ ),
+    "Cryptography_HAS_EVP_AEAD": cryptography_has_evp_aead,
+}
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/openssl/binding.py b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/openssl/binding.py
new file mode 100644
index 00000000..b50d6315
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/bindings/openssl/binding.py
@@ -0,0 +1,179 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import os
+import sys
+import threading
+import types
+import typing
+import warnings
+
+import cryptography
+from cryptography.exceptions import InternalError
+from cryptography.hazmat.bindings._rust import _openssl, openssl
+from cryptography.hazmat.bindings.openssl._conditional import CONDITIONAL_NAMES
+
+
+def _openssl_assert(
+ lib,
+ ok: bool,
+ errors: typing.Optional[typing.List[openssl.OpenSSLError]] = None,
+) -> None:
+ if not ok:
+ if errors is None:
+ errors = openssl.capture_error_stack()
+
+ raise InternalError(
+ "Unknown OpenSSL error. This error is commonly encountered when "
+ "another library is not cleaning up the OpenSSL error stack. If "
+ "you are using cryptography with another library that uses "
+ "OpenSSL try disabling it before reporting a bug. Otherwise "
+ "please file an issue at https://github.com/pyca/cryptography/"
+ "issues with information on how to reproduce "
+ "this. ({!r})".format(errors),
+ errors,
+ )
+
+
+def _legacy_provider_error(loaded: bool) -> None:
+ if not loaded:
+ raise RuntimeError(
+ "OpenSSL 3.0's legacy provider failed to load. This is a fatal "
+ "error by default, but cryptography supports running without "
+ "legacy algorithms by setting the environment variable "
+ "CRYPTOGRAPHY_OPENSSL_NO_LEGACY. If you did not expect this error,"
+ " you have likely made a mistake with your OpenSSL configuration."
+ )
+
+
+def build_conditional_library(
+ lib: typing.Any,
+ conditional_names: typing.Dict[str, typing.Callable[[], typing.List[str]]],
+) -> typing.Any:
+ conditional_lib = types.ModuleType("lib")
+ conditional_lib._original_lib = lib # type: ignore[attr-defined]
+ excluded_names = set()
+ for condition, names_cb in conditional_names.items():
+ if not getattr(lib, condition):
+ excluded_names.update(names_cb())
+
+ for attr in dir(lib):
+ if attr not in excluded_names:
+ setattr(conditional_lib, attr, getattr(lib, attr))
+
+ return conditional_lib
+
+
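+# Editor's note (illustrative, not upstream): once the Binding class below has
+# built the conditional lib, callers can feature-test at runtime, e.g.
+#
+#   b = Binding()
+#   if b.lib.Cryptography_HAS_ED25519:
+#       ...  # the names gated by that flag are present on b.lib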
+class Binding:
+ """
+ OpenSSL API wrapper.
+ """
+
+ lib: typing.ClassVar = None
+ ffi = _openssl.ffi
+ _lib_loaded = False
+ _init_lock = threading.Lock()
+ _legacy_provider: typing.Any = ffi.NULL
+ _legacy_provider_loaded = False
+ _default_provider: typing.Any = ffi.NULL
+
+ def __init__(self) -> None:
+ self._ensure_ffi_initialized()
+
+ def _enable_fips(self) -> None:
+ # This function enables FIPS mode for OpenSSL 3.0.0 on installs that
+ # have the FIPS provider installed properly.
+ _openssl_assert(self.lib, self.lib.CRYPTOGRAPHY_OPENSSL_300_OR_GREATER)
+ self._base_provider = self.lib.OSSL_PROVIDER_load(
+ self.ffi.NULL, b"base"
+ )
+ _openssl_assert(self.lib, self._base_provider != self.ffi.NULL)
+ self.lib._fips_provider = self.lib.OSSL_PROVIDER_load(
+ self.ffi.NULL, b"fips"
+ )
+ _openssl_assert(self.lib, self.lib._fips_provider != self.ffi.NULL)
+
+ res = self.lib.EVP_default_properties_enable_fips(self.ffi.NULL, 1)
+ _openssl_assert(self.lib, res == 1)
+
+ @classmethod
+ def _ensure_ffi_initialized(cls) -> None:
+ with cls._init_lock:
+ if not cls._lib_loaded:
+ cls.lib = build_conditional_library(
+ _openssl.lib, CONDITIONAL_NAMES
+ )
+ cls._lib_loaded = True
+ # As of OpenSSL 3.0.0 we must register a legacy cipher provider
+ # to get RC2 (needed for junk asymmetric private key
+ # serialization), RC4, Blowfish, IDEA, SEED, etc. These things
+ # are ugly legacy, but we aren't going to get rid of them
+ # any time soon.
+ if cls.lib.CRYPTOGRAPHY_OPENSSL_300_OR_GREATER:
+ if not os.environ.get("CRYPTOGRAPHY_OPENSSL_NO_LEGACY"):
+ cls._legacy_provider = cls.lib.OSSL_PROVIDER_load(
+ cls.ffi.NULL, b"legacy"
+ )
+ cls._legacy_provider_loaded = (
+ cls._legacy_provider != cls.ffi.NULL
+ )
+ _legacy_provider_error(cls._legacy_provider_loaded)
+
+ cls._default_provider = cls.lib.OSSL_PROVIDER_load(
+ cls.ffi.NULL, b"default"
+ )
+ _openssl_assert(
+ cls.lib, cls._default_provider != cls.ffi.NULL
+ )
+
+ @classmethod
+ def init_static_locks(cls) -> None:
+ cls._ensure_ffi_initialized()
+
+
+def _verify_package_version(version: str) -> None:
+ # Occasionally we run into situations where the version of the Python
+ # package does not match the version of the shared object that is loaded.
+ # This may occur in environments where multiple versions of cryptography
+    # are installed and available in the python path. To avoid errors
+    # cropping up later, this code checks that the currently imported
+    # package and the loaded shared object have the same version, raising
+    # an ImportError if they do not.
+ so_package_version = _openssl.ffi.string(
+ _openssl.lib.CRYPTOGRAPHY_PACKAGE_VERSION
+ )
+ if version.encode("ascii") != so_package_version:
+ raise ImportError(
+ "The version of cryptography does not match the loaded "
+ "shared object. This can happen if you have multiple copies of "
+ "cryptography installed in your Python path. Please try creating "
+ "a new virtual environment to resolve this issue. "
+ "Loaded python version: {}, shared object version: {}".format(
+ version, so_package_version
+ )
+ )
+
+ _openssl_assert(
+ _openssl.lib,
+ _openssl.lib.OpenSSL_version_num() == openssl.openssl_version(),
+ )
+
+
+_verify_package_version(cryptography.__version__)
+
+Binding.init_static_locks()
+
+if (
+ sys.platform == "win32"
+ and os.environ.get("PROCESSOR_ARCHITEW6432") is not None
+):
+ warnings.warn(
+ "You are using cryptography on a 32-bit Python on a 64-bit Windows "
+ "Operating System. Cryptography will be significantly faster if you "
+ "switch to using a 64-bit Python.",
+ UserWarning,
+ stacklevel=2,
+ )
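
For orientation, a minimal sketch (not part of the vendored file) of how the Binding class above is typically exercised. It relies only on attributes declared in this diff plus OpenSSL_version_num, a standard libcrypto symbol exposed on `lib`:

    from cryptography.hazmat.bindings.openssl.binding import Binding

    binding = Binding()  # __init__ runs _ensure_ffi_initialized()

    # The conditional module built above omits symbols whose feature
    # flags are false, so plain getattr/hasattr checks are reliable.
    print(hex(binding.lib.OpenSSL_version_num()))
    print("legacy provider loaded:", Binding._legacy_provider_loaded)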
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__init__.py b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__init__.py
new file mode 100644
index 00000000..b5093362
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__init__.py
@@ -0,0 +1,3 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/__init__.cpython-312.pyc
new file mode 100644
index 00000000..578537ee
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/__init__.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/_asymmetric.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/_asymmetric.cpython-312.pyc
new file mode 100644
index 00000000..18677a16
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/_asymmetric.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/_cipheralgorithm.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/_cipheralgorithm.cpython-312.pyc
new file mode 100644
index 00000000..f8c22331
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/_cipheralgorithm.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/_serialization.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/_serialization.cpython-312.pyc
new file mode 100644
index 00000000..4ba69f79
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/_serialization.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/cmac.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/cmac.cpython-312.pyc
new file mode 100644
index 00000000..fead4108
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/cmac.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/constant_time.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/constant_time.cpython-312.pyc
new file mode 100644
index 00000000..cd363868
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/constant_time.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/hashes.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/hashes.cpython-312.pyc
new file mode 100644
index 00000000..f7c1380f
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/hashes.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/hmac.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/hmac.cpython-312.pyc
new file mode 100644
index 00000000..eebe5882
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/hmac.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/keywrap.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/keywrap.cpython-312.pyc
new file mode 100644
index 00000000..3dde4e79
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/keywrap.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/padding.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/padding.cpython-312.pyc
new file mode 100644
index 00000000..0dce985f
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/padding.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/poly1305.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/poly1305.cpython-312.pyc
new file mode 100644
index 00000000..0c2e9eff
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/__pycache__/poly1305.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/_asymmetric.py b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/_asymmetric.py
new file mode 100644
index 00000000..ea55ffdf
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/_asymmetric.py
@@ -0,0 +1,19 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import abc
+
+# This exists to break an import cycle. It is normally accessible from the
+# asymmetric padding module.
+
+
+class AsymmetricPadding(metaclass=abc.ABCMeta):
+ @property
+ @abc.abstractmethod
+ def name(self) -> str:
+ """
+ A string naming this padding (e.g. "PSS", "PKCS1").
+ """
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/_cipheralgorithm.py b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/_cipheralgorithm.py
new file mode 100644
index 00000000..3b880b64
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/_cipheralgorithm.py
@@ -0,0 +1,45 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import abc
+import typing
+
+# This exists to break an import cycle. It is normally accessible from the
+# ciphers module.
+
+
+class CipherAlgorithm(metaclass=abc.ABCMeta):
+ @property
+ @abc.abstractmethod
+ def name(self) -> str:
+ """
+ A string naming this mode (e.g. "AES", "Camellia").
+ """
+
+ @property
+ @abc.abstractmethod
+ def key_sizes(self) -> typing.FrozenSet[int]:
+ """
+ Valid key sizes for this algorithm in bits
+ """
+
+ @property
+ @abc.abstractmethod
+ def key_size(self) -> int:
+ """
+ The size of the key being used as an integer in bits (e.g. 128, 256).
+ """
+
+
+class BlockCipherAlgorithm(CipherAlgorithm):
+ key: bytes
+
+ @property
+ @abc.abstractmethod
+ def block_size(self) -> int:
+ """
+ The size of a block as an integer in bits (e.g. 64, 128).
+ """
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/_serialization.py b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/_serialization.py
new file mode 100644
index 00000000..34f3fbc8
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/_serialization.py
@@ -0,0 +1,170 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import abc
+import typing
+
+from cryptography import utils
+from cryptography.hazmat.primitives.hashes import HashAlgorithm
+
+# This exists to break an import cycle. These classes are normally accessible
+# from the serialization module.
+
+
+class PBES(utils.Enum):
+ PBESv1SHA1And3KeyTripleDESCBC = "PBESv1 using SHA1 and 3-Key TripleDES"
+ PBESv2SHA256AndAES256CBC = "PBESv2 using SHA256 PBKDF2 and AES256 CBC"
+
+
+class Encoding(utils.Enum):
+ PEM = "PEM"
+ DER = "DER"
+ OpenSSH = "OpenSSH"
+ Raw = "Raw"
+ X962 = "ANSI X9.62"
+ SMIME = "S/MIME"
+
+
+class PrivateFormat(utils.Enum):
+ PKCS8 = "PKCS8"
+ TraditionalOpenSSL = "TraditionalOpenSSL"
+ Raw = "Raw"
+ OpenSSH = "OpenSSH"
+ PKCS12 = "PKCS12"
+
+ def encryption_builder(self) -> KeySerializationEncryptionBuilder:
+ if self not in (PrivateFormat.OpenSSH, PrivateFormat.PKCS12):
+ raise ValueError(
+ "encryption_builder only supported with PrivateFormat.OpenSSH"
+ " and PrivateFormat.PKCS12"
+ )
+ return KeySerializationEncryptionBuilder(self)
+
+
+class PublicFormat(utils.Enum):
+ SubjectPublicKeyInfo = "X.509 subjectPublicKeyInfo with PKCS#1"
+ PKCS1 = "Raw PKCS#1"
+ OpenSSH = "OpenSSH"
+ Raw = "Raw"
+ CompressedPoint = "X9.62 Compressed Point"
+ UncompressedPoint = "X9.62 Uncompressed Point"
+
+
+class ParameterFormat(utils.Enum):
+ PKCS3 = "PKCS3"
+
+
+class KeySerializationEncryption(metaclass=abc.ABCMeta):
+ pass
+
+
+class BestAvailableEncryption(KeySerializationEncryption):
+ def __init__(self, password: bytes):
+ if not isinstance(password, bytes) or len(password) == 0:
+ raise ValueError("Password must be 1 or more bytes.")
+
+ self.password = password
+
+
+class NoEncryption(KeySerializationEncryption):
+ pass
+
+
+class KeySerializationEncryptionBuilder:
+ def __init__(
+ self,
+ format: PrivateFormat,
+ *,
+ _kdf_rounds: typing.Optional[int] = None,
+ _hmac_hash: typing.Optional[HashAlgorithm] = None,
+ _key_cert_algorithm: typing.Optional[PBES] = None,
+ ) -> None:
+ self._format = format
+
+ self._kdf_rounds = _kdf_rounds
+ self._hmac_hash = _hmac_hash
+ self._key_cert_algorithm = _key_cert_algorithm
+
+ def kdf_rounds(self, rounds: int) -> KeySerializationEncryptionBuilder:
+ if self._kdf_rounds is not None:
+ raise ValueError("kdf_rounds already set")
+
+ if not isinstance(rounds, int):
+ raise TypeError("kdf_rounds must be an integer")
+
+ if rounds < 1:
+ raise ValueError("kdf_rounds must be a positive integer")
+
+ return KeySerializationEncryptionBuilder(
+ self._format,
+ _kdf_rounds=rounds,
+ _hmac_hash=self._hmac_hash,
+ _key_cert_algorithm=self._key_cert_algorithm,
+ )
+
+ def hmac_hash(
+ self, algorithm: HashAlgorithm
+ ) -> KeySerializationEncryptionBuilder:
+ if self._format is not PrivateFormat.PKCS12:
+ raise TypeError(
+ "hmac_hash only supported with PrivateFormat.PKCS12"
+ )
+
+ if self._hmac_hash is not None:
+ raise ValueError("hmac_hash already set")
+ return KeySerializationEncryptionBuilder(
+ self._format,
+ _kdf_rounds=self._kdf_rounds,
+ _hmac_hash=algorithm,
+ _key_cert_algorithm=self._key_cert_algorithm,
+ )
+
+ def key_cert_algorithm(
+ self, algorithm: PBES
+ ) -> KeySerializationEncryptionBuilder:
+ if self._format is not PrivateFormat.PKCS12:
+ raise TypeError(
+ "key_cert_algorithm only supported with "
+ "PrivateFormat.PKCS12"
+ )
+ if self._key_cert_algorithm is not None:
+ raise ValueError("key_cert_algorithm already set")
+ return KeySerializationEncryptionBuilder(
+ self._format,
+ _kdf_rounds=self._kdf_rounds,
+ _hmac_hash=self._hmac_hash,
+ _key_cert_algorithm=algorithm,
+ )
+
+ def build(self, password: bytes) -> KeySerializationEncryption:
+ if not isinstance(password, bytes) or len(password) == 0:
+ raise ValueError("Password must be 1 or more bytes.")
+
+ return _KeySerializationEncryption(
+ self._format,
+ password,
+ kdf_rounds=self._kdf_rounds,
+ hmac_hash=self._hmac_hash,
+ key_cert_algorithm=self._key_cert_algorithm,
+ )
+
+
+class _KeySerializationEncryption(KeySerializationEncryption):
+ def __init__(
+ self,
+ format: PrivateFormat,
+ password: bytes,
+ *,
+ kdf_rounds: typing.Optional[int],
+ hmac_hash: typing.Optional[HashAlgorithm],
+ key_cert_algorithm: typing.Optional[PBES],
+ ):
+ self._format = format
+ self.password = password
+
+ self._kdf_rounds = kdf_rounds
+ self._hmac_hash = hmac_hash
+ self._key_cert_algorithm = key_cert_algorithm
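
A hedged usage sketch of the builder above, matching the pattern in the cryptography documentation (the rounds count and passphrase are placeholder values):

    from cryptography.hazmat.primitives import hashes
    from cryptography.hazmat.primitives.serialization import (
        PrivateFormat,
        pkcs12,
    )

    encryption = (
        PrivateFormat.PKCS12.encryption_builder()
        .kdf_rounds(50000)
        .hmac_hash(hashes.SHA256())
        .key_cert_algorithm(pkcs12.PBES.PBESv2SHA256AndAES256CBC)
        .build(b"my-passphrase")
    )

Each builder method returns a fresh KeySerializationEncryptionBuilder, so partially configured builders can be shared without mutation.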
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__init__.py b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__init__.py
new file mode 100644
index 00000000..b5093362
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__init__.py
@@ -0,0 +1,3 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/__init__.cpython-312.pyc
new file mode 100644
index 00000000..f6223a0a
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/__init__.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/dh.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/dh.cpython-312.pyc
new file mode 100644
index 00000000..beb70dad
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/dh.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/dsa.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/dsa.cpython-312.pyc
new file mode 100644
index 00000000..cb3a0dce
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/dsa.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ec.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ec.cpython-312.pyc
new file mode 100644
index 00000000..e6fd7d05
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ec.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ed25519.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ed25519.cpython-312.pyc
new file mode 100644
index 00000000..348468aa
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ed25519.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ed448.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ed448.cpython-312.pyc
new file mode 100644
index 00000000..7fcf055e
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ed448.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/padding.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/padding.cpython-312.pyc
new file mode 100644
index 00000000..b8552df8
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/padding.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/rsa.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/rsa.cpython-312.pyc
new file mode 100644
index 00000000..7007cdf0
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/rsa.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/types.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/types.cpython-312.pyc
new file mode 100644
index 00000000..cce16384
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/types.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/utils.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/utils.cpython-312.pyc
new file mode 100644
index 00000000..7cc3abac
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/utils.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/x25519.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/x25519.cpython-312.pyc
new file mode 100644
index 00000000..5d7ef7af
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/x25519.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/x448.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/x448.cpython-312.pyc
new file mode 100644
index 00000000..58f83544
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/x448.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/dh.py b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/dh.py
new file mode 100644
index 00000000..751bcc40
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/dh.py
@@ -0,0 +1,261 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import abc
+import typing
+
+from cryptography.hazmat.bindings._rust import openssl as rust_openssl
+from cryptography.hazmat.primitives import _serialization
+
+
+def generate_parameters(
+ generator: int, key_size: int, backend: typing.Any = None
+) -> DHParameters:
+ from cryptography.hazmat.backends.openssl.backend import backend as ossl
+
+ return ossl.generate_dh_parameters(generator, key_size)
+
+
+class DHParameterNumbers:
+ def __init__(self, p: int, g: int, q: typing.Optional[int] = None) -> None:
+ if not isinstance(p, int) or not isinstance(g, int):
+ raise TypeError("p and g must be integers")
+ if q is not None and not isinstance(q, int):
+ raise TypeError("q must be integer or None")
+
+ if g < 2:
+ raise ValueError("DH generator must be 2 or greater")
+
+ if p.bit_length() < rust_openssl.dh.MIN_MODULUS_SIZE:
+ raise ValueError(
+ f"p (modulus) must be at least "
+ f"{rust_openssl.dh.MIN_MODULUS_SIZE}-bit"
+ )
+
+ self._p = p
+ self._g = g
+ self._q = q
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, DHParameterNumbers):
+ return NotImplemented
+
+ return (
+ self._p == other._p and self._g == other._g and self._q == other._q
+ )
+
+ def parameters(self, backend: typing.Any = None) -> DHParameters:
+ from cryptography.hazmat.backends.openssl.backend import (
+ backend as ossl,
+ )
+
+ return ossl.load_dh_parameter_numbers(self)
+
+ @property
+ def p(self) -> int:
+ return self._p
+
+ @property
+ def g(self) -> int:
+ return self._g
+
+ @property
+ def q(self) -> typing.Optional[int]:
+ return self._q
+
+
+class DHPublicNumbers:
+ def __init__(self, y: int, parameter_numbers: DHParameterNumbers) -> None:
+ if not isinstance(y, int):
+ raise TypeError("y must be an integer.")
+
+ if not isinstance(parameter_numbers, DHParameterNumbers):
+ raise TypeError(
+ "parameters must be an instance of DHParameterNumbers."
+ )
+
+ self._y = y
+ self._parameter_numbers = parameter_numbers
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, DHPublicNumbers):
+ return NotImplemented
+
+ return (
+ self._y == other._y
+ and self._parameter_numbers == other._parameter_numbers
+ )
+
+ def public_key(self, backend: typing.Any = None) -> DHPublicKey:
+ from cryptography.hazmat.backends.openssl.backend import (
+ backend as ossl,
+ )
+
+ return ossl.load_dh_public_numbers(self)
+
+ @property
+ def y(self) -> int:
+ return self._y
+
+ @property
+ def parameter_numbers(self) -> DHParameterNumbers:
+ return self._parameter_numbers
+
+
+class DHPrivateNumbers:
+ def __init__(self, x: int, public_numbers: DHPublicNumbers) -> None:
+ if not isinstance(x, int):
+ raise TypeError("x must be an integer.")
+
+ if not isinstance(public_numbers, DHPublicNumbers):
+ raise TypeError(
+ "public_numbers must be an instance of " "DHPublicNumbers."
+ )
+
+ self._x = x
+ self._public_numbers = public_numbers
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, DHPrivateNumbers):
+ return NotImplemented
+
+ return (
+ self._x == other._x
+ and self._public_numbers == other._public_numbers
+ )
+
+ def private_key(self, backend: typing.Any = None) -> DHPrivateKey:
+ from cryptography.hazmat.backends.openssl.backend import (
+ backend as ossl,
+ )
+
+ return ossl.load_dh_private_numbers(self)
+
+ @property
+ def public_numbers(self) -> DHPublicNumbers:
+ return self._public_numbers
+
+ @property
+ def x(self) -> int:
+ return self._x
+
+
+class DHParameters(metaclass=abc.ABCMeta):
+ @abc.abstractmethod
+ def generate_private_key(self) -> DHPrivateKey:
+ """
+ Generates and returns a DHPrivateKey.
+ """
+
+ @abc.abstractmethod
+ def parameter_bytes(
+ self,
+ encoding: _serialization.Encoding,
+ format: _serialization.ParameterFormat,
+ ) -> bytes:
+ """
+ Returns the parameters serialized as bytes.
+ """
+
+ @abc.abstractmethod
+ def parameter_numbers(self) -> DHParameterNumbers:
+ """
+ Returns a DHParameterNumbers.
+ """
+
+
+DHParametersWithSerialization = DHParameters
+DHParameters.register(rust_openssl.dh.DHParameters)
+
+
+class DHPublicKey(metaclass=abc.ABCMeta):
+ @property
+ @abc.abstractmethod
+ def key_size(self) -> int:
+ """
+ The bit length of the prime modulus.
+ """
+
+ @abc.abstractmethod
+ def parameters(self) -> DHParameters:
+ """
+ The DHParameters object associated with this public key.
+ """
+
+ @abc.abstractmethod
+ def public_numbers(self) -> DHPublicNumbers:
+ """
+ Returns a DHPublicNumbers.
+ """
+
+ @abc.abstractmethod
+ def public_bytes(
+ self,
+ encoding: _serialization.Encoding,
+ format: _serialization.PublicFormat,
+ ) -> bytes:
+ """
+ Returns the key serialized as bytes.
+ """
+
+ @abc.abstractmethod
+ def __eq__(self, other: object) -> bool:
+ """
+ Checks equality.
+ """
+
+
+DHPublicKeyWithSerialization = DHPublicKey
+DHPublicKey.register(rust_openssl.dh.DHPublicKey)
+
+
+class DHPrivateKey(metaclass=abc.ABCMeta):
+ @property
+ @abc.abstractmethod
+ def key_size(self) -> int:
+ """
+ The bit length of the prime modulus.
+ """
+
+ @abc.abstractmethod
+ def public_key(self) -> DHPublicKey:
+ """
+ The DHPublicKey associated with this private key.
+ """
+
+ @abc.abstractmethod
+ def parameters(self) -> DHParameters:
+ """
+ The DHParameters object associated with this private key.
+ """
+
+ @abc.abstractmethod
+ def exchange(self, peer_public_key: DHPublicKey) -> bytes:
+ """
+ Given peer's DHPublicKey, carry out the key exchange and
+ return shared key as bytes.
+ """
+
+ @abc.abstractmethod
+ def private_numbers(self) -> DHPrivateNumbers:
+ """
+ Returns a DHPrivateNumbers.
+ """
+
+ @abc.abstractmethod
+ def private_bytes(
+ self,
+ encoding: _serialization.Encoding,
+ format: _serialization.PrivateFormat,
+ encryption_algorithm: _serialization.KeySerializationEncryption,
+ ) -> bytes:
+ """
+ Returns the key serialized as bytes.
+ """
+
+
+DHPrivateKeyWithSerialization = DHPrivateKey
+DHPrivateKey.register(rust_openssl.dh.DHPrivateKey)
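
An end-to-end sketch of the interfaces above (a sketch, not a hardened example; real code should feed the shared secret through a KDF):

    from cryptography.hazmat.primitives.asymmetric import dh

    # Parameter generation is slow; parameters are normally generated
    # once and reused by both parties.
    parameters = dh.generate_parameters(generator=2, key_size=2048)

    alice = parameters.generate_private_key()
    bob = parameters.generate_private_key()

    shared_a = alice.exchange(bob.public_key())
    shared_b = bob.exchange(alice.public_key())
    assert shared_a == shared_b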
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/dsa.py b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/dsa.py
new file mode 100644
index 00000000..a8c52de4
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/dsa.py
@@ -0,0 +1,299 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import abc
+import typing
+
+from cryptography.hazmat.bindings._rust import openssl as rust_openssl
+from cryptography.hazmat.primitives import _serialization, hashes
+from cryptography.hazmat.primitives.asymmetric import utils as asym_utils
+
+
+class DSAParameters(metaclass=abc.ABCMeta):
+ @abc.abstractmethod
+ def generate_private_key(self) -> DSAPrivateKey:
+ """
+ Generates and returns a DSAPrivateKey.
+ """
+
+ @abc.abstractmethod
+ def parameter_numbers(self) -> DSAParameterNumbers:
+ """
+ Returns a DSAParameterNumbers.
+ """
+
+
+DSAParametersWithNumbers = DSAParameters
+DSAParameters.register(rust_openssl.dsa.DSAParameters)
+
+
+class DSAPrivateKey(metaclass=abc.ABCMeta):
+ @property
+ @abc.abstractmethod
+ def key_size(self) -> int:
+ """
+ The bit length of the prime modulus.
+ """
+
+ @abc.abstractmethod
+ def public_key(self) -> DSAPublicKey:
+ """
+ The DSAPublicKey associated with this private key.
+ """
+
+ @abc.abstractmethod
+ def parameters(self) -> DSAParameters:
+ """
+ The DSAParameters object associated with this private key.
+ """
+
+ @abc.abstractmethod
+ def sign(
+ self,
+ data: bytes,
+ algorithm: typing.Union[asym_utils.Prehashed, hashes.HashAlgorithm],
+ ) -> bytes:
+ """
+ Signs the data
+ """
+
+ @abc.abstractmethod
+ def private_numbers(self) -> DSAPrivateNumbers:
+ """
+ Returns a DSAPrivateNumbers.
+ """
+
+ @abc.abstractmethod
+ def private_bytes(
+ self,
+ encoding: _serialization.Encoding,
+ format: _serialization.PrivateFormat,
+ encryption_algorithm: _serialization.KeySerializationEncryption,
+ ) -> bytes:
+ """
+ Returns the key serialized as bytes.
+ """
+
+
+DSAPrivateKeyWithSerialization = DSAPrivateKey
+DSAPrivateKey.register(rust_openssl.dsa.DSAPrivateKey)
+
+
+class DSAPublicKey(metaclass=abc.ABCMeta):
+ @property
+ @abc.abstractmethod
+ def key_size(self) -> int:
+ """
+ The bit length of the prime modulus.
+ """
+
+ @abc.abstractmethod
+ def parameters(self) -> DSAParameters:
+ """
+ The DSAParameters object associated with this public key.
+ """
+
+ @abc.abstractmethod
+ def public_numbers(self) -> DSAPublicNumbers:
+ """
+ Returns a DSAPublicNumbers.
+ """
+
+ @abc.abstractmethod
+ def public_bytes(
+ self,
+ encoding: _serialization.Encoding,
+ format: _serialization.PublicFormat,
+ ) -> bytes:
+ """
+ Returns the key serialized as bytes.
+ """
+
+ @abc.abstractmethod
+ def verify(
+ self,
+ signature: bytes,
+ data: bytes,
+ algorithm: typing.Union[asym_utils.Prehashed, hashes.HashAlgorithm],
+ ) -> None:
+ """
+ Verifies the signature of the data.
+ """
+
+ @abc.abstractmethod
+ def __eq__(self, other: object) -> bool:
+ """
+ Checks equality.
+ """
+
+
+DSAPublicKeyWithSerialization = DSAPublicKey
+DSAPublicKey.register(rust_openssl.dsa.DSAPublicKey)
+
+
+class DSAParameterNumbers:
+ def __init__(self, p: int, q: int, g: int):
+ if (
+ not isinstance(p, int)
+ or not isinstance(q, int)
+ or not isinstance(g, int)
+ ):
+ raise TypeError(
+ "DSAParameterNumbers p, q, and g arguments must be integers."
+ )
+
+ self._p = p
+ self._q = q
+ self._g = g
+
+ @property
+ def p(self) -> int:
+ return self._p
+
+ @property
+ def q(self) -> int:
+ return self._q
+
+ @property
+ def g(self) -> int:
+ return self._g
+
+ def parameters(self, backend: typing.Any = None) -> DSAParameters:
+ from cryptography.hazmat.backends.openssl.backend import (
+ backend as ossl,
+ )
+
+ return ossl.load_dsa_parameter_numbers(self)
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, DSAParameterNumbers):
+ return NotImplemented
+
+ return self.p == other.p and self.q == other.q and self.g == other.g
+
+ def __repr__(self) -> str:
+ return (
+ "".format(self=self)
+ )
+
+
+class DSAPublicNumbers:
+ def __init__(self, y: int, parameter_numbers: DSAParameterNumbers):
+ if not isinstance(y, int):
+ raise TypeError("DSAPublicNumbers y argument must be an integer.")
+
+ if not isinstance(parameter_numbers, DSAParameterNumbers):
+ raise TypeError(
+ "parameter_numbers must be a DSAParameterNumbers instance."
+ )
+
+ self._y = y
+ self._parameter_numbers = parameter_numbers
+
+ @property
+ def y(self) -> int:
+ return self._y
+
+ @property
+ def parameter_numbers(self) -> DSAParameterNumbers:
+ return self._parameter_numbers
+
+ def public_key(self, backend: typing.Any = None) -> DSAPublicKey:
+ from cryptography.hazmat.backends.openssl.backend import (
+ backend as ossl,
+ )
+
+ return ossl.load_dsa_public_numbers(self)
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, DSAPublicNumbers):
+ return NotImplemented
+
+ return (
+ self.y == other.y
+ and self.parameter_numbers == other.parameter_numbers
+ )
+
+ def __repr__(self) -> str:
+ return (
+ "".format(self=self)
+ )
+
+
+class DSAPrivateNumbers:
+ def __init__(self, x: int, public_numbers: DSAPublicNumbers):
+ if not isinstance(x, int):
+ raise TypeError("DSAPrivateNumbers x argument must be an integer.")
+
+ if not isinstance(public_numbers, DSAPublicNumbers):
+ raise TypeError(
+ "public_numbers must be a DSAPublicNumbers instance."
+ )
+ self._public_numbers = public_numbers
+ self._x = x
+
+ @property
+ def x(self) -> int:
+ return self._x
+
+ @property
+ def public_numbers(self) -> DSAPublicNumbers:
+ return self._public_numbers
+
+ def private_key(self, backend: typing.Any = None) -> DSAPrivateKey:
+ from cryptography.hazmat.backends.openssl.backend import (
+ backend as ossl,
+ )
+
+ return ossl.load_dsa_private_numbers(self)
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, DSAPrivateNumbers):
+ return NotImplemented
+
+ return (
+ self.x == other.x and self.public_numbers == other.public_numbers
+ )
+
+
+def generate_parameters(
+ key_size: int, backend: typing.Any = None
+) -> DSAParameters:
+ from cryptography.hazmat.backends.openssl.backend import backend as ossl
+
+ return ossl.generate_dsa_parameters(key_size)
+
+
+def generate_private_key(
+ key_size: int, backend: typing.Any = None
+) -> DSAPrivateKey:
+ from cryptography.hazmat.backends.openssl.backend import backend as ossl
+
+ return ossl.generate_dsa_private_key_and_parameters(key_size)
+
+
+def _check_dsa_parameters(parameters: DSAParameterNumbers) -> None:
+ if parameters.p.bit_length() not in [1024, 2048, 3072, 4096]:
+ raise ValueError(
+ "p must be exactly 1024, 2048, 3072, or 4096 bits long"
+ )
+ if parameters.q.bit_length() not in [160, 224, 256]:
+ raise ValueError("q must be exactly 160, 224, or 256 bits long")
+
+ if not (1 < parameters.g < parameters.p):
+ raise ValueError("g, p don't satisfy 1 < g < p.")
+
+
+def _check_dsa_private_numbers(numbers: DSAPrivateNumbers) -> None:
+ parameters = numbers.public_numbers.parameter_numbers
+ _check_dsa_parameters(parameters)
+ if numbers.x <= 0 or numbers.x >= parameters.q:
+ raise ValueError("x must be > 0 and < q.")
+
+ if numbers.public_numbers.y != pow(parameters.g, numbers.x, parameters.p):
+ raise ValueError("y must be equal to (g ** x % p).")
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/ec.py b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/ec.py
new file mode 100644
index 00000000..ddfaabf4
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/ec.py
@@ -0,0 +1,490 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import abc
+import typing
+
+from cryptography import utils
+from cryptography.hazmat._oid import ObjectIdentifier
+from cryptography.hazmat.primitives import _serialization, hashes
+from cryptography.hazmat.primitives.asymmetric import utils as asym_utils
+
+
+class EllipticCurveOID:
+ SECP192R1 = ObjectIdentifier("1.2.840.10045.3.1.1")
+ SECP224R1 = ObjectIdentifier("1.3.132.0.33")
+ SECP256K1 = ObjectIdentifier("1.3.132.0.10")
+ SECP256R1 = ObjectIdentifier("1.2.840.10045.3.1.7")
+ SECP384R1 = ObjectIdentifier("1.3.132.0.34")
+ SECP521R1 = ObjectIdentifier("1.3.132.0.35")
+ BRAINPOOLP256R1 = ObjectIdentifier("1.3.36.3.3.2.8.1.1.7")
+ BRAINPOOLP384R1 = ObjectIdentifier("1.3.36.3.3.2.8.1.1.11")
+ BRAINPOOLP512R1 = ObjectIdentifier("1.3.36.3.3.2.8.1.1.13")
+ SECT163K1 = ObjectIdentifier("1.3.132.0.1")
+ SECT163R2 = ObjectIdentifier("1.3.132.0.15")
+ SECT233K1 = ObjectIdentifier("1.3.132.0.26")
+ SECT233R1 = ObjectIdentifier("1.3.132.0.27")
+ SECT283K1 = ObjectIdentifier("1.3.132.0.16")
+ SECT283R1 = ObjectIdentifier("1.3.132.0.17")
+ SECT409K1 = ObjectIdentifier("1.3.132.0.36")
+ SECT409R1 = ObjectIdentifier("1.3.132.0.37")
+ SECT571K1 = ObjectIdentifier("1.3.132.0.38")
+ SECT571R1 = ObjectIdentifier("1.3.132.0.39")
+
+
+class EllipticCurve(metaclass=abc.ABCMeta):
+ @property
+ @abc.abstractmethod
+ def name(self) -> str:
+ """
+ The name of the curve. e.g. secp256r1.
+ """
+
+ @property
+ @abc.abstractmethod
+ def key_size(self) -> int:
+ """
+ Bit size of a secret scalar for the curve.
+ """
+
+
+class EllipticCurveSignatureAlgorithm(metaclass=abc.ABCMeta):
+ @property
+ @abc.abstractmethod
+ def algorithm(
+ self,
+ ) -> typing.Union[asym_utils.Prehashed, hashes.HashAlgorithm]:
+ """
+ The digest algorithm used with this signature.
+ """
+
+
+class EllipticCurvePrivateKey(metaclass=abc.ABCMeta):
+ @abc.abstractmethod
+ def exchange(
+ self, algorithm: ECDH, peer_public_key: EllipticCurvePublicKey
+ ) -> bytes:
+ """
+ Performs a key exchange operation using the provided algorithm with the
+ provided peer's public key.
+ """
+
+ @abc.abstractmethod
+ def public_key(self) -> EllipticCurvePublicKey:
+ """
+ The EllipticCurvePublicKey for this private key.
+ """
+
+ @property
+ @abc.abstractmethod
+ def curve(self) -> EllipticCurve:
+ """
+ The EllipticCurve that this key is on.
+ """
+
+ @property
+ @abc.abstractmethod
+ def key_size(self) -> int:
+ """
+ Bit size of a secret scalar for the curve.
+ """
+
+ @abc.abstractmethod
+ def sign(
+ self,
+ data: bytes,
+ signature_algorithm: EllipticCurveSignatureAlgorithm,
+ ) -> bytes:
+ """
+ Signs the data
+ """
+
+ @abc.abstractmethod
+ def private_numbers(self) -> EllipticCurvePrivateNumbers:
+ """
+ Returns an EllipticCurvePrivateNumbers.
+ """
+
+ @abc.abstractmethod
+ def private_bytes(
+ self,
+ encoding: _serialization.Encoding,
+ format: _serialization.PrivateFormat,
+ encryption_algorithm: _serialization.KeySerializationEncryption,
+ ) -> bytes:
+ """
+ Returns the key serialized as bytes.
+ """
+
+
+EllipticCurvePrivateKeyWithSerialization = EllipticCurvePrivateKey
+
+
+class EllipticCurvePublicKey(metaclass=abc.ABCMeta):
+ @property
+ @abc.abstractmethod
+ def curve(self) -> EllipticCurve:
+ """
+ The EllipticCurve that this key is on.
+ """
+
+ @property
+ @abc.abstractmethod
+ def key_size(self) -> int:
+ """
+ Bit size of a secret scalar for the curve.
+ """
+
+ @abc.abstractmethod
+ def public_numbers(self) -> EllipticCurvePublicNumbers:
+ """
+ Returns an EllipticCurvePublicNumbers.
+ """
+
+ @abc.abstractmethod
+ def public_bytes(
+ self,
+ encoding: _serialization.Encoding,
+ format: _serialization.PublicFormat,
+ ) -> bytes:
+ """
+ Returns the key serialized as bytes.
+ """
+
+ @abc.abstractmethod
+ def verify(
+ self,
+ signature: bytes,
+ data: bytes,
+ signature_algorithm: EllipticCurveSignatureAlgorithm,
+ ) -> None:
+ """
+ Verifies the signature of the data.
+ """
+
+ @classmethod
+ def from_encoded_point(
+ cls, curve: EllipticCurve, data: bytes
+ ) -> EllipticCurvePublicKey:
+ utils._check_bytes("data", data)
+
+ if not isinstance(curve, EllipticCurve):
+ raise TypeError("curve must be an EllipticCurve instance")
+
+ if len(data) == 0:
+ raise ValueError("data must not be an empty byte string")
+
+ if data[0] not in [0x02, 0x03, 0x04]:
+ raise ValueError("Unsupported elliptic curve point type")
+
+ from cryptography.hazmat.backends.openssl.backend import backend
+
+ return backend.load_elliptic_curve_public_bytes(curve, data)
+
+ @abc.abstractmethod
+ def __eq__(self, other: object) -> bool:
+ """
+ Checks equality.
+ """
+
+
+EllipticCurvePublicKeyWithSerialization = EllipticCurvePublicKey
+
+
+class SECT571R1(EllipticCurve):
+ name = "sect571r1"
+ key_size = 570
+
+
+class SECT409R1(EllipticCurve):
+ name = "sect409r1"
+ key_size = 409
+
+
+class SECT283R1(EllipticCurve):
+ name = "sect283r1"
+ key_size = 283
+
+
+class SECT233R1(EllipticCurve):
+ name = "sect233r1"
+ key_size = 233
+
+
+class SECT163R2(EllipticCurve):
+ name = "sect163r2"
+ key_size = 163
+
+
+class SECT571K1(EllipticCurve):
+ name = "sect571k1"
+ key_size = 571
+
+
+class SECT409K1(EllipticCurve):
+ name = "sect409k1"
+ key_size = 409
+
+
+class SECT283K1(EllipticCurve):
+ name = "sect283k1"
+ key_size = 283
+
+
+class SECT233K1(EllipticCurve):
+ name = "sect233k1"
+ key_size = 233
+
+
+class SECT163K1(EllipticCurve):
+ name = "sect163k1"
+ key_size = 163
+
+
+class SECP521R1(EllipticCurve):
+ name = "secp521r1"
+ key_size = 521
+
+
+class SECP384R1(EllipticCurve):
+ name = "secp384r1"
+ key_size = 384
+
+
+class SECP256R1(EllipticCurve):
+ name = "secp256r1"
+ key_size = 256
+
+
+class SECP256K1(EllipticCurve):
+ name = "secp256k1"
+ key_size = 256
+
+
+class SECP224R1(EllipticCurve):
+ name = "secp224r1"
+ key_size = 224
+
+
+class SECP192R1(EllipticCurve):
+ name = "secp192r1"
+ key_size = 192
+
+
+class BrainpoolP256R1(EllipticCurve):
+ name = "brainpoolP256r1"
+ key_size = 256
+
+
+class BrainpoolP384R1(EllipticCurve):
+ name = "brainpoolP384r1"
+ key_size = 384
+
+
+class BrainpoolP512R1(EllipticCurve):
+ name = "brainpoolP512r1"
+ key_size = 512
+
+
+_CURVE_TYPES: typing.Dict[str, typing.Type[EllipticCurve]] = {
+ "prime192v1": SECP192R1,
+ "prime256v1": SECP256R1,
+ "secp192r1": SECP192R1,
+ "secp224r1": SECP224R1,
+ "secp256r1": SECP256R1,
+ "secp384r1": SECP384R1,
+ "secp521r1": SECP521R1,
+ "secp256k1": SECP256K1,
+ "sect163k1": SECT163K1,
+ "sect233k1": SECT233K1,
+ "sect283k1": SECT283K1,
+ "sect409k1": SECT409K1,
+ "sect571k1": SECT571K1,
+ "sect163r2": SECT163R2,
+ "sect233r1": SECT233R1,
+ "sect283r1": SECT283R1,
+ "sect409r1": SECT409R1,
+ "sect571r1": SECT571R1,
+ "brainpoolP256r1": BrainpoolP256R1,
+ "brainpoolP384r1": BrainpoolP384R1,
+ "brainpoolP512r1": BrainpoolP512R1,
+}
+
+
+class ECDSA(EllipticCurveSignatureAlgorithm):
+ def __init__(
+ self,
+ algorithm: typing.Union[asym_utils.Prehashed, hashes.HashAlgorithm],
+ ):
+ self._algorithm = algorithm
+
+ @property
+ def algorithm(
+ self,
+ ) -> typing.Union[asym_utils.Prehashed, hashes.HashAlgorithm]:
+ return self._algorithm
+
+
+def generate_private_key(
+ curve: EllipticCurve, backend: typing.Any = None
+) -> EllipticCurvePrivateKey:
+ from cryptography.hazmat.backends.openssl.backend import backend as ossl
+
+ return ossl.generate_elliptic_curve_private_key(curve)
+
+
+def derive_private_key(
+ private_value: int,
+ curve: EllipticCurve,
+ backend: typing.Any = None,
+) -> EllipticCurvePrivateKey:
+ from cryptography.hazmat.backends.openssl.backend import backend as ossl
+
+ if not isinstance(private_value, int):
+ raise TypeError("private_value must be an integer type.")
+
+ if private_value <= 0:
+ raise ValueError("private_value must be a positive integer.")
+
+ if not isinstance(curve, EllipticCurve):
+ raise TypeError("curve must provide the EllipticCurve interface.")
+
+ return ossl.derive_elliptic_curve_private_key(private_value, curve)
+
+
+class EllipticCurvePublicNumbers:
+ def __init__(self, x: int, y: int, curve: EllipticCurve):
+ if not isinstance(x, int) or not isinstance(y, int):
+ raise TypeError("x and y must be integers.")
+
+ if not isinstance(curve, EllipticCurve):
+ raise TypeError("curve must provide the EllipticCurve interface.")
+
+ self._y = y
+ self._x = x
+ self._curve = curve
+
+ def public_key(self, backend: typing.Any = None) -> EllipticCurvePublicKey:
+ from cryptography.hazmat.backends.openssl.backend import (
+ backend as ossl,
+ )
+
+ return ossl.load_elliptic_curve_public_numbers(self)
+
+ @property
+ def curve(self) -> EllipticCurve:
+ return self._curve
+
+ @property
+ def x(self) -> int:
+ return self._x
+
+ @property
+ def y(self) -> int:
+ return self._y
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, EllipticCurvePublicNumbers):
+ return NotImplemented
+
+ return (
+ self.x == other.x
+ and self.y == other.y
+ and self.curve.name == other.curve.name
+ and self.curve.key_size == other.curve.key_size
+ )
+
+ def __hash__(self) -> int:
+ return hash((self.x, self.y, self.curve.name, self.curve.key_size))
+
+ def __repr__(self) -> str:
+ return (
+ "".format(self)
+ )
+
+
+class EllipticCurvePrivateNumbers:
+ def __init__(
+ self, private_value: int, public_numbers: EllipticCurvePublicNumbers
+ ):
+ if not isinstance(private_value, int):
+ raise TypeError("private_value must be an integer.")
+
+ if not isinstance(public_numbers, EllipticCurvePublicNumbers):
+ raise TypeError(
+ "public_numbers must be an EllipticCurvePublicNumbers "
+ "instance."
+ )
+
+ self._private_value = private_value
+ self._public_numbers = public_numbers
+
+ def private_key(
+ self, backend: typing.Any = None
+ ) -> EllipticCurvePrivateKey:
+ from cryptography.hazmat.backends.openssl.backend import (
+ backend as ossl,
+ )
+
+ return ossl.load_elliptic_curve_private_numbers(self)
+
+ @property
+ def private_value(self) -> int:
+ return self._private_value
+
+ @property
+ def public_numbers(self) -> EllipticCurvePublicNumbers:
+ return self._public_numbers
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, EllipticCurvePrivateNumbers):
+ return NotImplemented
+
+ return (
+ self.private_value == other.private_value
+ and self.public_numbers == other.public_numbers
+ )
+
+ def __hash__(self) -> int:
+ return hash((self.private_value, self.public_numbers))
+
+
+class ECDH:
+ pass
+
+
+_OID_TO_CURVE = {
+ EllipticCurveOID.SECP192R1: SECP192R1,
+ EllipticCurveOID.SECP224R1: SECP224R1,
+ EllipticCurveOID.SECP256K1: SECP256K1,
+ EllipticCurveOID.SECP256R1: SECP256R1,
+ EllipticCurveOID.SECP384R1: SECP384R1,
+ EllipticCurveOID.SECP521R1: SECP521R1,
+ EllipticCurveOID.BRAINPOOLP256R1: BrainpoolP256R1,
+ EllipticCurveOID.BRAINPOOLP384R1: BrainpoolP384R1,
+ EllipticCurveOID.BRAINPOOLP512R1: BrainpoolP512R1,
+ EllipticCurveOID.SECT163K1: SECT163K1,
+ EllipticCurveOID.SECT163R2: SECT163R2,
+ EllipticCurveOID.SECT233K1: SECT233K1,
+ EllipticCurveOID.SECT233R1: SECT233R1,
+ EllipticCurveOID.SECT283K1: SECT283K1,
+ EllipticCurveOID.SECT283R1: SECT283R1,
+ EllipticCurveOID.SECT409K1: SECT409K1,
+ EllipticCurveOID.SECT409R1: SECT409R1,
+ EllipticCurveOID.SECT571K1: SECT571K1,
+ EllipticCurveOID.SECT571R1: SECT571R1,
+}
+
+
+def get_curve_for_oid(oid: ObjectIdentifier) -> typing.Type[EllipticCurve]:
+ try:
+ return _OID_TO_CURVE[oid]
+ except KeyError:
+ raise LookupError(
+ "The provided object identifier has no matching elliptic "
+ "curve class"
+ )
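
A usage sketch covering both roles of this module, ECDSA signatures and ECDH key agreement (SECP256R1 is an arbitrary choice here; both peers must use the same curve):

    from cryptography.hazmat.primitives import hashes
    from cryptography.hazmat.primitives.asymmetric import ec

    private_key = ec.generate_private_key(ec.SECP256R1())
    signature = private_key.sign(b"message", ec.ECDSA(hashes.SHA256()))
    private_key.public_key().verify(
        signature, b"message", ec.ECDSA(hashes.SHA256())
    )

    peer = ec.generate_private_key(ec.SECP256R1())
    shared_secret = private_key.exchange(ec.ECDH(), peer.public_key())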
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/ed25519.py b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/ed25519.py
new file mode 100644
index 00000000..f26e54d2
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/ed25519.py
@@ -0,0 +1,118 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import abc
+
+from cryptography.exceptions import UnsupportedAlgorithm, _Reasons
+from cryptography.hazmat.bindings._rust import openssl as rust_openssl
+from cryptography.hazmat.primitives import _serialization
+
+
+class Ed25519PublicKey(metaclass=abc.ABCMeta):
+ @classmethod
+ def from_public_bytes(cls, data: bytes) -> Ed25519PublicKey:
+ from cryptography.hazmat.backends.openssl.backend import backend
+
+ if not backend.ed25519_supported():
+ raise UnsupportedAlgorithm(
+ "ed25519 is not supported by this version of OpenSSL.",
+ _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM,
+ )
+
+ return backend.ed25519_load_public_bytes(data)
+
+ @abc.abstractmethod
+ def public_bytes(
+ self,
+ encoding: _serialization.Encoding,
+ format: _serialization.PublicFormat,
+ ) -> bytes:
+ """
+ The serialized bytes of the public key.
+ """
+
+ @abc.abstractmethod
+ def public_bytes_raw(self) -> bytes:
+ """
+ The raw bytes of the public key.
+ Equivalent to public_bytes(Raw, Raw).
+ """
+
+ @abc.abstractmethod
+ def verify(self, signature: bytes, data: bytes) -> None:
+ """
+ Verify the signature.
+ """
+
+ @abc.abstractmethod
+ def __eq__(self, other: object) -> bool:
+ """
+ Checks equality.
+ """
+
+
+if hasattr(rust_openssl, "ed25519"):
+ Ed25519PublicKey.register(rust_openssl.ed25519.Ed25519PublicKey)
+
+
+class Ed25519PrivateKey(metaclass=abc.ABCMeta):
+ @classmethod
+ def generate(cls) -> Ed25519PrivateKey:
+ from cryptography.hazmat.backends.openssl.backend import backend
+
+ if not backend.ed25519_supported():
+ raise UnsupportedAlgorithm(
+ "ed25519 is not supported by this version of OpenSSL.",
+ _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM,
+ )
+
+ return backend.ed25519_generate_key()
+
+ @classmethod
+ def from_private_bytes(cls, data: bytes) -> Ed25519PrivateKey:
+ from cryptography.hazmat.backends.openssl.backend import backend
+
+ if not backend.ed25519_supported():
+ raise UnsupportedAlgorithm(
+ "ed25519 is not supported by this version of OpenSSL.",
+ _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM,
+ )
+
+ return backend.ed25519_load_private_bytes(data)
+
+ @abc.abstractmethod
+ def public_key(self) -> Ed25519PublicKey:
+ """
+ The Ed25519PublicKey derived from the private key.
+ """
+
+ @abc.abstractmethod
+ def private_bytes(
+ self,
+ encoding: _serialization.Encoding,
+ format: _serialization.PrivateFormat,
+ encryption_algorithm: _serialization.KeySerializationEncryption,
+ ) -> bytes:
+ """
+ The serialized bytes of the private key.
+ """
+
+ @abc.abstractmethod
+ def private_bytes_raw(self) -> bytes:
+ """
+ The raw bytes of the private key.
+ Equivalent to private_bytes(Raw, Raw, NoEncryption()).
+ """
+
+ @abc.abstractmethod
+ def sign(self, data: bytes) -> bytes:
+ """
+ Signs the data.
+ """
+
+
+if hasattr(rust_openssl, "x25519"):
+ Ed25519PrivateKey.register(rust_openssl.ed25519.Ed25519PrivateKey)
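
A usage sketch: Ed25519 takes no hash or padding parameters, and public keys round-trip through the 32-byte raw encoding declared above:

    from cryptography.hazmat.primitives.asymmetric import ed25519

    private_key = ed25519.Ed25519PrivateKey.generate()
    signature = private_key.sign(b"message")
    private_key.public_key().verify(signature, b"message")

    raw = private_key.public_key().public_bytes_raw()  # 32 bytes
    restored = ed25519.Ed25519PublicKey.from_public_bytes(raw)
    restored.verify(signature, b"message")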
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/ed448.py b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/ed448.py
new file mode 100644
index 00000000..a9a34b25
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/ed448.py
@@ -0,0 +1,117 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import abc
+
+from cryptography.exceptions import UnsupportedAlgorithm, _Reasons
+from cryptography.hazmat.bindings._rust import openssl as rust_openssl
+from cryptography.hazmat.primitives import _serialization
+
+
+class Ed448PublicKey(metaclass=abc.ABCMeta):
+ @classmethod
+ def from_public_bytes(cls, data: bytes) -> Ed448PublicKey:
+ from cryptography.hazmat.backends.openssl.backend import backend
+
+ if not backend.ed448_supported():
+ raise UnsupportedAlgorithm(
+ "ed448 is not supported by this version of OpenSSL.",
+ _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM,
+ )
+
+ return backend.ed448_load_public_bytes(data)
+
+ @abc.abstractmethod
+ def public_bytes(
+ self,
+ encoding: _serialization.Encoding,
+ format: _serialization.PublicFormat,
+ ) -> bytes:
+ """
+ The serialized bytes of the public key.
+ """
+
+ @abc.abstractmethod
+ def public_bytes_raw(self) -> bytes:
+ """
+ The raw bytes of the public key.
+ Equivalent to public_bytes(Raw, Raw).
+ """
+
+ @abc.abstractmethod
+ def verify(self, signature: bytes, data: bytes) -> None:
+ """
+ Verify the signature.
+ """
+
+ @abc.abstractmethod
+ def __eq__(self, other: object) -> bool:
+ """
+ Checks equality.
+ """
+
+
+if hasattr(rust_openssl, "ed448"):
+ Ed448PublicKey.register(rust_openssl.ed448.Ed448PublicKey)
+
+
+class Ed448PrivateKey(metaclass=abc.ABCMeta):
+ @classmethod
+ def generate(cls) -> Ed448PrivateKey:
+ from cryptography.hazmat.backends.openssl.backend import backend
+
+ if not backend.ed448_supported():
+ raise UnsupportedAlgorithm(
+ "ed448 is not supported by this version of OpenSSL.",
+ _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM,
+ )
+ return backend.ed448_generate_key()
+
+ @classmethod
+ def from_private_bytes(cls, data: bytes) -> Ed448PrivateKey:
+ from cryptography.hazmat.backends.openssl.backend import backend
+
+ if not backend.ed448_supported():
+ raise UnsupportedAlgorithm(
+ "ed448 is not supported by this version of OpenSSL.",
+ _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM,
+ )
+
+ return backend.ed448_load_private_bytes(data)
+
+ @abc.abstractmethod
+ def public_key(self) -> Ed448PublicKey:
+ """
+ The Ed448PublicKey derived from the private key.
+ """
+
+ @abc.abstractmethod
+ def sign(self, data: bytes) -> bytes:
+ """
+ Signs the data.
+ """
+
+ @abc.abstractmethod
+ def private_bytes(
+ self,
+ encoding: _serialization.Encoding,
+ format: _serialization.PrivateFormat,
+ encryption_algorithm: _serialization.KeySerializationEncryption,
+ ) -> bytes:
+ """
+ The serialized bytes of the private key.
+ """
+
+ @abc.abstractmethod
+ def private_bytes_raw(self) -> bytes:
+ """
+ The raw bytes of the private key.
+ Equivalent to private_bytes(Raw, Raw, NoEncryption()).
+ """
+
+
+if hasattr(rust_openssl, "x448"):
+ Ed448PrivateKey.register(rust_openssl.ed448.Ed448PrivateKey)
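
The Ed448 API mirrors ed25519.py above; only the key types (and the 57-byte raw encoding) differ:

    from cryptography.hazmat.primitives.asymmetric import ed448

    private_key = ed448.Ed448PrivateKey.generate()
    signature = private_key.sign(b"message")
    private_key.public_key().verify(signature, b"message")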
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/padding.py b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/padding.py
new file mode 100644
index 00000000..7198808e
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/padding.py
@@ -0,0 +1,102 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import abc
+import typing
+
+from cryptography.hazmat.primitives import hashes
+from cryptography.hazmat.primitives._asymmetric import (
+ AsymmetricPadding as AsymmetricPadding,
+)
+from cryptography.hazmat.primitives.asymmetric import rsa
+
+
+class PKCS1v15(AsymmetricPadding):
+ name = "EMSA-PKCS1-v1_5"
+
+
+class _MaxLength:
+ "Sentinel value for `MAX_LENGTH`."
+
+
+class _Auto:
+ "Sentinel value for `AUTO`."
+
+
+class _DigestLength:
+ "Sentinel value for `DIGEST_LENGTH`."
+
+
+class PSS(AsymmetricPadding):
+ MAX_LENGTH = _MaxLength()
+ AUTO = _Auto()
+ DIGEST_LENGTH = _DigestLength()
+ name = "EMSA-PSS"
+ _salt_length: typing.Union[int, _MaxLength, _Auto, _DigestLength]
+
+ def __init__(
+ self,
+ mgf: MGF,
+ salt_length: typing.Union[int, _MaxLength, _Auto, _DigestLength],
+ ) -> None:
+ self._mgf = mgf
+
+ if not isinstance(
+ salt_length, (int, _MaxLength, _Auto, _DigestLength)
+ ):
+ raise TypeError(
+ "salt_length must be an integer, MAX_LENGTH, "
+ "DIGEST_LENGTH, or AUTO"
+ )
+
+ if isinstance(salt_length, int) and salt_length < 0:
+ raise ValueError("salt_length must be zero or greater.")
+
+ self._salt_length = salt_length
+
+
+class OAEP(AsymmetricPadding):
+ name = "EME-OAEP"
+
+ def __init__(
+ self,
+ mgf: MGF,
+ algorithm: hashes.HashAlgorithm,
+ label: typing.Optional[bytes],
+ ):
+ if not isinstance(algorithm, hashes.HashAlgorithm):
+ raise TypeError("Expected instance of hashes.HashAlgorithm.")
+
+ self._mgf = mgf
+ self._algorithm = algorithm
+ self._label = label
+
+
+class MGF(metaclass=abc.ABCMeta):
+ _algorithm: hashes.HashAlgorithm
+
+
+class MGF1(MGF):
+ MAX_LENGTH = _MaxLength()
+
+ def __init__(self, algorithm: hashes.HashAlgorithm):
+ if not isinstance(algorithm, hashes.HashAlgorithm):
+ raise TypeError("Expected instance of hashes.HashAlgorithm.")
+
+ self._algorithm = algorithm
+
+
+def calculate_max_pss_salt_length(
+ key: typing.Union[rsa.RSAPrivateKey, rsa.RSAPublicKey],
+ hash_algorithm: hashes.HashAlgorithm,
+) -> int:
+ if not isinstance(key, (rsa.RSAPrivateKey, rsa.RSAPublicKey)):
+ raise TypeError("key must be an RSA public or private key")
+ # emBits = modBits - 1 per RFC 3447, so emLen = ceil(emBits / 8),
+ # which works out to (modBits + 6) // 8
+ emlen = (key.key_size + 6) // 8
+ salt_length = emlen - hash_algorithm.digest_size - 2
+ assert salt_length >= 0
+ return salt_length
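A short sketch of how these padding objects are assembled in practice; the salt-length sentinels and the `MGF1` wrapper are exactly the classes defined above:

```python
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import padding

# PSS for signatures: MAX_LENGTH asks for the largest salt the key allows
pss = padding.PSS(
    mgf=padding.MGF1(hashes.SHA256()),
    salt_length=padding.PSS.MAX_LENGTH,
)

# OAEP for encryption: label is almost always None
oaep = padding.OAEP(
    mgf=padding.MGF1(hashes.SHA256()),
    algorithm=hashes.SHA256(),
    label=None,
)
```

`PSS.MAX_LENGTH` defers the salt-size decision to signing time, which is the quantity `calculate_max_pss_salt_length()` above computes.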
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/rsa.py b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/rsa.py
new file mode 100644
index 00000000..b740f01f
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/rsa.py
@@ -0,0 +1,439 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import abc
+import typing
+from math import gcd
+
+from cryptography.hazmat.primitives import _serialization, hashes
+from cryptography.hazmat.primitives._asymmetric import AsymmetricPadding
+from cryptography.hazmat.primitives.asymmetric import utils as asym_utils
+
+
+class RSAPrivateKey(metaclass=abc.ABCMeta):
+ @abc.abstractmethod
+ def decrypt(self, ciphertext: bytes, padding: AsymmetricPadding) -> bytes:
+ """
+ Decrypts the provided ciphertext.
+ """
+
+ @property
+ @abc.abstractmethod
+ def key_size(self) -> int:
+ """
+ The bit length of the public modulus.
+ """
+
+ @abc.abstractmethod
+ def public_key(self) -> RSAPublicKey:
+ """
+ The RSAPublicKey associated with this private key.
+ """
+
+ @abc.abstractmethod
+ def sign(
+ self,
+ data: bytes,
+ padding: AsymmetricPadding,
+ algorithm: typing.Union[asym_utils.Prehashed, hashes.HashAlgorithm],
+ ) -> bytes:
+ """
+ Signs the data.
+ """
+
+ @abc.abstractmethod
+ def private_numbers(self) -> RSAPrivateNumbers:
+ """
+ Returns an RSAPrivateNumbers.
+ """
+
+ @abc.abstractmethod
+ def private_bytes(
+ self,
+ encoding: _serialization.Encoding,
+ format: _serialization.PrivateFormat,
+ encryption_algorithm: _serialization.KeySerializationEncryption,
+ ) -> bytes:
+ """
+ Returns the key serialized as bytes.
+ """
+
+
+RSAPrivateKeyWithSerialization = RSAPrivateKey
+
+
+class RSAPublicKey(metaclass=abc.ABCMeta):
+ @abc.abstractmethod
+ def encrypt(self, plaintext: bytes, padding: AsymmetricPadding) -> bytes:
+ """
+ Encrypts the given plaintext.
+ """
+
+ @property
+ @abc.abstractmethod
+ def key_size(self) -> int:
+ """
+ The bit length of the public modulus.
+ """
+
+ @abc.abstractmethod
+ def public_numbers(self) -> RSAPublicNumbers:
+ """
+ Returns an RSAPublicNumbers.
+ """
+
+ @abc.abstractmethod
+ def public_bytes(
+ self,
+ encoding: _serialization.Encoding,
+ format: _serialization.PublicFormat,
+ ) -> bytes:
+ """
+ Returns the key serialized as bytes.
+ """
+
+ @abc.abstractmethod
+ def verify(
+ self,
+ signature: bytes,
+ data: bytes,
+ padding: AsymmetricPadding,
+ algorithm: typing.Union[asym_utils.Prehashed, hashes.HashAlgorithm],
+ ) -> None:
+ """
+ Verifies the signature of the data.
+ """
+
+ @abc.abstractmethod
+ def recover_data_from_signature(
+ self,
+ signature: bytes,
+ padding: AsymmetricPadding,
+ algorithm: typing.Optional[hashes.HashAlgorithm],
+ ) -> bytes:
+ """
+ Recovers the original data from the signature.
+ """
+
+ @abc.abstractmethod
+ def __eq__(self, other: object) -> bool:
+ """
+ Checks equality.
+ """
+
+
+RSAPublicKeyWithSerialization = RSAPublicKey
+
+
+def generate_private_key(
+ public_exponent: int,
+ key_size: int,
+ backend: typing.Any = None,
+) -> RSAPrivateKey:
+ from cryptography.hazmat.backends.openssl.backend import backend as ossl
+
+ _verify_rsa_parameters(public_exponent, key_size)
+ return ossl.generate_rsa_private_key(public_exponent, key_size)
+
+
+def _verify_rsa_parameters(public_exponent: int, key_size: int) -> None:
+ if public_exponent not in (3, 65537):
+ raise ValueError(
+ "public_exponent must be either 3 (for legacy compatibility) or "
+ "65537. Almost everyone should choose 65537 here!"
+ )
+
+ if key_size < 512:
+ raise ValueError("key_size must be at least 512-bits.")
+
+
+def _check_private_key_components(
+ p: int,
+ q: int,
+ private_exponent: int,
+ dmp1: int,
+ dmq1: int,
+ iqmp: int,
+ public_exponent: int,
+ modulus: int,
+) -> None:
+ if modulus < 3:
+ raise ValueError("modulus must be >= 3.")
+
+ if p >= modulus:
+ raise ValueError("p must be < modulus.")
+
+ if q >= modulus:
+ raise ValueError("q must be < modulus.")
+
+ if dmp1 >= modulus:
+ raise ValueError("dmp1 must be < modulus.")
+
+ if dmq1 >= modulus:
+ raise ValueError("dmq1 must be < modulus.")
+
+ if iqmp >= modulus:
+ raise ValueError("iqmp must be < modulus.")
+
+ if private_exponent >= modulus:
+ raise ValueError("private_exponent must be < modulus.")
+
+ if public_exponent < 3 or public_exponent >= modulus:
+ raise ValueError("public_exponent must be >= 3 and < modulus.")
+
+ if public_exponent & 1 == 0:
+ raise ValueError("public_exponent must be odd.")
+
+ if dmp1 & 1 == 0:
+ raise ValueError("dmp1 must be odd.")
+
+ if dmq1 & 1 == 0:
+ raise ValueError("dmq1 must be odd.")
+
+ if p * q != modulus:
+ raise ValueError("p*q must equal modulus.")
+
+
+def _check_public_key_components(e: int, n: int) -> None:
+ if n < 3:
+ raise ValueError("n must be >= 3.")
+
+ if e < 3 or e >= n:
+ raise ValueError("e must be >= 3 and < n.")
+
+ if e & 1 == 0:
+ raise ValueError("e must be odd.")
+
+
+def _modinv(e: int, m: int) -> int:
+ """
+ Modular Multiplicative Inverse. Returns x such that: (x*e) mod m == 1
+ """
+ x1, x2 = 1, 0
+ a, b = e, m
+ while b > 0:
+ q, r = divmod(a, b)
+ xn = x1 - q * x2
+ a, b, x1, x2 = b, r, x2, xn
+ return x1 % m
+
+
+def rsa_crt_iqmp(p: int, q: int) -> int:
+ """
+ Compute the CRT (q ** -1) % p value from RSA primes p and q.
+ """
+ return _modinv(q, p)
+
+
+def rsa_crt_dmp1(private_exponent: int, p: int) -> int:
+ """
+ Compute the CRT private_exponent % (p - 1) value from the RSA
+ private_exponent (d) and p.
+ """
+ return private_exponent % (p - 1)
+
+
+def rsa_crt_dmq1(private_exponent: int, q: int) -> int:
+ """
+ Compute the CRT private_exponent % (q - 1) value from the RSA
+ private_exponent (d) and q.
+ """
+ return private_exponent % (q - 1)
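A worked toy example of the three CRT helpers, using the classic textbook parameters p=61, q=53, d=2753 (far too small for real use):

```python
from cryptography.hazmat.primitives.asymmetric import rsa

p, q, d = 61, 53, 2753            # toy textbook values only

iqmp = rsa.rsa_crt_iqmp(p, q)     # 38, since (38 * 53) % 61 == 1
dmp1 = rsa.rsa_crt_dmp1(d, p)     # 2753 % 60 == 53
dmq1 = rsa.rsa_crt_dmq1(d, q)     # 2753 % 52 == 49

assert (iqmp * q) % p == 1
```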
+
+
+# Controls the number of iterations rsa_recover_prime_factors will perform
+# to obtain the prime factors. Each iteration increments by 2 so the actual
+# maximum attempts is half this number.
+_MAX_RECOVERY_ATTEMPTS = 1000
+
+
+def rsa_recover_prime_factors(
+ n: int, e: int, d: int
+) -> typing.Tuple[int, int]:
+ """
+ Compute factors p and q from the private exponent d. We assume that n has
+ no more than two factors. This function is adapted from code in PyCrypto.
+ """
+ # See 8.2.2(i) in Handbook of Applied Cryptography.
+ ktot = d * e - 1
+ # The quantity d*e-1 is a multiple of phi(n), even,
+ # and can be represented as t*2^s.
+ t = ktot
+ while t % 2 == 0:
+ t = t // 2
+ # Cycle through all multiplicative inverses in Zn.
+ # The algorithm is non-deterministic; each candidate a has roughly
+ # a 50% chance of leading to a successful factorization.
+ # See "Digitalized Signatures and Public Key Functions as Intractable
+ # as Factorization", M. Rabin, 1979
+ spotted = False
+ a = 2
+ while not spotted and a < _MAX_RECOVERY_ATTEMPTS:
+ k = t
+ # Cycle through all values a^{t*2^i}=a^k
+ while k < ktot:
+ cand = pow(a, k, n)
+ # Check if a^k is a non-trivial root of unity (mod n)
+ if cand != 1 and cand != (n - 1) and pow(cand, 2, n) == 1:
+ # We have found a number such that (cand-1)(cand+1)=0 (mod n).
+ # Either of the terms divides n.
+ p = gcd(cand + 1, n)
+ spotted = True
+ break
+ k *= 2
+ # This value was not any good... let's try another!
+ a += 2
+ if not spotted:
+ raise ValueError("Unable to compute factors p and q from exponent d.")
+ # Found!
+ q, r = divmod(n, p)
+ assert r == 0
+ p, q = sorted((p, q), reverse=True)
+ return (p, q)
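A sketch of the recovery routine on the same textbook key (n = 61 * 53 = 3233, e = 17, d = 2753). Because the search is probabilistic, success within `_MAX_RECOVERY_ATTEMPTS` is overwhelmingly likely rather than guaranteed:

```python
from cryptography.hazmat.primitives.asymmetric.rsa import (
    rsa_recover_prime_factors,
)

# e * d - 1 = 46800 is a multiple of phi(n) = 3120, as the algorithm requires
p, q = rsa_recover_prime_factors(n=3233, e=17, d=2753)
assert p * q == 3233 and p > q    # returns (61, 53), larger factor first
```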
+
+
+class RSAPrivateNumbers:
+ def __init__(
+ self,
+ p: int,
+ q: int,
+ d: int,
+ dmp1: int,
+ dmq1: int,
+ iqmp: int,
+ public_numbers: RSAPublicNumbers,
+ ):
+ if (
+ not isinstance(p, int)
+ or not isinstance(q, int)
+ or not isinstance(d, int)
+ or not isinstance(dmp1, int)
+ or not isinstance(dmq1, int)
+ or not isinstance(iqmp, int)
+ ):
+ raise TypeError(
+ "RSAPrivateNumbers p, q, d, dmp1, dmq1, iqmp arguments must"
+ " all be an integers."
+ )
+
+ if not isinstance(public_numbers, RSAPublicNumbers):
+ raise TypeError(
+ "RSAPrivateNumbers public_numbers must be an RSAPublicNumbers"
+ " instance."
+ )
+
+ self._p = p
+ self._q = q
+ self._d = d
+ self._dmp1 = dmp1
+ self._dmq1 = dmq1
+ self._iqmp = iqmp
+ self._public_numbers = public_numbers
+
+ @property
+ def p(self) -> int:
+ return self._p
+
+ @property
+ def q(self) -> int:
+ return self._q
+
+ @property
+ def d(self) -> int:
+ return self._d
+
+ @property
+ def dmp1(self) -> int:
+ return self._dmp1
+
+ @property
+ def dmq1(self) -> int:
+ return self._dmq1
+
+ @property
+ def iqmp(self) -> int:
+ return self._iqmp
+
+ @property
+ def public_numbers(self) -> RSAPublicNumbers:
+ return self._public_numbers
+
+ def private_key(
+ self,
+ backend: typing.Any = None,
+ *,
+ unsafe_skip_rsa_key_validation: bool = False,
+ ) -> RSAPrivateKey:
+ from cryptography.hazmat.backends.openssl.backend import (
+ backend as ossl,
+ )
+
+ return ossl.load_rsa_private_numbers(
+ self, unsafe_skip_rsa_key_validation
+ )
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, RSAPrivateNumbers):
+ return NotImplemented
+
+ return (
+ self.p == other.p
+ and self.q == other.q
+ and self.d == other.d
+ and self.dmp1 == other.dmp1
+ and self.dmq1 == other.dmq1
+ and self.iqmp == other.iqmp
+ and self.public_numbers == other.public_numbers
+ )
+
+ def __hash__(self) -> int:
+ return hash(
+ (
+ self.p,
+ self.q,
+ self.d,
+ self.dmp1,
+ self.dmq1,
+ self.iqmp,
+ self.public_numbers,
+ )
+ )
+
+
+class RSAPublicNumbers:
+ def __init__(self, e: int, n: int):
+ if not isinstance(e, int) or not isinstance(n, int):
+ raise TypeError("RSAPublicNumbers arguments must be integers.")
+
+ self._e = e
+ self._n = n
+
+ @property
+ def e(self) -> int:
+ return self._e
+
+ @property
+ def n(self) -> int:
+ return self._n
+
+ def public_key(self, backend: typing.Any = None) -> RSAPublicKey:
+ from cryptography.hazmat.backends.openssl.backend import (
+ backend as ossl,
+ )
+
+ return ossl.load_rsa_public_numbers(self)
+
+ def __repr__(self) -> str:
+ return "".format(self)
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, RSAPublicNumbers):
+ return NotImplemented
+
+ return self.e == other.e and self.n == other.n
+
+ def __hash__(self) -> int:
+ return hash((self.e, self.n))
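A minimal round trip through the number classes above: generate a key, pull out its components, and rebuild an equivalent key object:

```python
from cryptography.hazmat.primitives.asymmetric import rsa

key = rsa.generate_private_key(public_exponent=65537, key_size=2048)

numbers = key.private_numbers()   # p, q, d, dmp1, dmq1, iqmp + (e, n)
rebuilt = numbers.private_key()   # loads the components back into OpenSSL

assert rebuilt.private_numbers() == numbers
assert numbers.public_numbers.e == 65537
```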
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/types.py b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/types.py
new file mode 100644
index 00000000..1fe4eaf5
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/types.py
@@ -0,0 +1,111 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import typing
+
+from cryptography import utils
+from cryptography.hazmat.primitives.asymmetric import (
+ dh,
+ dsa,
+ ec,
+ ed448,
+ ed25519,
+ rsa,
+ x448,
+ x25519,
+)
+
+# Every asymmetric key type
+PublicKeyTypes = typing.Union[
+ dh.DHPublicKey,
+ dsa.DSAPublicKey,
+ rsa.RSAPublicKey,
+ ec.EllipticCurvePublicKey,
+ ed25519.Ed25519PublicKey,
+ ed448.Ed448PublicKey,
+ x25519.X25519PublicKey,
+ x448.X448PublicKey,
+]
+PUBLIC_KEY_TYPES = PublicKeyTypes
+utils.deprecated(
+ PUBLIC_KEY_TYPES,
+ __name__,
+ "Use PublicKeyTypes instead",
+ utils.DeprecatedIn40,
+ name="PUBLIC_KEY_TYPES",
+)
+# Every asymmetric key type
+PrivateKeyTypes = typing.Union[
+ dh.DHPrivateKey,
+ ed25519.Ed25519PrivateKey,
+ ed448.Ed448PrivateKey,
+ rsa.RSAPrivateKey,
+ dsa.DSAPrivateKey,
+ ec.EllipticCurvePrivateKey,
+ x25519.X25519PrivateKey,
+ x448.X448PrivateKey,
+]
+PRIVATE_KEY_TYPES = PrivateKeyTypes
+utils.deprecated(
+ PRIVATE_KEY_TYPES,
+ __name__,
+ "Use PrivateKeyTypes instead",
+ utils.DeprecatedIn40,
+ name="PRIVATE_KEY_TYPES",
+)
+# Just the key types we allow to be used for x509 signing. This mirrors
+# the certificate public key types
+CertificateIssuerPrivateKeyTypes = typing.Union[
+ ed25519.Ed25519PrivateKey,
+ ed448.Ed448PrivateKey,
+ rsa.RSAPrivateKey,
+ dsa.DSAPrivateKey,
+ ec.EllipticCurvePrivateKey,
+]
+CERTIFICATE_PRIVATE_KEY_TYPES = CertificateIssuerPrivateKeyTypes
+utils.deprecated(
+ CERTIFICATE_PRIVATE_KEY_TYPES,
+ __name__,
+ "Use CertificateIssuerPrivateKeyTypes instead",
+ utils.DeprecatedIn40,
+ name="CERTIFICATE_PRIVATE_KEY_TYPES",
+)
+# Just the key types we allow to be used for x509 signing. This mirrors
+# the certificate private key types
+CertificateIssuerPublicKeyTypes = typing.Union[
+ dsa.DSAPublicKey,
+ rsa.RSAPublicKey,
+ ec.EllipticCurvePublicKey,
+ ed25519.Ed25519PublicKey,
+ ed448.Ed448PublicKey,
+]
+CERTIFICATE_ISSUER_PUBLIC_KEY_TYPES = CertificateIssuerPublicKeyTypes
+utils.deprecated(
+ CERTIFICATE_ISSUER_PUBLIC_KEY_TYPES,
+ __name__,
+ "Use CertificateIssuerPublicKeyTypes instead",
+ utils.DeprecatedIn40,
+ name="CERTIFICATE_ISSUER_PUBLIC_KEY_TYPES",
+)
+# This type removes DHPublicKey. x448/x25519 can be a public key
+# but cannot be used in signing so they are allowed here.
+CertificatePublicKeyTypes = typing.Union[
+ dsa.DSAPublicKey,
+ rsa.RSAPublicKey,
+ ec.EllipticCurvePublicKey,
+ ed25519.Ed25519PublicKey,
+ ed448.Ed448PublicKey,
+ x25519.X25519PublicKey,
+ x448.X448PublicKey,
+]
+CERTIFICATE_PUBLIC_KEY_TYPES = CertificatePublicKeyTypes
+utils.deprecated(
+ CERTIFICATE_PUBLIC_KEY_TYPES,
+ __name__,
+ "Use CertificatePublicKeyTypes instead",
+ utils.DeprecatedIn40,
+ name="CERTIFICATE_PUBLIC_KEY_TYPES",
+)
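These unions exist purely for annotations. A small helper (hypothetical, not part of the library) showing the intended usage:

```python
from cryptography.hazmat.primitives.asymmetric import ec, rsa
from cryptography.hazmat.primitives.asymmetric.types import (
    CertificateIssuerPrivateKeyTypes,
)


def describe_signing_key(key: CertificateIssuerPrivateKeyTypes) -> str:
    # One annotation covers every key type accepted for x509 signing
    if isinstance(key, rsa.RSAPrivateKey):
        return f"RSA-{key.key_size}"
    if isinstance(key, ec.EllipticCurvePrivateKey):
        return f"EC ({key.curve.name})"
    return type(key).__name__
```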
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/utils.py b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/utils.py
new file mode 100644
index 00000000..826b9567
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/utils.py
@@ -0,0 +1,24 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+from cryptography.hazmat.bindings._rust import asn1
+from cryptography.hazmat.primitives import hashes
+
+decode_dss_signature = asn1.decode_dss_signature
+encode_dss_signature = asn1.encode_dss_signature
+
+
+class Prehashed:
+ def __init__(self, algorithm: hashes.HashAlgorithm):
+ if not isinstance(algorithm, hashes.HashAlgorithm):
+ raise TypeError("Expected instance of HashAlgorithm.")
+
+ self._algorithm = algorithm
+ self._digest_size = algorithm.digest_size
+
+ @property
+ def digest_size(self) -> int:
+ return self._digest_size
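A sketch of what `Prehashed` is for: passing an already-computed digest to a signing call so the key object skips hashing. The RSA and PSS pieces come from the modules above; the pattern assumes the digest was produced with the same algorithm declared to `Prehashed`:

```python
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import padding, rsa, utils

h = hashes.Hash(hashes.SHA256())
h.update(b"data hashed elsewhere, e.g. while streaming a large file")
digest = h.finalize()

key = rsa.generate_private_key(public_exponent=65537, key_size=2048)
signature = key.sign(
    digest,                                # already a SHA-256 digest
    padding.PSS(
        mgf=padding.MGF1(hashes.SHA256()),
        salt_length=padding.PSS.DIGEST_LENGTH,
    ),
    utils.Prehashed(hashes.SHA256()),      # declares "do not hash again"
)
```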
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/x25519.py b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/x25519.py
new file mode 100644
index 00000000..699054c9
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/x25519.py
@@ -0,0 +1,113 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import abc
+
+from cryptography.exceptions import UnsupportedAlgorithm, _Reasons
+from cryptography.hazmat.bindings._rust import openssl as rust_openssl
+from cryptography.hazmat.primitives import _serialization
+
+
+class X25519PublicKey(metaclass=abc.ABCMeta):
+ @classmethod
+ def from_public_bytes(cls, data: bytes) -> X25519PublicKey:
+ from cryptography.hazmat.backends.openssl.backend import backend
+
+ if not backend.x25519_supported():
+ raise UnsupportedAlgorithm(
+ "X25519 is not supported by this version of OpenSSL.",
+ _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM,
+ )
+
+ return backend.x25519_load_public_bytes(data)
+
+ @abc.abstractmethod
+ def public_bytes(
+ self,
+ encoding: _serialization.Encoding,
+ format: _serialization.PublicFormat,
+ ) -> bytes:
+ """
+ The serialized bytes of the public key.
+ """
+
+ @abc.abstractmethod
+ def public_bytes_raw(self) -> bytes:
+ """
+ The raw bytes of the public key.
+ Equivalent to public_bytes(Raw, Raw).
+ """
+
+ @abc.abstractmethod
+ def __eq__(self, other: object) -> bool:
+ """
+ Checks equality.
+ """
+
+
+# For LibreSSL
+if hasattr(rust_openssl, "x25519"):
+ X25519PublicKey.register(rust_openssl.x25519.X25519PublicKey)
+
+
+class X25519PrivateKey(metaclass=abc.ABCMeta):
+ @classmethod
+ def generate(cls) -> X25519PrivateKey:
+ from cryptography.hazmat.backends.openssl.backend import backend
+
+ if not backend.x25519_supported():
+ raise UnsupportedAlgorithm(
+ "X25519 is not supported by this version of OpenSSL.",
+ _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM,
+ )
+ return backend.x25519_generate_key()
+
+ @classmethod
+ def from_private_bytes(cls, data: bytes) -> X25519PrivateKey:
+ from cryptography.hazmat.backends.openssl.backend import backend
+
+ if not backend.x25519_supported():
+ raise UnsupportedAlgorithm(
+ "X25519 is not supported by this version of OpenSSL.",
+ _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM,
+ )
+
+ return backend.x25519_load_private_bytes(data)
+
+ @abc.abstractmethod
+ def public_key(self) -> X25519PublicKey:
+ """
+ Returns the public key associated with this private key.
+ """
+
+ @abc.abstractmethod
+ def private_bytes(
+ self,
+ encoding: _serialization.Encoding,
+ format: _serialization.PrivateFormat,
+ encryption_algorithm: _serialization.KeySerializationEncryption,
+ ) -> bytes:
+ """
+ The serialized bytes of the private key.
+ """
+
+ @abc.abstractmethod
+ def private_bytes_raw(self) -> bytes:
+ """
+ The raw bytes of the private key.
+ Equivalent to private_bytes(Raw, Raw, NoEncryption()).
+ """
+
+ @abc.abstractmethod
+ def exchange(self, peer_public_key: X25519PublicKey) -> bytes:
+ """
+ Performs a key exchange operation using the provided peer's public key.
+ """
+
+
+# For LibreSSL
+if hasattr(rust_openssl, "x25519"):
+ X25519PrivateKey.register(rust_openssl.x25519.X25519PrivateKey)
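A minimal exchange sketch with the API above. Both sides arrive at the same 32-byte secret, which in real use should be fed through a KDF rather than used directly as a key:

```python
from cryptography.hazmat.primitives.asymmetric.x25519 import X25519PrivateKey

alice = X25519PrivateKey.generate()
bob = X25519PrivateKey.generate()

# Each side combines its own private key with the peer's public key
shared_alice = alice.exchange(bob.public_key())
shared_bob = bob.exchange(alice.public_key())

assert shared_alice == shared_bob and len(shared_alice) == 32
```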
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/x448.py b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/x448.py
new file mode 100644
index 00000000..abf78485
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/asymmetric/x448.py
@@ -0,0 +1,111 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import abc
+
+from cryptography.exceptions import UnsupportedAlgorithm, _Reasons
+from cryptography.hazmat.bindings._rust import openssl as rust_openssl
+from cryptography.hazmat.primitives import _serialization
+
+
+class X448PublicKey(metaclass=abc.ABCMeta):
+ @classmethod
+ def from_public_bytes(cls, data: bytes) -> X448PublicKey:
+ from cryptography.hazmat.backends.openssl.backend import backend
+
+ if not backend.x448_supported():
+ raise UnsupportedAlgorithm(
+ "X448 is not supported by this version of OpenSSL.",
+ _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM,
+ )
+
+ return backend.x448_load_public_bytes(data)
+
+ @abc.abstractmethod
+ def public_bytes(
+ self,
+ encoding: _serialization.Encoding,
+ format: _serialization.PublicFormat,
+ ) -> bytes:
+ """
+ The serialized bytes of the public key.
+ """
+
+ @abc.abstractmethod
+ def public_bytes_raw(self) -> bytes:
+ """
+ The raw bytes of the public key.
+ Equivalent to public_bytes(Raw, Raw).
+ """
+
+ @abc.abstractmethod
+ def __eq__(self, other: object) -> bool:
+ """
+ Checks equality.
+ """
+
+
+if hasattr(rust_openssl, "x448"):
+ X448PublicKey.register(rust_openssl.x448.X448PublicKey)
+
+
+class X448PrivateKey(metaclass=abc.ABCMeta):
+ @classmethod
+ def generate(cls) -> X448PrivateKey:
+ from cryptography.hazmat.backends.openssl.backend import backend
+
+ if not backend.x448_supported():
+ raise UnsupportedAlgorithm(
+ "X448 is not supported by this version of OpenSSL.",
+ _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM,
+ )
+ return backend.x448_generate_key()
+
+ @classmethod
+ def from_private_bytes(cls, data: bytes) -> X448PrivateKey:
+ from cryptography.hazmat.backends.openssl.backend import backend
+
+ if not backend.x448_supported():
+ raise UnsupportedAlgorithm(
+ "X448 is not supported by this version of OpenSSL.",
+ _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM,
+ )
+
+ return backend.x448_load_private_bytes(data)
+
+ @abc.abstractmethod
+ def public_key(self) -> X448PublicKey:
+ """
+ Returns the public key associated with this private key.
+ """
+
+ @abc.abstractmethod
+ def private_bytes(
+ self,
+ encoding: _serialization.Encoding,
+ format: _serialization.PrivateFormat,
+ encryption_algorithm: _serialization.KeySerializationEncryption,
+ ) -> bytes:
+ """
+ The serialized bytes of the private key.
+ """
+
+ @abc.abstractmethod
+ def private_bytes_raw(self) -> bytes:
+ """
+ The raw bytes of the private key.
+ Equivalent to private_bytes(Raw, Raw, NoEncryption()).
+ """
+
+ @abc.abstractmethod
+ def exchange(self, peer_public_key: X448PublicKey) -> bytes:
+ """
+ Performs a key exchange operation using the provided peer's public key.
+ """
+
+
+if hasattr(rust_openssl, "x448"):
+ X448PrivateKey.register(rust_openssl.x448.X448PrivateKey)
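X448 mirrors the X25519 API; a quick sketch of the raw-bytes round trip (X448 raw keys are 56 bytes, and `private_bytes_raw()` is unencrypted, so treat the output as secret material):

```python
from cryptography.hazmat.primitives.asymmetric.x448 import (
    X448PrivateKey,
    X448PublicKey,
)

key = X448PrivateKey.generate()
raw = key.private_bytes_raw()                      # 56 bytes, no encryption
restored = X448PrivateKey.from_private_bytes(raw)

peer = X448PublicKey.from_public_bytes(
    key.public_key().public_bytes_raw()
)
assert restored.exchange(peer) == key.exchange(peer)
```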
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/__init__.py b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/__init__.py
new file mode 100644
index 00000000..cc88fbf2
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/__init__.py
@@ -0,0 +1,27 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+from cryptography.hazmat.primitives._cipheralgorithm import (
+ BlockCipherAlgorithm,
+ CipherAlgorithm,
+)
+from cryptography.hazmat.primitives.ciphers.base import (
+ AEADCipherContext,
+ AEADDecryptionContext,
+ AEADEncryptionContext,
+ Cipher,
+ CipherContext,
+)
+
+__all__ = [
+ "Cipher",
+ "CipherAlgorithm",
+ "BlockCipherAlgorithm",
+ "CipherContext",
+ "AEADCipherContext",
+ "AEADDecryptionContext",
+ "AEADEncryptionContext",
+]
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/__init__.cpython-312.pyc
new file mode 100644
index 00000000..de7e90f0
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/__init__.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/aead.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/aead.cpython-312.pyc
new file mode 100644
index 00000000..cc1fd932
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/aead.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/algorithms.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/algorithms.cpython-312.pyc
new file mode 100644
index 00000000..fc95ac48
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/algorithms.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/base.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/base.cpython-312.pyc
new file mode 100644
index 00000000..0a8823a4
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/base.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/modes.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/modes.cpython-312.pyc
new file mode 100644
index 00000000..c8f0020a
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/modes.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/aead.py b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/aead.py
new file mode 100644
index 00000000..957b2d22
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/aead.py
@@ -0,0 +1,378 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import os
+import typing
+
+from cryptography import exceptions, utils
+from cryptography.hazmat.backends.openssl import aead
+from cryptography.hazmat.backends.openssl.backend import backend
+from cryptography.hazmat.bindings._rust import FixedPool
+
+
+class ChaCha20Poly1305:
+ _MAX_SIZE = 2**31 - 1
+
+ def __init__(self, key: bytes):
+ if not backend.aead_cipher_supported(self):
+ raise exceptions.UnsupportedAlgorithm(
+ "ChaCha20Poly1305 is not supported by this version of OpenSSL",
+ exceptions._Reasons.UNSUPPORTED_CIPHER,
+ )
+ utils._check_byteslike("key", key)
+
+ if len(key) != 32:
+ raise ValueError("ChaCha20Poly1305 key must be 32 bytes.")
+
+ self._key = key
+ self._pool = FixedPool(self._create_fn)
+
+ @classmethod
+ def generate_key(cls) -> bytes:
+ return os.urandom(32)
+
+ def _create_fn(self):
+ return aead._aead_create_ctx(backend, self, self._key)
+
+ def encrypt(
+ self,
+ nonce: bytes,
+ data: bytes,
+ associated_data: typing.Optional[bytes],
+ ) -> bytes:
+ if associated_data is None:
+ associated_data = b""
+
+ if len(data) > self._MAX_SIZE or len(associated_data) > self._MAX_SIZE:
+ # This is OverflowError to match what cffi would raise
+ raise OverflowError(
+ "Data or associated data too long. Max 2**31 - 1 bytes"
+ )
+
+ self._check_params(nonce, data, associated_data)
+ with self._pool.acquire() as ctx:
+ return aead._encrypt(
+ backend, self, nonce, data, [associated_data], 16, ctx
+ )
+
+ def decrypt(
+ self,
+ nonce: bytes,
+ data: bytes,
+ associated_data: typing.Optional[bytes],
+ ) -> bytes:
+ if associated_data is None:
+ associated_data = b""
+
+ self._check_params(nonce, data, associated_data)
+ with self._pool.acquire() as ctx:
+ return aead._decrypt(
+ backend, self, nonce, data, [associated_data], 16, ctx
+ )
+
+ def _check_params(
+ self,
+ nonce: bytes,
+ data: bytes,
+ associated_data: bytes,
+ ) -> None:
+ utils._check_byteslike("nonce", nonce)
+ utils._check_byteslike("data", data)
+ utils._check_byteslike("associated_data", associated_data)
+ if len(nonce) != 12:
+ raise ValueError("Nonce must be 12 bytes")
+
+
+class AESCCM:
+ _MAX_SIZE = 2**31 - 1
+
+ def __init__(self, key: bytes, tag_length: int = 16):
+ utils._check_byteslike("key", key)
+ if len(key) not in (16, 24, 32):
+ raise ValueError("AESCCM key must be 128, 192, or 256 bits.")
+
+ self._key = key
+ if not isinstance(tag_length, int):
+ raise TypeError("tag_length must be an integer")
+
+ if tag_length not in (4, 6, 8, 10, 12, 14, 16):
+ raise ValueError("Invalid tag_length")
+
+ self._tag_length = tag_length
+
+ if not backend.aead_cipher_supported(self):
+ raise exceptions.UnsupportedAlgorithm(
+ "AESCCM is not supported by this version of OpenSSL",
+ exceptions._Reasons.UNSUPPORTED_CIPHER,
+ )
+
+ @classmethod
+ def generate_key(cls, bit_length: int) -> bytes:
+ if not isinstance(bit_length, int):
+ raise TypeError("bit_length must be an integer")
+
+ if bit_length not in (128, 192, 256):
+ raise ValueError("bit_length must be 128, 192, or 256")
+
+ return os.urandom(bit_length // 8)
+
+ def encrypt(
+ self,
+ nonce: bytes,
+ data: bytes,
+ associated_data: typing.Optional[bytes],
+ ) -> bytes:
+ if associated_data is None:
+ associated_data = b""
+
+ if len(data) > self._MAX_SIZE or len(associated_data) > self._MAX_SIZE:
+ # This is OverflowError to match what cffi would raise
+ raise OverflowError(
+ "Data or associated data too long. Max 2**31 - 1 bytes"
+ )
+
+ self._check_params(nonce, data, associated_data)
+ self._validate_lengths(nonce, len(data))
+ return aead._encrypt(
+ backend, self, nonce, data, [associated_data], self._tag_length
+ )
+
+ def decrypt(
+ self,
+ nonce: bytes,
+ data: bytes,
+ associated_data: typing.Optional[bytes],
+ ) -> bytes:
+ if associated_data is None:
+ associated_data = b""
+
+ self._check_params(nonce, data, associated_data)
+ return aead._decrypt(
+ backend, self, nonce, data, [associated_data], self._tag_length
+ )
+
+ def _validate_lengths(self, nonce: bytes, data_len: int) -> None:
+ # For information about computing this, see
+ # https://tools.ietf.org/html/rfc3610#section-2.1
+ l_val = 15 - len(nonce)
+ if 2 ** (8 * l_val) < data_len:
+ raise ValueError("Data too long for nonce")
+
+ def _check_params(
+ self, nonce: bytes, data: bytes, associated_data: bytes
+ ) -> None:
+ utils._check_byteslike("nonce", nonce)
+ utils._check_byteslike("data", data)
+ utils._check_byteslike("associated_data", associated_data)
+ if not 7 <= len(nonce) <= 13:
+ raise ValueError("Nonce must be between 7 and 13 bytes")
+
+
+class AESGCM:
+ _MAX_SIZE = 2**31 - 1
+
+ def __init__(self, key: bytes):
+ utils._check_byteslike("key", key)
+ if len(key) not in (16, 24, 32):
+ raise ValueError("AESGCM key must be 128, 192, or 256 bits.")
+
+ self._key = key
+
+ @classmethod
+ def generate_key(cls, bit_length: int) -> bytes:
+ if not isinstance(bit_length, int):
+ raise TypeError("bit_length must be an integer")
+
+ if bit_length not in (128, 192, 256):
+ raise ValueError("bit_length must be 128, 192, or 256")
+
+ return os.urandom(bit_length // 8)
+
+ def encrypt(
+ self,
+ nonce: bytes,
+ data: bytes,
+ associated_data: typing.Optional[bytes],
+ ) -> bytes:
+ if associated_data is None:
+ associated_data = b""
+
+ if len(data) > self._MAX_SIZE or len(associated_data) > self._MAX_SIZE:
+ # This is OverflowError to match what cffi would raise
+ raise OverflowError(
+ "Data or associated data too long. Max 2**31 - 1 bytes"
+ )
+
+ self._check_params(nonce, data, associated_data)
+ return aead._encrypt(backend, self, nonce, data, [associated_data], 16)
+
+ def decrypt(
+ self,
+ nonce: bytes,
+ data: bytes,
+ associated_data: typing.Optional[bytes],
+ ) -> bytes:
+ if associated_data is None:
+ associated_data = b""
+
+ self._check_params(nonce, data, associated_data)
+ return aead._decrypt(backend, self, nonce, data, [associated_data], 16)
+
+ def _check_params(
+ self,
+ nonce: bytes,
+ data: bytes,
+ associated_data: bytes,
+ ) -> None:
+ utils._check_byteslike("nonce", nonce)
+ utils._check_byteslike("data", data)
+ utils._check_byteslike("associated_data", associated_data)
+ if len(nonce) < 8 or len(nonce) > 128:
+ raise ValueError("Nonce must be between 8 and 128 bytes")
+
+
+class AESOCB3:
+ _MAX_SIZE = 2**31 - 1
+
+ def __init__(self, key: bytes):
+ utils._check_byteslike("key", key)
+ if len(key) not in (16, 24, 32):
+ raise ValueError("AESOCB3 key must be 128, 192, or 256 bits.")
+
+ self._key = key
+
+ if not backend.aead_cipher_supported(self):
+ raise exceptions.UnsupportedAlgorithm(
+ "OCB3 is not supported by this version of OpenSSL",
+ exceptions._Reasons.UNSUPPORTED_CIPHER,
+ )
+
+ @classmethod
+ def generate_key(cls, bit_length: int) -> bytes:
+ if not isinstance(bit_length, int):
+ raise TypeError("bit_length must be an integer")
+
+ if bit_length not in (128, 192, 256):
+ raise ValueError("bit_length must be 128, 192, or 256")
+
+ return os.urandom(bit_length // 8)
+
+ def encrypt(
+ self,
+ nonce: bytes,
+ data: bytes,
+ associated_data: typing.Optional[bytes],
+ ) -> bytes:
+ if associated_data is None:
+ associated_data = b""
+
+ if len(data) > self._MAX_SIZE or len(associated_data) > self._MAX_SIZE:
+ # This is OverflowError to match what cffi would raise
+ raise OverflowError(
+ "Data or associated data too long. Max 2**31 - 1 bytes"
+ )
+
+ self._check_params(nonce, data, associated_data)
+ return aead._encrypt(backend, self, nonce, data, [associated_data], 16)
+
+ def decrypt(
+ self,
+ nonce: bytes,
+ data: bytes,
+ associated_data: typing.Optional[bytes],
+ ) -> bytes:
+ if associated_data is None:
+ associated_data = b""
+
+ self._check_params(nonce, data, associated_data)
+ return aead._decrypt(backend, self, nonce, data, [associated_data], 16)
+
+ def _check_params(
+ self,
+ nonce: bytes,
+ data: bytes,
+ associated_data: bytes,
+ ) -> None:
+ utils._check_byteslike("nonce", nonce)
+ utils._check_byteslike("data", data)
+ utils._check_byteslike("associated_data", associated_data)
+ if len(nonce) < 12 or len(nonce) > 15:
+ raise ValueError("Nonce must be between 12 and 15 bytes")
+
+
+class AESSIV:
+ _MAX_SIZE = 2**31 - 1
+
+ def __init__(self, key: bytes):
+ utils._check_byteslike("key", key)
+ if len(key) not in (32, 48, 64):
+ raise ValueError("AESSIV key must be 256, 384, or 512 bits.")
+
+ self._key = key
+
+ if not backend.aead_cipher_supported(self):
+ raise exceptions.UnsupportedAlgorithm(
+ "AES-SIV is not supported by this version of OpenSSL",
+ exceptions._Reasons.UNSUPPORTED_CIPHER,
+ )
+
+ @classmethod
+ def generate_key(cls, bit_length: int) -> bytes:
+ if not isinstance(bit_length, int):
+ raise TypeError("bit_length must be an integer")
+
+ if bit_length not in (256, 384, 512):
+ raise ValueError("bit_length must be 256, 384, or 512")
+
+ return os.urandom(bit_length // 8)
+
+ def encrypt(
+ self,
+ data: bytes,
+ associated_data: typing.Optional[typing.List[bytes]],
+ ) -> bytes:
+ if associated_data is None:
+ associated_data = []
+
+ self._check_params(data, associated_data)
+
+ if len(data) > self._MAX_SIZE or any(
+ len(ad) > self._MAX_SIZE for ad in associated_data
+ ):
+ # This is OverflowError to match what cffi would raise
+ raise OverflowError(
+ "Data or associated data too long. Max 2**31 - 1 bytes"
+ )
+
+ return aead._encrypt(backend, self, b"", data, associated_data, 16)
+
+ def decrypt(
+ self,
+ data: bytes,
+ associated_data: typing.Optional[typing.List[bytes]],
+ ) -> bytes:
+ if associated_data is None:
+ associated_data = []
+
+ self._check_params(data, associated_data)
+
+ return aead._decrypt(backend, self, b"", data, associated_data, 16)
+
+ def _check_params(
+ self,
+ data: bytes,
+ associated_data: typing.List[bytes],
+ ) -> None:
+ utils._check_byteslike("data", data)
+ if len(data) == 0:
+ raise ValueError("data must not be zero length")
+
+ if not isinstance(associated_data, list):
+ raise TypeError(
+ "associated_data must be a list of bytes-like objects or None"
+ )
+ for x in associated_data:
+ utils._check_byteslike("associated_data elements", x)
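All five AEAD classes share the same shape; a minimal AESGCM sketch (96-bit nonces are the conventional GCM choice, and a nonce must never repeat under one key). `decrypt()` raises `InvalidTag` if either the ciphertext or the associated data was altered:

```python
import os

from cryptography.hazmat.primitives.ciphers.aead import AESGCM

key = AESGCM.generate_key(bit_length=256)
aesgcm = AESGCM(key)
nonce = os.urandom(12)

ct = aesgcm.encrypt(nonce, b"secret payload", b"authenticated header")
pt = aesgcm.decrypt(nonce, ct, b"authenticated header")
assert pt == b"secret payload"
```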
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/algorithms.py b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/algorithms.py
new file mode 100644
index 00000000..4bfc5d84
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/algorithms.py
@@ -0,0 +1,228 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+from cryptography import utils
+from cryptography.hazmat.primitives.ciphers import (
+ BlockCipherAlgorithm,
+ CipherAlgorithm,
+)
+
+
+def _verify_key_size(algorithm: CipherAlgorithm, key: bytes) -> bytes:
+ # Verify that the key is a bytes-like object
+ utils._check_byteslike("key", key)
+
+ # Verify that the key size matches the expected key size
+ if len(key) * 8 not in algorithm.key_sizes:
+ raise ValueError(
+ "Invalid key size ({}) for {}.".format(
+ len(key) * 8, algorithm.name
+ )
+ )
+ return key
+
+
+class AES(BlockCipherAlgorithm):
+ name = "AES"
+ block_size = 128
+ # 512 added to support AES-256-XTS, which uses 512-bit keys
+ key_sizes = frozenset([128, 192, 256, 512])
+
+ def __init__(self, key: bytes):
+ self.key = _verify_key_size(self, key)
+
+ @property
+ def key_size(self) -> int:
+ return len(self.key) * 8
+
+
+class AES128(BlockCipherAlgorithm):
+ name = "AES"
+ block_size = 128
+ key_sizes = frozenset([128])
+ key_size = 128
+
+ def __init__(self, key: bytes):
+ self.key = _verify_key_size(self, key)
+
+
+class AES256(BlockCipherAlgorithm):
+ name = "AES"
+ block_size = 128
+ key_sizes = frozenset([256])
+ key_size = 256
+
+ def __init__(self, key: bytes):
+ self.key = _verify_key_size(self, key)
+
+
+class Camellia(BlockCipherAlgorithm):
+ name = "camellia"
+ block_size = 128
+ key_sizes = frozenset([128, 192, 256])
+
+ def __init__(self, key: bytes):
+ self.key = _verify_key_size(self, key)
+
+ @property
+ def key_size(self) -> int:
+ return len(self.key) * 8
+
+
+class TripleDES(BlockCipherAlgorithm):
+ name = "3DES"
+ block_size = 64
+ key_sizes = frozenset([64, 128, 192])
+
+ def __init__(self, key: bytes):
+ if len(key) == 8:
+ key += key + key
+ elif len(key) == 16:
+ key += key[:8]
+ self.key = _verify_key_size(self, key)
+
+ @property
+ def key_size(self) -> int:
+ return len(self.key) * 8
+
+
+class Blowfish(BlockCipherAlgorithm):
+ name = "Blowfish"
+ block_size = 64
+ key_sizes = frozenset(range(32, 449, 8))
+
+ def __init__(self, key: bytes):
+ self.key = _verify_key_size(self, key)
+
+ @property
+ def key_size(self) -> int:
+ return len(self.key) * 8
+
+
+_BlowfishInternal = Blowfish
+utils.deprecated(
+ Blowfish,
+ __name__,
+ "Blowfish has been deprecated",
+ utils.DeprecatedIn37,
+ name="Blowfish",
+)
+
+
+class CAST5(BlockCipherAlgorithm):
+ name = "CAST5"
+ block_size = 64
+ key_sizes = frozenset(range(40, 129, 8))
+
+ def __init__(self, key: bytes):
+ self.key = _verify_key_size(self, key)
+
+ @property
+ def key_size(self) -> int:
+ return len(self.key) * 8
+
+
+_CAST5Internal = CAST5
+utils.deprecated(
+ CAST5,
+ __name__,
+ "CAST5 has been deprecated",
+ utils.DeprecatedIn37,
+ name="CAST5",
+)
+
+
+class ARC4(CipherAlgorithm):
+ name = "RC4"
+ key_sizes = frozenset([40, 56, 64, 80, 128, 160, 192, 256])
+
+ def __init__(self, key: bytes):
+ self.key = _verify_key_size(self, key)
+
+ @property
+ def key_size(self) -> int:
+ return len(self.key) * 8
+
+
+class IDEA(BlockCipherAlgorithm):
+ name = "IDEA"
+ block_size = 64
+ key_sizes = frozenset([128])
+
+ def __init__(self, key: bytes):
+ self.key = _verify_key_size(self, key)
+
+ @property
+ def key_size(self) -> int:
+ return len(self.key) * 8
+
+
+_IDEAInternal = IDEA
+utils.deprecated(
+ IDEA,
+ __name__,
+ "IDEA has been deprecated",
+ utils.DeprecatedIn37,
+ name="IDEA",
+)
+
+
+class SEED(BlockCipherAlgorithm):
+ name = "SEED"
+ block_size = 128
+ key_sizes = frozenset([128])
+
+ def __init__(self, key: bytes):
+ self.key = _verify_key_size(self, key)
+
+ @property
+ def key_size(self) -> int:
+ return len(self.key) * 8
+
+
+_SEEDInternal = SEED
+utils.deprecated(
+ SEED,
+ __name__,
+ "SEED has been deprecated",
+ utils.DeprecatedIn37,
+ name="SEED",
+)
+
+
+class ChaCha20(CipherAlgorithm):
+ name = "ChaCha20"
+ key_sizes = frozenset([256])
+
+ def __init__(self, key: bytes, nonce: bytes):
+ self.key = _verify_key_size(self, key)
+ utils._check_byteslike("nonce", nonce)
+
+ if len(nonce) != 16:
+ raise ValueError("nonce must be 128-bits (16 bytes)")
+
+ self._nonce = nonce
+
+ @property
+ def nonce(self) -> bytes:
+ return self._nonce
+
+ @property
+ def key_size(self) -> int:
+ return len(self.key) * 8
+
+
+class SM4(BlockCipherAlgorithm):
+ name = "SM4"
+ block_size = 128
+ key_sizes = frozenset([128])
+
+ def __init__(self, key: bytes):
+ self.key = _verify_key_size(self, key)
+
+ @property
+ def key_size(self) -> int:
+ return len(self.key) * 8
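The `_verify_key_size` check above is what every constructor funnels through; a small demonstration of the derived `key_size` property and the rejection path:

```python
import os

from cryptography.hazmat.primitives.ciphers import algorithms

aes = algorithms.AES(os.urandom(32))
assert aes.key_size == 256          # derived from the key's byte length

try:
    algorithms.AES(os.urandom(17))  # 136 bits is not in AES.key_sizes
except ValueError as exc:
    print(exc)                      # Invalid key size (136) for AES.
```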
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/base.py b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/base.py
new file mode 100644
index 00000000..38a2ebbe
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/base.py
@@ -0,0 +1,269 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import abc
+import typing
+
+from cryptography.exceptions import (
+ AlreadyFinalized,
+ AlreadyUpdated,
+ NotYetFinalized,
+)
+from cryptography.hazmat.primitives._cipheralgorithm import CipherAlgorithm
+from cryptography.hazmat.primitives.ciphers import modes
+
+if typing.TYPE_CHECKING:
+ from cryptography.hazmat.backends.openssl.ciphers import (
+ _CipherContext as _BackendCipherContext,
+ )
+
+
+class CipherContext(metaclass=abc.ABCMeta):
+ @abc.abstractmethod
+ def update(self, data: bytes) -> bytes:
+ """
+ Processes the provided bytes through the cipher and returns the results
+ as bytes.
+ """
+
+ @abc.abstractmethod
+ def update_into(self, data: bytes, buf: bytes) -> int:
+ """
+ Processes the provided bytes and writes the resulting data into the
+ provided buffer. Returns the number of bytes written.
+ """
+
+ @abc.abstractmethod
+ def finalize(self) -> bytes:
+ """
+ Returns the results of processing the final block as bytes.
+ """
+
+
+class AEADCipherContext(CipherContext, metaclass=abc.ABCMeta):
+ @abc.abstractmethod
+ def authenticate_additional_data(self, data: bytes) -> None:
+ """
+ Authenticates the provided bytes.
+ """
+
+
+class AEADDecryptionContext(AEADCipherContext, metaclass=abc.ABCMeta):
+ @abc.abstractmethod
+ def finalize_with_tag(self, tag: bytes) -> bytes:
+ """
+ Returns the results of processing the final block as bytes and allows
+ delayed passing of the authentication tag.
+ """
+
+
+class AEADEncryptionContext(AEADCipherContext, metaclass=abc.ABCMeta):
+ @property
+ @abc.abstractmethod
+ def tag(self) -> bytes:
+ """
+ Returns tag bytes. This is only available after encryption is
+ finalized.
+ """
+
+
+Mode = typing.TypeVar(
+ "Mode", bound=typing.Optional[modes.Mode], covariant=True
+)
+
+
+class Cipher(typing.Generic[Mode]):
+ def __init__(
+ self,
+ algorithm: CipherAlgorithm,
+ mode: Mode,
+ backend: typing.Any = None,
+ ) -> None:
+ if not isinstance(algorithm, CipherAlgorithm):
+ raise TypeError("Expected interface of CipherAlgorithm.")
+
+ if mode is not None:
+ # mypy needs this assert to narrow the type from our generic
+ # type. Maybe a future version of mypy won't need it.
+ assert isinstance(mode, modes.Mode)
+ mode.validate_for_algorithm(algorithm)
+
+ self.algorithm = algorithm
+ self.mode = mode
+
+ @typing.overload
+ def encryptor(
+ self: Cipher[modes.ModeWithAuthenticationTag],
+ ) -> AEADEncryptionContext:
+ ...
+
+ @typing.overload
+ def encryptor(
+ self: _CIPHER_TYPE,
+ ) -> CipherContext:
+ ...
+
+ def encryptor(self):
+ if isinstance(self.mode, modes.ModeWithAuthenticationTag):
+ if self.mode.tag is not None:
+ raise ValueError(
+ "Authentication tag must be None when encrypting."
+ )
+ from cryptography.hazmat.backends.openssl.backend import backend
+
+ ctx = backend.create_symmetric_encryption_ctx(
+ self.algorithm, self.mode
+ )
+ return self._wrap_ctx(ctx, encrypt=True)
+
+ @typing.overload
+ def decryptor(
+ self: Cipher[modes.ModeWithAuthenticationTag],
+ ) -> AEADDecryptionContext:
+ ...
+
+ @typing.overload
+ def decryptor(
+ self: _CIPHER_TYPE,
+ ) -> CipherContext:
+ ...
+
+ def decryptor(self):
+ from cryptography.hazmat.backends.openssl.backend import backend
+
+ ctx = backend.create_symmetric_decryption_ctx(
+ self.algorithm, self.mode
+ )
+ return self._wrap_ctx(ctx, encrypt=False)
+
+ def _wrap_ctx(
+ self, ctx: _BackendCipherContext, encrypt: bool
+ ) -> typing.Union[
+ AEADEncryptionContext, AEADDecryptionContext, CipherContext
+ ]:
+ if isinstance(self.mode, modes.ModeWithAuthenticationTag):
+ if encrypt:
+ return _AEADEncryptionContext(ctx)
+ else:
+ return _AEADDecryptionContext(ctx)
+ else:
+ return _CipherContext(ctx)
+
+
+_CIPHER_TYPE = Cipher[
+ typing.Union[
+ modes.ModeWithNonce,
+ modes.ModeWithTweak,
+ None,
+ modes.ECB,
+ modes.ModeWithInitializationVector,
+ ]
+]
+
+
+class _CipherContext(CipherContext):
+ _ctx: typing.Optional[_BackendCipherContext]
+
+ def __init__(self, ctx: _BackendCipherContext) -> None:
+ self._ctx = ctx
+
+ def update(self, data: bytes) -> bytes:
+ if self._ctx is None:
+ raise AlreadyFinalized("Context was already finalized.")
+ return self._ctx.update(data)
+
+ def update_into(self, data: bytes, buf: bytes) -> int:
+ if self._ctx is None:
+ raise AlreadyFinalized("Context was already finalized.")
+ return self._ctx.update_into(data, buf)
+
+ def finalize(self) -> bytes:
+ if self._ctx is None:
+ raise AlreadyFinalized("Context was already finalized.")
+ data = self._ctx.finalize()
+ self._ctx = None
+ return data
+
+
+class _AEADCipherContext(AEADCipherContext):
+ _ctx: typing.Optional[_BackendCipherContext]
+ _tag: typing.Optional[bytes]
+
+ def __init__(self, ctx: _BackendCipherContext) -> None:
+ self._ctx = ctx
+ self._bytes_processed = 0
+ self._aad_bytes_processed = 0
+ self._tag = None
+ self._updated = False
+
+ def _check_limit(self, data_size: int) -> None:
+ if self._ctx is None:
+ raise AlreadyFinalized("Context was already finalized.")
+ self._updated = True
+ self._bytes_processed += data_size
+ if self._bytes_processed > self._ctx._mode._MAX_ENCRYPTED_BYTES:
+ raise ValueError(
+ "{} has a maximum encrypted byte limit of {}".format(
+ self._ctx._mode.name, self._ctx._mode._MAX_ENCRYPTED_BYTES
+ )
+ )
+
+ def update(self, data: bytes) -> bytes:
+ self._check_limit(len(data))
+ # mypy needs this assert even though _check_limit already checked
+ assert self._ctx is not None
+ return self._ctx.update(data)
+
+ def update_into(self, data: bytes, buf: bytes) -> int:
+ self._check_limit(len(data))
+ # mypy needs this assert even though _check_limit already checked
+ assert self._ctx is not None
+ return self._ctx.update_into(data, buf)
+
+ def finalize(self) -> bytes:
+ if self._ctx is None:
+ raise AlreadyFinalized("Context was already finalized.")
+ data = self._ctx.finalize()
+ self._tag = self._ctx.tag
+ self._ctx = None
+ return data
+
+ def authenticate_additional_data(self, data: bytes) -> None:
+ if self._ctx is None:
+ raise AlreadyFinalized("Context was already finalized.")
+ if self._updated:
+ raise AlreadyUpdated("Update has been called on this context.")
+
+ self._aad_bytes_processed += len(data)
+ if self._aad_bytes_processed > self._ctx._mode._MAX_AAD_BYTES:
+ raise ValueError(
+ "{} has a maximum AAD byte limit of {}".format(
+ self._ctx._mode.name, self._ctx._mode._MAX_AAD_BYTES
+ )
+ )
+
+ self._ctx.authenticate_additional_data(data)
+
+
+class _AEADDecryptionContext(_AEADCipherContext, AEADDecryptionContext):
+ def finalize_with_tag(self, tag: bytes) -> bytes:
+ if self._ctx is None:
+ raise AlreadyFinalized("Context was already finalized.")
+ data = self._ctx.finalize_with_tag(tag)
+ self._tag = self._ctx.tag
+ self._ctx = None
+ return data
+
+
+class _AEADEncryptionContext(_AEADCipherContext, AEADEncryptionContext):
+ @property
+ def tag(self) -> bytes:
+ if self._ctx is not None:
+ raise NotYetFinalized(
+ "You must finalize encryption before " "getting the tag."
+ )
+ assert self._tag is not None
+ return self._tag
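A sketch of the plain (non-AEAD) context path above, using AES-CTR. `update()` may be called repeatedly; `finalize()` retires the context, after which further calls raise `AlreadyFinalized`:

```python
import os

from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes

key, nonce = os.urandom(32), os.urandom(16)   # CTR nonce = block size (16)
cipher = Cipher(algorithms.AES(key), modes.CTR(nonce))

enc = cipher.encryptor()
ct = enc.update(b"streamed ") + enc.update(b"in chunks") + enc.finalize()

dec = cipher.decryptor()
assert dec.update(ct) + dec.finalize() == b"streamed in chunks"
```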
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/modes.py b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/modes.py
new file mode 100644
index 00000000..d8ea1888
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/ciphers/modes.py
@@ -0,0 +1,274 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import abc
+import typing
+
+from cryptography import utils
+from cryptography.exceptions import UnsupportedAlgorithm, _Reasons
+from cryptography.hazmat.primitives._cipheralgorithm import (
+ BlockCipherAlgorithm,
+ CipherAlgorithm,
+)
+from cryptography.hazmat.primitives.ciphers import algorithms
+
+
+class Mode(metaclass=abc.ABCMeta):
+ @property
+ @abc.abstractmethod
+ def name(self) -> str:
+ """
+ A string naming this mode (e.g. "ECB", "CBC").
+ """
+
+ @abc.abstractmethod
+ def validate_for_algorithm(self, algorithm: CipherAlgorithm) -> None:
+ """
+ Checks that all the necessary invariants of this (mode, algorithm)
+ combination are met.
+ """
+
+
+class ModeWithInitializationVector(Mode, metaclass=abc.ABCMeta):
+ @property
+ @abc.abstractmethod
+ def initialization_vector(self) -> bytes:
+ """
+ The value of the initialization vector for this mode as bytes.
+ """
+
+
+class ModeWithTweak(Mode, metaclass=abc.ABCMeta):
+ @property
+ @abc.abstractmethod
+ def tweak(self) -> bytes:
+ """
+ The value of the tweak for this mode as bytes.
+ """
+
+
+class ModeWithNonce(Mode, metaclass=abc.ABCMeta):
+ @property
+ @abc.abstractmethod
+ def nonce(self) -> bytes:
+ """
+ The value of the nonce for this mode as bytes.
+ """
+
+
+class ModeWithAuthenticationTag(Mode, metaclass=abc.ABCMeta):
+ @property
+ @abc.abstractmethod
+ def tag(self) -> typing.Optional[bytes]:
+ """
+ The value of the tag supplied to the constructor of this mode.
+ """
+
+
+def _check_aes_key_length(self: Mode, algorithm: CipherAlgorithm) -> None:
+ if algorithm.key_size > 256 and algorithm.name == "AES":
+ raise ValueError(
+ "Only 128, 192, and 256 bit keys are allowed for this AES mode"
+ )
+
+
+def _check_iv_length(
+ self: ModeWithInitializationVector, algorithm: BlockCipherAlgorithm
+) -> None:
+ if len(self.initialization_vector) * 8 != algorithm.block_size:
+ raise ValueError(
+ "Invalid IV size ({}) for {}.".format(
+ len(self.initialization_vector), self.name
+ )
+ )
+
+
+def _check_nonce_length(
+ nonce: bytes, name: str, algorithm: CipherAlgorithm
+) -> None:
+ if not isinstance(algorithm, BlockCipherAlgorithm):
+ raise UnsupportedAlgorithm(
+ f"{name} requires a block cipher algorithm",
+ _Reasons.UNSUPPORTED_CIPHER,
+ )
+ if len(nonce) * 8 != algorithm.block_size:
+ raise ValueError(f"Invalid nonce size ({len(nonce)}) for {name}.")
+
+
+def _check_iv_and_key_length(
+ self: ModeWithInitializationVector, algorithm: CipherAlgorithm
+) -> None:
+ if not isinstance(algorithm, BlockCipherAlgorithm):
+ raise UnsupportedAlgorithm(
+ f"{self} requires a block cipher algorithm",
+ _Reasons.UNSUPPORTED_CIPHER,
+ )
+ _check_aes_key_length(self, algorithm)
+ _check_iv_length(self, algorithm)
+
+
+class CBC(ModeWithInitializationVector):
+ name = "CBC"
+
+ def __init__(self, initialization_vector: bytes):
+ utils._check_byteslike("initialization_vector", initialization_vector)
+ self._initialization_vector = initialization_vector
+
+ @property
+ def initialization_vector(self) -> bytes:
+ return self._initialization_vector
+
+ validate_for_algorithm = _check_iv_and_key_length
+
+
+class XTS(ModeWithTweak):
+ name = "XTS"
+
+ def __init__(self, tweak: bytes):
+ utils._check_byteslike("tweak", tweak)
+
+ if len(tweak) != 16:
+ raise ValueError("tweak must be 128-bits (16 bytes)")
+
+ self._tweak = tweak
+
+ @property
+ def tweak(self) -> bytes:
+ return self._tweak
+
+ def validate_for_algorithm(self, algorithm: CipherAlgorithm) -> None:
+ if isinstance(algorithm, (algorithms.AES128, algorithms.AES256)):
+ raise TypeError(
+ "The AES128 and AES256 classes do not support XTS, please use "
+ "the standard AES class instead."
+ )
+
+ if algorithm.key_size not in (256, 512):
+ raise ValueError(
+ "The XTS specification requires a 256-bit key for AES-128-XTS"
+ " and 512-bit key for AES-256-XTS"
+ )
+
+
+class ECB(Mode):
+ name = "ECB"
+
+ validate_for_algorithm = _check_aes_key_length
+
+
+class OFB(ModeWithInitializationVector):
+ name = "OFB"
+
+ def __init__(self, initialization_vector: bytes):
+ utils._check_byteslike("initialization_vector", initialization_vector)
+ self._initialization_vector = initialization_vector
+
+ @property
+ def initialization_vector(self) -> bytes:
+ return self._initialization_vector
+
+ validate_for_algorithm = _check_iv_and_key_length
+
+
+class CFB(ModeWithInitializationVector):
+ name = "CFB"
+
+ def __init__(self, initialization_vector: bytes):
+ utils._check_byteslike("initialization_vector", initialization_vector)
+ self._initialization_vector = initialization_vector
+
+ @property
+ def initialization_vector(self) -> bytes:
+ return self._initialization_vector
+
+ validate_for_algorithm = _check_iv_and_key_length
+
+
+class CFB8(ModeWithInitializationVector):
+ name = "CFB8"
+
+ def __init__(self, initialization_vector: bytes):
+ utils._check_byteslike("initialization_vector", initialization_vector)
+ self._initialization_vector = initialization_vector
+
+ @property
+ def initialization_vector(self) -> bytes:
+ return self._initialization_vector
+
+ validate_for_algorithm = _check_iv_and_key_length
+
+
+class CTR(ModeWithNonce):
+ name = "CTR"
+
+ def __init__(self, nonce: bytes):
+ utils._check_byteslike("nonce", nonce)
+ self._nonce = nonce
+
+ @property
+ def nonce(self) -> bytes:
+ return self._nonce
+
+ def validate_for_algorithm(self, algorithm: CipherAlgorithm) -> None:
+ _check_aes_key_length(self, algorithm)
+ _check_nonce_length(self.nonce, self.name, algorithm)
+
+
+class GCM(ModeWithInitializationVector, ModeWithAuthenticationTag):
+ name = "GCM"
+ _MAX_ENCRYPTED_BYTES = (2**39 - 256) // 8
+ _MAX_AAD_BYTES = (2**64) // 8
+
+ def __init__(
+ self,
+ initialization_vector: bytes,
+ tag: typing.Optional[bytes] = None,
+ min_tag_length: int = 16,
+ ):
+ # OpenSSL 3.0.0 constrains GCM IVs to [64, 1024] bits inclusive
+ # This is a sane limit anyway so we'll enforce it here.
+ utils._check_byteslike("initialization_vector", initialization_vector)
+ if len(initialization_vector) < 8 or len(initialization_vector) > 128:
+ raise ValueError(
+ "initialization_vector must be between 8 and 128 bytes (64 "
+ "and 1024 bits)."
+ )
+ self._initialization_vector = initialization_vector
+ if tag is not None:
+ utils._check_bytes("tag", tag)
+ if min_tag_length < 4:
+ raise ValueError("min_tag_length must be >= 4")
+ if len(tag) < min_tag_length:
+ raise ValueError(
+ "Authentication tag must be {} bytes or longer.".format(
+ min_tag_length
+ )
+ )
+ self._tag = tag
+ self._min_tag_length = min_tag_length
+
+ @property
+ def tag(self) -> typing.Optional[bytes]:
+ return self._tag
+
+ @property
+ def initialization_vector(self) -> bytes:
+ return self._initialization_vector
+
+ def validate_for_algorithm(self, algorithm: CipherAlgorithm) -> None:
+ _check_aes_key_length(self, algorithm)
+ if not isinstance(algorithm, BlockCipherAlgorithm):
+ raise UnsupportedAlgorithm(
+ "GCM requires a block cipher algorithm",
+ _Reasons.UNSUPPORTED_CIPHER,
+ )
+ block_size_bytes = algorithm.block_size // 8
+ if self._tag is not None and len(self._tag) > block_size_bytes:
+ raise ValueError(
+ "Authentication tag cannot be more than {} bytes.".format(
+ block_size_bytes
+ )
+ )
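Tying the GCM mode to the AEAD contexts from base.py, a sketch of the tag lifecycle: AAD goes in before any `update()`, the tag becomes readable only after the encryptor is finalized, and handing that tag to the decryptor's `GCM(iv, tag)` makes its `finalize()` authenticate the message:

```python
import os

from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes

key, iv = os.urandom(32), os.urandom(12)

enc = Cipher(algorithms.AES(key), modes.GCM(iv)).encryptor()
enc.authenticate_additional_data(b"header")   # must precede update()
ct = enc.update(b"payload") + enc.finalize()
tag = enc.tag                                 # NotYetFinalized before this point

dec = Cipher(algorithms.AES(key), modes.GCM(iv, tag)).decryptor()
dec.authenticate_additional_data(b"header")
assert dec.update(ct) + dec.finalize() == b"payload"   # InvalidTag on tamper
```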
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/cmac.py b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/cmac.py
new file mode 100644
index 00000000..8aa1d791
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/cmac.py
@@ -0,0 +1,65 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import typing
+
+from cryptography import utils
+from cryptography.exceptions import AlreadyFinalized
+from cryptography.hazmat.primitives import ciphers
+
+if typing.TYPE_CHECKING:
+ from cryptography.hazmat.backends.openssl.cmac import _CMACContext
+
+
+class CMAC:
+ _ctx: typing.Optional[_CMACContext]
+ _algorithm: ciphers.BlockCipherAlgorithm
+
+ def __init__(
+ self,
+ algorithm: ciphers.BlockCipherAlgorithm,
+ backend: typing.Any = None,
+ ctx: typing.Optional[_CMACContext] = None,
+ ) -> None:
+ if not isinstance(algorithm, ciphers.BlockCipherAlgorithm):
+ raise TypeError("Expected instance of BlockCipherAlgorithm.")
+ self._algorithm = algorithm
+
+ if ctx is None:
+ from cryptography.hazmat.backends.openssl.backend import (
+ backend as ossl,
+ )
+
+ self._ctx = ossl.create_cmac_ctx(self._algorithm)
+ else:
+ self._ctx = ctx
+
+ def update(self, data: bytes) -> None:
+ if self._ctx is None:
+ raise AlreadyFinalized("Context was already finalized.")
+
+ utils._check_bytes("data", data)
+ self._ctx.update(data)
+
+ def finalize(self) -> bytes:
+ if self._ctx is None:
+ raise AlreadyFinalized("Context was already finalized.")
+ digest = self._ctx.finalize()
+ self._ctx = None
+ return digest
+
+ def verify(self, signature: bytes) -> None:
+ utils._check_bytes("signature", signature)
+ if self._ctx is None:
+ raise AlreadyFinalized("Context was already finalized.")
+
+ ctx, self._ctx = self._ctx, None
+ ctx.verify(signature)
+
+ def copy(self) -> CMAC:
+ if self._ctx is None:
+ raise AlreadyFinalized("Context was already finalized.")
+ return CMAC(self._algorithm, ctx=self._ctx.copy())
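Because finalize() and verify() consume the context, each MAC computation needs a fresh CMAC (or a copy()). A short usage sketch with an illustrative AES key:

import os
from cryptography.hazmat.primitives import cmac
from cryptography.hazmat.primitives.ciphers import algorithms

key = os.urandom(16)
c = cmac.CMAC(algorithms.AES(key))
c.update(b"message to authenticate")
tag = c.finalize()

# verify() recomputes the tag and raises InvalidSignature on mismatch
c = cmac.CMAC(algorithms.AES(key))
c.update(b"message to authenticate")
c.verify(tag)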
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/constant_time.py b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/constant_time.py
new file mode 100644
index 00000000..3975c714
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/constant_time.py
@@ -0,0 +1,14 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import hmac
+
+
+def bytes_eq(a: bytes, b: bytes) -> bool:
+ if not isinstance(a, bytes) or not isinstance(b, bytes):
+ raise TypeError("a and b must be bytes.")
+
+ return hmac.compare_digest(a, b)
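bytes_eq delegates to hmac.compare_digest, so comparing secret values (MAC tags, derived keys) does not leak the position of the first differing byte through timing. For example:

from cryptography.hazmat.primitives import constant_time

assert constant_time.bytes_eq(b"\x01\x02\x03", b"\x01\x02\x03")
assert not constant_time.bytes_eq(b"\x01\x02\x03", b"\x01\x02\x04")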
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/hashes.py b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/hashes.py
new file mode 100644
index 00000000..b6a7ff14
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/hashes.py
@@ -0,0 +1,243 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import abc
+import typing
+
+from cryptography.hazmat.bindings._rust import openssl as rust_openssl
+
+__all__ = [
+ "HashAlgorithm",
+ "HashContext",
+ "Hash",
+ "ExtendableOutputFunction",
+ "SHA1",
+ "SHA512_224",
+ "SHA512_256",
+ "SHA224",
+ "SHA256",
+ "SHA384",
+ "SHA512",
+ "SHA3_224",
+ "SHA3_256",
+ "SHA3_384",
+ "SHA3_512",
+ "SHAKE128",
+ "SHAKE256",
+ "MD5",
+ "BLAKE2b",
+ "BLAKE2s",
+ "SM3",
+]
+
+
+class HashAlgorithm(metaclass=abc.ABCMeta):
+ @property
+ @abc.abstractmethod
+ def name(self) -> str:
+ """
+ A string naming this algorithm (e.g. "sha256", "md5").
+ """
+
+ @property
+ @abc.abstractmethod
+ def digest_size(self) -> int:
+ """
+ The size of the resulting digest in bytes.
+ """
+
+ @property
+ @abc.abstractmethod
+ def block_size(self) -> typing.Optional[int]:
+ """
+ The internal block size of the hash function, or None if the hash
+ function does not use blocks internally (e.g. SHA3).
+ """
+
+
+class HashContext(metaclass=abc.ABCMeta):
+ @property
+ @abc.abstractmethod
+ def algorithm(self) -> HashAlgorithm:
+ """
+ A HashAlgorithm that will be used by this context.
+ """
+
+ @abc.abstractmethod
+ def update(self, data: bytes) -> None:
+ """
+ Processes the provided bytes through the hash.
+ """
+
+ @abc.abstractmethod
+ def finalize(self) -> bytes:
+ """
+ Finalizes the hash context and returns the hash digest as bytes.
+ """
+
+ @abc.abstractmethod
+ def copy(self) -> HashContext:
+ """
+ Return a HashContext that is a copy of the current context.
+ """
+
+
+Hash = rust_openssl.hashes.Hash
+HashContext.register(Hash)
+
+
+class ExtendableOutputFunction(metaclass=abc.ABCMeta):
+ """
+ An interface for extendable output functions.
+ """
+
+
+class SHA1(HashAlgorithm):
+ name = "sha1"
+ digest_size = 20
+ block_size = 64
+
+
+class SHA512_224(HashAlgorithm): # noqa: N801
+ name = "sha512-224"
+ digest_size = 28
+ block_size = 128
+
+
+class SHA512_256(HashAlgorithm): # noqa: N801
+ name = "sha512-256"
+ digest_size = 32
+ block_size = 128
+
+
+class SHA224(HashAlgorithm):
+ name = "sha224"
+ digest_size = 28
+ block_size = 64
+
+
+class SHA256(HashAlgorithm):
+ name = "sha256"
+ digest_size = 32
+ block_size = 64
+
+
+class SHA384(HashAlgorithm):
+ name = "sha384"
+ digest_size = 48
+ block_size = 128
+
+
+class SHA512(HashAlgorithm):
+ name = "sha512"
+ digest_size = 64
+ block_size = 128
+
+
+class SHA3_224(HashAlgorithm): # noqa: N801
+ name = "sha3-224"
+ digest_size = 28
+ block_size = None
+
+
+class SHA3_256(HashAlgorithm): # noqa: N801
+ name = "sha3-256"
+ digest_size = 32
+ block_size = None
+
+
+class SHA3_384(HashAlgorithm): # noqa: N801
+ name = "sha3-384"
+ digest_size = 48
+ block_size = None
+
+
+class SHA3_512(HashAlgorithm): # noqa: N801
+ name = "sha3-512"
+ digest_size = 64
+ block_size = None
+
+
+class SHAKE128(HashAlgorithm, ExtendableOutputFunction):
+ name = "shake128"
+ block_size = None
+
+ def __init__(self, digest_size: int):
+ if not isinstance(digest_size, int):
+ raise TypeError("digest_size must be an integer")
+
+ if digest_size < 1:
+ raise ValueError("digest_size must be a positive integer")
+
+ self._digest_size = digest_size
+
+ @property
+ def digest_size(self) -> int:
+ return self._digest_size
+
+
+class SHAKE256(HashAlgorithm, ExtendableOutputFunction):
+ name = "shake256"
+ block_size = None
+
+ def __init__(self, digest_size: int):
+ if not isinstance(digest_size, int):
+ raise TypeError("digest_size must be an integer")
+
+ if digest_size < 1:
+ raise ValueError("digest_size must be a positive integer")
+
+ self._digest_size = digest_size
+
+ @property
+ def digest_size(self) -> int:
+ return self._digest_size
+
+
+class MD5(HashAlgorithm):
+ name = "md5"
+ digest_size = 16
+ block_size = 64
+
+
+class BLAKE2b(HashAlgorithm):
+ name = "blake2b"
+ _max_digest_size = 64
+ _min_digest_size = 1
+ block_size = 128
+
+ def __init__(self, digest_size: int):
+ if digest_size != 64:
+ raise ValueError("Digest size must be 64")
+
+ self._digest_size = digest_size
+
+ @property
+ def digest_size(self) -> int:
+ return self._digest_size
+
+
+class BLAKE2s(HashAlgorithm):
+ name = "blake2s"
+ block_size = 64
+ _max_digest_size = 32
+ _min_digest_size = 1
+
+ def __init__(self, digest_size: int):
+ if digest_size != 32:
+ raise ValueError("Digest size must be 32")
+
+ self._digest_size = digest_size
+
+ @property
+ def digest_size(self) -> int:
+ return self._digest_size
+
+
+class SM3(HashAlgorithm):
+ name = "sm3"
+ digest_size = 32
+ block_size = 64
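The algorithm classes are plain descriptors; hashing happens through Hash, which accepts incremental updates. Extendable-output functions (SHAKE128/256) take their output length at construction. A brief sketch:

from cryptography.hazmat.primitives import hashes

digest = hashes.Hash(hashes.SHA256())
digest.update(b"abc")
digest.update(b"def")  # equivalent to hashing b"abcdef" in one call
assert len(digest.finalize()) == hashes.SHA256.digest_size  # 32 bytes

# XOFs fix the desired output length up front
xof = hashes.Hash(hashes.SHAKE128(digest_size=16))
xof.update(b"abc")
assert len(xof.finalize()) == 16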
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/hmac.py b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/hmac.py
new file mode 100644
index 00000000..a9442d59
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/hmac.py
@@ -0,0 +1,13 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+from cryptography.hazmat.bindings._rust import openssl as rust_openssl
+from cryptography.hazmat.primitives import hashes
+
+__all__ = ["HMAC"]
+
+HMAC = rust_openssl.hmac.HMAC
+hashes.HashContext.register(HMAC)
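HMAC is re-exported from the Rust bindings but keeps the familiar update/finalize/verify interface. An illustrative sketch:

import os
from cryptography.hazmat.primitives import hashes, hmac

key = os.urandom(32)
h = hmac.HMAC(key, hashes.SHA256())
h.update(b"payload")
tag = h.finalize()

h = hmac.HMAC(key, hashes.SHA256())
h.update(b"payload")
h.verify(tag)  # raises InvalidSignature if the tag does not match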
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__init__.py b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__init__.py
new file mode 100644
index 00000000..79bb459f
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__init__.py
@@ -0,0 +1,23 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import abc
+
+
+class KeyDerivationFunction(metaclass=abc.ABCMeta):
+ @abc.abstractmethod
+ def derive(self, key_material: bytes) -> bytes:
+ """
+ Deterministically generates and returns a new key based on the existing
+ key material.
+ """
+
+ @abc.abstractmethod
+ def verify(self, key_material: bytes, expected_key: bytes) -> None:
+ """
+ Checks whether the key generated by the key material matches the
+ expected derived key. Raises an exception if they do not match.
+ """
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/__init__.cpython-312.pyc
new file mode 100644
index 00000000..5885aa81
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/__init__.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/concatkdf.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/concatkdf.cpython-312.pyc
new file mode 100644
index 00000000..e022f3ad
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/concatkdf.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/hkdf.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/hkdf.cpython-312.pyc
new file mode 100644
index 00000000..a1eccb58
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/hkdf.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/kbkdf.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/kbkdf.cpython-312.pyc
new file mode 100644
index 00000000..ec4bc1ee
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/kbkdf.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/pbkdf2.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/pbkdf2.cpython-312.pyc
new file mode 100644
index 00000000..17ea3f88
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/pbkdf2.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/scrypt.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/scrypt.cpython-312.pyc
new file mode 100644
index 00000000..192565e8
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/scrypt.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/x963kdf.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/x963kdf.cpython-312.pyc
new file mode 100644
index 00000000..2f1c1b94
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/x963kdf.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/concatkdf.py b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/concatkdf.py
new file mode 100644
index 00000000..d5ea58a9
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/concatkdf.py
@@ -0,0 +1,124 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import typing
+
+from cryptography import utils
+from cryptography.exceptions import AlreadyFinalized, InvalidKey
+from cryptography.hazmat.primitives import constant_time, hashes, hmac
+from cryptography.hazmat.primitives.kdf import KeyDerivationFunction
+
+
+def _int_to_u32be(n: int) -> bytes:
+ return n.to_bytes(length=4, byteorder="big")
+
+
+def _common_args_checks(
+ algorithm: hashes.HashAlgorithm,
+ length: int,
+ otherinfo: typing.Optional[bytes],
+) -> None:
+ max_length = algorithm.digest_size * (2**32 - 1)
+ if length > max_length:
+ raise ValueError(f"Cannot derive keys larger than {max_length} bits.")
+ if otherinfo is not None:
+ utils._check_bytes("otherinfo", otherinfo)
+
+
+def _concatkdf_derive(
+ key_material: bytes,
+ length: int,
+ auxfn: typing.Callable[[], hashes.HashContext],
+ otherinfo: bytes,
+) -> bytes:
+ utils._check_byteslike("key_material", key_material)
+ output = [b""]
+ outlen = 0
+ counter = 1
+
+ while length > outlen:
+ h = auxfn()
+ h.update(_int_to_u32be(counter))
+ h.update(key_material)
+ h.update(otherinfo)
+ output.append(h.finalize())
+ outlen += len(output[-1])
+ counter += 1
+
+ return b"".join(output)[:length]
+
+
+class ConcatKDFHash(KeyDerivationFunction):
+ def __init__(
+ self,
+ algorithm: hashes.HashAlgorithm,
+ length: int,
+ otherinfo: typing.Optional[bytes],
+ backend: typing.Any = None,
+ ):
+ _common_args_checks(algorithm, length, otherinfo)
+ self._algorithm = algorithm
+ self._length = length
+ self._otherinfo: bytes = otherinfo if otherinfo is not None else b""
+
+ self._used = False
+
+ def _hash(self) -> hashes.Hash:
+ return hashes.Hash(self._algorithm)
+
+ def derive(self, key_material: bytes) -> bytes:
+ if self._used:
+ raise AlreadyFinalized
+ self._used = True
+ return _concatkdf_derive(
+ key_material, self._length, self._hash, self._otherinfo
+ )
+
+ def verify(self, key_material: bytes, expected_key: bytes) -> None:
+ if not constant_time.bytes_eq(self.derive(key_material), expected_key):
+ raise InvalidKey
+
+
+class ConcatKDFHMAC(KeyDerivationFunction):
+ def __init__(
+ self,
+ algorithm: hashes.HashAlgorithm,
+ length: int,
+ salt: typing.Optional[bytes],
+ otherinfo: typing.Optional[bytes],
+ backend: typing.Any = None,
+ ):
+ _common_args_checks(algorithm, length, otherinfo)
+ self._algorithm = algorithm
+ self._length = length
+ self._otherinfo: bytes = otherinfo if otherinfo is not None else b""
+
+ if algorithm.block_size is None:
+ raise TypeError(f"{algorithm.name} is unsupported for ConcatKDF")
+
+ if salt is None:
+ salt = b"\x00" * algorithm.block_size
+ else:
+ utils._check_bytes("salt", salt)
+
+ self._salt = salt
+
+ self._used = False
+
+ def _hmac(self) -> hmac.HMAC:
+ return hmac.HMAC(self._salt, self._algorithm)
+
+ def derive(self, key_material: bytes) -> bytes:
+ if self._used:
+ raise AlreadyFinalized
+ self._used = True
+ return _concatkdf_derive(
+ key_material, self._length, self._hmac, self._otherinfo
+ )
+
+ def verify(self, key_material: bytes, expected_key: bytes) -> None:
+ if not constant_time.bytes_eq(self.derive(key_material), expected_key):
+ raise InvalidKey
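Both ConcatKDF variants are single-use: derive() or verify() may be called once per instance, so verification constructs a second KDF with the same parameters. A sketch with illustrative inputs:

from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.kdf.concatkdf import ConcatKDFHash

otherinfo = b"concatkdf-example"
ckdf = ConcatKDFHash(algorithm=hashes.SHA256(), length=32, otherinfo=otherinfo)
key = ckdf.derive(b"input key material")

ckdf = ConcatKDFHash(algorithm=hashes.SHA256(), length=32, otherinfo=otherinfo)
ckdf.verify(b"input key material", key)  # raises InvalidKey on mismatch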
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/hkdf.py b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/hkdf.py
new file mode 100644
index 00000000..d4768944
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/hkdf.py
@@ -0,0 +1,101 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import typing
+
+from cryptography import utils
+from cryptography.exceptions import AlreadyFinalized, InvalidKey
+from cryptography.hazmat.primitives import constant_time, hashes, hmac
+from cryptography.hazmat.primitives.kdf import KeyDerivationFunction
+
+
+class HKDF(KeyDerivationFunction):
+ def __init__(
+ self,
+ algorithm: hashes.HashAlgorithm,
+ length: int,
+ salt: typing.Optional[bytes],
+ info: typing.Optional[bytes],
+ backend: typing.Any = None,
+ ):
+ self._algorithm = algorithm
+
+ if salt is None:
+ salt = b"\x00" * self._algorithm.digest_size
+ else:
+ utils._check_bytes("salt", salt)
+
+ self._salt = salt
+
+ self._hkdf_expand = HKDFExpand(self._algorithm, length, info)
+
+ def _extract(self, key_material: bytes) -> bytes:
+ h = hmac.HMAC(self._salt, self._algorithm)
+ h.update(key_material)
+ return h.finalize()
+
+ def derive(self, key_material: bytes) -> bytes:
+ utils._check_byteslike("key_material", key_material)
+ return self._hkdf_expand.derive(self._extract(key_material))
+
+ def verify(self, key_material: bytes, expected_key: bytes) -> None:
+ if not constant_time.bytes_eq(self.derive(key_material), expected_key):
+ raise InvalidKey
+
+
+class HKDFExpand(KeyDerivationFunction):
+ def __init__(
+ self,
+ algorithm: hashes.HashAlgorithm,
+ length: int,
+ info: typing.Optional[bytes],
+ backend: typing.Any = None,
+ ):
+ self._algorithm = algorithm
+
+ max_length = 255 * algorithm.digest_size
+
+ if length > max_length:
+ raise ValueError(
+ f"Cannot derive keys larger than {max_length} octets."
+ )
+
+ self._length = length
+
+ if info is None:
+ info = b""
+ else:
+ utils._check_bytes("info", info)
+
+ self._info = info
+
+ self._used = False
+
+ def _expand(self, key_material: bytes) -> bytes:
+ output = [b""]
+ counter = 1
+
+ while self._algorithm.digest_size * (len(output) - 1) < self._length:
+ h = hmac.HMAC(key_material, self._algorithm)
+ h.update(output[-1])
+ h.update(self._info)
+ h.update(bytes([counter]))
+ output.append(h.finalize())
+ counter += 1
+
+ return b"".join(output)[: self._length]
+
+ def derive(self, key_material: bytes) -> bytes:
+ utils._check_byteslike("key_material", key_material)
+ if self._used:
+ raise AlreadyFinalized
+
+ self._used = True
+ return self._expand(key_material)
+
+ def verify(self, key_material: bytes, expected_key: bytes) -> None:
+ if not constant_time.bytes_eq(self.derive(key_material), expected_key):
+ raise InvalidKey
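HKDF composes the two RFC 5869 stages: _extract turns the input key material into a pseudorandom key, and HKDFExpand stretches it to the requested length. A sketch with illustrative inputs:

import os
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.kdf.hkdf import HKDF

salt = os.urandom(16)
hkdf = HKDF(algorithm=hashes.SHA256(), length=32, salt=salt, info=b"handshake data")
key = hkdf.derive(b"shared secret from a key exchange")

# Instances are single-use; build a second one to verify
hkdf = HKDF(algorithm=hashes.SHA256(), length=32, salt=salt, info=b"handshake data")
hkdf.verify(b"shared secret from a key exchange", key)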
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/kbkdf.py b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/kbkdf.py
new file mode 100644
index 00000000..96776382
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/kbkdf.py
@@ -0,0 +1,299 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import typing
+
+from cryptography import utils
+from cryptography.exceptions import (
+ AlreadyFinalized,
+ InvalidKey,
+ UnsupportedAlgorithm,
+ _Reasons,
+)
+from cryptography.hazmat.primitives import (
+ ciphers,
+ cmac,
+ constant_time,
+ hashes,
+ hmac,
+)
+from cryptography.hazmat.primitives.kdf import KeyDerivationFunction
+
+
+class Mode(utils.Enum):
+ CounterMode = "ctr"
+
+
+class CounterLocation(utils.Enum):
+ BeforeFixed = "before_fixed"
+ AfterFixed = "after_fixed"
+ MiddleFixed = "middle_fixed"
+
+
+class _KBKDFDeriver:
+ def __init__(
+ self,
+ prf: typing.Callable,
+ mode: Mode,
+ length: int,
+ rlen: int,
+ llen: typing.Optional[int],
+ location: CounterLocation,
+ break_location: typing.Optional[int],
+ label: typing.Optional[bytes],
+ context: typing.Optional[bytes],
+ fixed: typing.Optional[bytes],
+ ):
+ assert callable(prf)
+
+ if not isinstance(mode, Mode):
+ raise TypeError("mode must be of type Mode")
+
+ if not isinstance(location, CounterLocation):
+ raise TypeError("location must be of type CounterLocation")
+
+ if break_location is None and location is CounterLocation.MiddleFixed:
+ raise ValueError("Please specify a break_location")
+
+ if (
+ break_location is not None
+ and location != CounterLocation.MiddleFixed
+ ):
+ raise ValueError(
+ "break_location is ignored when location is not"
+ " CounterLocation.MiddleFixed"
+ )
+
+ if break_location is not None and not isinstance(break_location, int):
+ raise TypeError("break_location must be an integer")
+
+ if break_location is not None and break_location < 0:
+            raise ValueError("break_location must be a non-negative integer")
+
+ if (label or context) and fixed:
+ raise ValueError(
+                "When supplying fixed data, label and context are ignored."
+ )
+
+ if rlen is None or not self._valid_byte_length(rlen):
+ raise ValueError("rlen must be between 1 and 4")
+
+ if llen is None and fixed is None:
+ raise ValueError("Please specify an llen")
+
+ if llen is not None and not isinstance(llen, int):
+ raise TypeError("llen must be an integer")
+
+ if label is None:
+ label = b""
+
+ if context is None:
+ context = b""
+
+ utils._check_bytes("label", label)
+ utils._check_bytes("context", context)
+ self._prf = prf
+ self._mode = mode
+ self._length = length
+ self._rlen = rlen
+ self._llen = llen
+ self._location = location
+ self._break_location = break_location
+ self._label = label
+ self._context = context
+ self._used = False
+ self._fixed_data = fixed
+
+ @staticmethod
+ def _valid_byte_length(value: int) -> bool:
+ if not isinstance(value, int):
+ raise TypeError("value must be of type int")
+
+ value_bin = utils.int_to_bytes(1, value)
+ if not 1 <= len(value_bin) <= 4:
+ return False
+ return True
+
+ def derive(self, key_material: bytes, prf_output_size: int) -> bytes:
+ if self._used:
+ raise AlreadyFinalized
+
+ utils._check_byteslike("key_material", key_material)
+ self._used = True
+
+ # inverse floor division (equivalent to ceiling)
+ rounds = -(-self._length // prf_output_size)
+
+ output = [b""]
+
+ # For counter mode, the number of iterations shall not be
+ # larger than 2^r-1, where r <= 32 is the binary length of the counter
+ # This ensures that the counter values used as an input to the
+ # PRF will not repeat during a particular call to the KDF function.
+ r_bin = utils.int_to_bytes(1, self._rlen)
+ if rounds > pow(2, len(r_bin) * 8) - 1:
+ raise ValueError("There are too many iterations.")
+
+ fixed = self._generate_fixed_input()
+
+ if self._location == CounterLocation.BeforeFixed:
+ data_before_ctr = b""
+ data_after_ctr = fixed
+ elif self._location == CounterLocation.AfterFixed:
+ data_before_ctr = fixed
+ data_after_ctr = b""
+ else:
+ if isinstance(
+ self._break_location, int
+ ) and self._break_location > len(fixed):
+ raise ValueError("break_location offset > len(fixed)")
+ data_before_ctr = fixed[: self._break_location]
+ data_after_ctr = fixed[self._break_location :]
+
+ for i in range(1, rounds + 1):
+ h = self._prf(key_material)
+
+ counter = utils.int_to_bytes(i, self._rlen)
+ input_data = data_before_ctr + counter + data_after_ctr
+
+ h.update(input_data)
+
+ output.append(h.finalize())
+
+ return b"".join(output)[: self._length]
+
+ def _generate_fixed_input(self) -> bytes:
+ if self._fixed_data and isinstance(self._fixed_data, bytes):
+ return self._fixed_data
+
+ l_val = utils.int_to_bytes(self._length * 8, self._llen)
+
+ return b"".join([self._label, b"\x00", self._context, l_val])
+
+
+class KBKDFHMAC(KeyDerivationFunction):
+ def __init__(
+ self,
+ algorithm: hashes.HashAlgorithm,
+ mode: Mode,
+ length: int,
+ rlen: int,
+ llen: typing.Optional[int],
+ location: CounterLocation,
+ label: typing.Optional[bytes],
+ context: typing.Optional[bytes],
+ fixed: typing.Optional[bytes],
+ backend: typing.Any = None,
+ *,
+ break_location: typing.Optional[int] = None,
+ ):
+ if not isinstance(algorithm, hashes.HashAlgorithm):
+ raise UnsupportedAlgorithm(
+ "Algorithm supplied is not a supported hash algorithm.",
+ _Reasons.UNSUPPORTED_HASH,
+ )
+
+ from cryptography.hazmat.backends.openssl.backend import (
+ backend as ossl,
+ )
+
+ if not ossl.hmac_supported(algorithm):
+ raise UnsupportedAlgorithm(
+ "Algorithm supplied is not a supported hmac algorithm.",
+ _Reasons.UNSUPPORTED_HASH,
+ )
+
+ self._algorithm = algorithm
+
+ self._deriver = _KBKDFDeriver(
+ self._prf,
+ mode,
+ length,
+ rlen,
+ llen,
+ location,
+ break_location,
+ label,
+ context,
+ fixed,
+ )
+
+ def _prf(self, key_material: bytes) -> hmac.HMAC:
+ return hmac.HMAC(key_material, self._algorithm)
+
+ def derive(self, key_material: bytes) -> bytes:
+ return self._deriver.derive(key_material, self._algorithm.digest_size)
+
+ def verify(self, key_material: bytes, expected_key: bytes) -> None:
+ if not constant_time.bytes_eq(self.derive(key_material), expected_key):
+ raise InvalidKey
+
+
+class KBKDFCMAC(KeyDerivationFunction):
+ def __init__(
+ self,
+ algorithm,
+ mode: Mode,
+ length: int,
+ rlen: int,
+ llen: typing.Optional[int],
+ location: CounterLocation,
+ label: typing.Optional[bytes],
+ context: typing.Optional[bytes],
+ fixed: typing.Optional[bytes],
+ backend: typing.Any = None,
+ *,
+ break_location: typing.Optional[int] = None,
+ ):
+ if not issubclass(
+ algorithm, ciphers.BlockCipherAlgorithm
+ ) or not issubclass(algorithm, ciphers.CipherAlgorithm):
+ raise UnsupportedAlgorithm(
+ "Algorithm supplied is not a supported cipher algorithm.",
+ _Reasons.UNSUPPORTED_CIPHER,
+ )
+
+ self._algorithm = algorithm
+ self._cipher: typing.Optional[ciphers.BlockCipherAlgorithm] = None
+
+ self._deriver = _KBKDFDeriver(
+ self._prf,
+ mode,
+ length,
+ rlen,
+ llen,
+ location,
+ break_location,
+ label,
+ context,
+ fixed,
+ )
+
+ def _prf(self, _: bytes) -> cmac.CMAC:
+ assert self._cipher is not None
+
+ return cmac.CMAC(self._cipher)
+
+ def derive(self, key_material: bytes) -> bytes:
+ self._cipher = self._algorithm(key_material)
+
+ assert self._cipher is not None
+
+ from cryptography.hazmat.backends.openssl.backend import (
+ backend as ossl,
+ )
+
+ if not ossl.cmac_algorithm_supported(self._cipher):
+ raise UnsupportedAlgorithm(
+ "Algorithm supplied is not a supported cipher algorithm.",
+ _Reasons.UNSUPPORTED_CIPHER,
+ )
+
+ return self._deriver.derive(key_material, self._cipher.block_size // 8)
+
+ def verify(self, key_material: bytes, expected_key: bytes) -> None:
+ if not constant_time.bytes_eq(self.derive(key_material), expected_key):
+ raise InvalidKey
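A sketch of the HMAC variant in counter mode (NIST SP 800-108); the label, context, and key material below are illustrative:

from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.kdf.kbkdf import (
    KBKDFHMAC,
    CounterLocation,
    Mode,
)

kdf = KBKDFHMAC(
    algorithm=hashes.SHA256(),
    mode=Mode.CounterMode,
    length=32,
    rlen=4,              # counter encoded as 4 bytes
    llen=4,              # output-length field encoded as 4 bytes
    location=CounterLocation.BeforeFixed,
    label=b"KBKDF HMAC Label",
    context=b"KBKDF HMAC Context",
    fixed=None,          # fixed data is built from label and context
)
key = kdf.derive(b"input key material")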
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/pbkdf2.py b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/pbkdf2.py
new file mode 100644
index 00000000..623e1ca7
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/pbkdf2.py
@@ -0,0 +1,64 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import typing
+
+from cryptography import utils
+from cryptography.exceptions import (
+ AlreadyFinalized,
+ InvalidKey,
+ UnsupportedAlgorithm,
+ _Reasons,
+)
+from cryptography.hazmat.bindings._rust import openssl as rust_openssl
+from cryptography.hazmat.primitives import constant_time, hashes
+from cryptography.hazmat.primitives.kdf import KeyDerivationFunction
+
+
+class PBKDF2HMAC(KeyDerivationFunction):
+ def __init__(
+ self,
+ algorithm: hashes.HashAlgorithm,
+ length: int,
+ salt: bytes,
+ iterations: int,
+ backend: typing.Any = None,
+ ):
+ from cryptography.hazmat.backends.openssl.backend import (
+ backend as ossl,
+ )
+
+ if not ossl.pbkdf2_hmac_supported(algorithm):
+ raise UnsupportedAlgorithm(
+ "{} is not supported for PBKDF2 by this backend.".format(
+ algorithm.name
+ ),
+ _Reasons.UNSUPPORTED_HASH,
+ )
+ self._used = False
+ self._algorithm = algorithm
+ self._length = length
+ utils._check_bytes("salt", salt)
+ self._salt = salt
+ self._iterations = iterations
+
+ def derive(self, key_material: bytes) -> bytes:
+ if self._used:
+ raise AlreadyFinalized("PBKDF2 instances can only be used once.")
+ self._used = True
+
+ return rust_openssl.kdf.derive_pbkdf2_hmac(
+ key_material,
+ self._algorithm,
+ self._salt,
+ self._iterations,
+ self._length,
+ )
+
+ def verify(self, key_material: bytes, expected_key: bytes) -> None:
+ derived_key = self.derive(key_material)
+ if not constant_time.bytes_eq(derived_key, expected_key):
+ raise InvalidKey("Keys do not match.")
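A password-hashing sketch; the salt and iteration count are illustrative (higher counts are better, bounded by acceptable latency):

import os
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC

salt = os.urandom(16)
kdf = PBKDF2HMAC(algorithm=hashes.SHA256(), length=32, salt=salt, iterations=480_000)
key = kdf.derive(b"my great password")

# Instances are single-use, so verification builds a new KDF
kdf = PBKDF2HMAC(algorithm=hashes.SHA256(), length=32, salt=salt, iterations=480_000)
kdf.verify(b"my great password", key)  # raises InvalidKey on mismatch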
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/scrypt.py b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/scrypt.py
new file mode 100644
index 00000000..05a4f675
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/scrypt.py
@@ -0,0 +1,80 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import sys
+import typing
+
+from cryptography import utils
+from cryptography.exceptions import (
+ AlreadyFinalized,
+ InvalidKey,
+ UnsupportedAlgorithm,
+)
+from cryptography.hazmat.bindings._rust import openssl as rust_openssl
+from cryptography.hazmat.primitives import constant_time
+from cryptography.hazmat.primitives.kdf import KeyDerivationFunction
+
+# This is used by the scrypt tests to skip tests that require more memory
+# than the MEM_LIMIT
+_MEM_LIMIT = sys.maxsize // 2
+
+
+class Scrypt(KeyDerivationFunction):
+ def __init__(
+ self,
+ salt: bytes,
+ length: int,
+ n: int,
+ r: int,
+ p: int,
+ backend: typing.Any = None,
+ ):
+ from cryptography.hazmat.backends.openssl.backend import (
+ backend as ossl,
+ )
+
+ if not ossl.scrypt_supported():
+ raise UnsupportedAlgorithm(
+ "This version of OpenSSL does not support scrypt"
+ )
+ self._length = length
+ utils._check_bytes("salt", salt)
+ if n < 2 or (n & (n - 1)) != 0:
+ raise ValueError("n must be greater than 1 and be a power of 2.")
+
+ if r < 1:
+ raise ValueError("r must be greater than or equal to 1.")
+
+ if p < 1:
+ raise ValueError("p must be greater than or equal to 1.")
+
+ self._used = False
+ self._salt = salt
+ self._n = n
+ self._r = r
+ self._p = p
+
+ def derive(self, key_material: bytes) -> bytes:
+ if self._used:
+ raise AlreadyFinalized("Scrypt instances can only be used once.")
+ self._used = True
+
+ utils._check_byteslike("key_material", key_material)
+
+ return rust_openssl.kdf.derive_scrypt(
+ key_material,
+ self._salt,
+ self._n,
+ self._r,
+ self._p,
+ _MEM_LIMIT,
+ self._length,
+ )
+
+ def verify(self, key_material: bytes, expected_key: bytes) -> None:
+ derived_key = self.derive(key_material)
+ if not constant_time.bytes_eq(derived_key, expected_key):
+ raise InvalidKey("Keys do not match.")
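scrypt's cost parameters trade CPU against memory: n is the CPU/memory cost (a power of two), r the block size, p the parallelization factor. A sketch with commonly cited interactive-login parameters:

import os
from cryptography.hazmat.primitives.kdf.scrypt import Scrypt

salt = os.urandom(16)
kdf = Scrypt(salt=salt, length=32, n=2**14, r=8, p=1)
key = kdf.derive(b"my great password")

kdf = Scrypt(salt=salt, length=32, n=2**14, r=8, p=1)
kdf.verify(b"my great password", key)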
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/x963kdf.py b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/x963kdf.py
new file mode 100644
index 00000000..17acc517
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/kdf/x963kdf.py
@@ -0,0 +1,61 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import typing
+
+from cryptography import utils
+from cryptography.exceptions import AlreadyFinalized, InvalidKey
+from cryptography.hazmat.primitives import constant_time, hashes
+from cryptography.hazmat.primitives.kdf import KeyDerivationFunction
+
+
+def _int_to_u32be(n: int) -> bytes:
+ return n.to_bytes(length=4, byteorder="big")
+
+
+class X963KDF(KeyDerivationFunction):
+ def __init__(
+ self,
+ algorithm: hashes.HashAlgorithm,
+ length: int,
+ sharedinfo: typing.Optional[bytes],
+ backend: typing.Any = None,
+ ):
+ max_len = algorithm.digest_size * (2**32 - 1)
+ if length > max_len:
+            raise ValueError(f"Cannot derive keys larger than {max_len} bits.")
+ if sharedinfo is not None:
+ utils._check_bytes("sharedinfo", sharedinfo)
+
+ self._algorithm = algorithm
+ self._length = length
+ self._sharedinfo = sharedinfo
+ self._used = False
+
+ def derive(self, key_material: bytes) -> bytes:
+ if self._used:
+ raise AlreadyFinalized
+ self._used = True
+ utils._check_byteslike("key_material", key_material)
+ output = [b""]
+ outlen = 0
+ counter = 1
+
+ while self._length > outlen:
+ h = hashes.Hash(self._algorithm)
+ h.update(key_material)
+ h.update(_int_to_u32be(counter))
+ if self._sharedinfo is not None:
+ h.update(self._sharedinfo)
+ output.append(h.finalize())
+ outlen += len(output[-1])
+ counter += 1
+
+ return b"".join(output)[: self._length]
+
+ def verify(self, key_material: bytes, expected_key: bytes) -> None:
+ if not constant_time.bytes_eq(self.derive(key_material), expected_key):
+ raise InvalidKey
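A sketch of the ANSI X9.63 KDF, commonly used to derive symmetric keys from an ECDH shared secret; the inputs are illustrative:

from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.kdf.x963kdf import X963KDF

xkdf = X963KDF(algorithm=hashes.SHA256(), length=32, sharedinfo=b"ANSI X9.63 Example")
key = xkdf.derive(b"input key material")

xkdf = X963KDF(algorithm=hashes.SHA256(), length=32, sharedinfo=b"ANSI X9.63 Example")
xkdf.verify(b"input key material", key)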
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/keywrap.py b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/keywrap.py
new file mode 100644
index 00000000..59b0326c
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/keywrap.py
@@ -0,0 +1,177 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import typing
+
+from cryptography.hazmat.primitives.ciphers import Cipher
+from cryptography.hazmat.primitives.ciphers.algorithms import AES
+from cryptography.hazmat.primitives.ciphers.modes import ECB
+from cryptography.hazmat.primitives.constant_time import bytes_eq
+
+
+def _wrap_core(
+ wrapping_key: bytes,
+ a: bytes,
+ r: typing.List[bytes],
+) -> bytes:
+ # RFC 3394 Key Wrap - 2.2.1 (index method)
+ encryptor = Cipher(AES(wrapping_key), ECB()).encryptor()
+ n = len(r)
+ for j in range(6):
+ for i in range(n):
+ # every encryption operation is a discrete 16 byte chunk (because
+ # AES has a 128-bit block size) and since we're using ECB it is
+ # safe to reuse the encryptor for the entire operation
+ b = encryptor.update(a + r[i])
+ a = (
+ int.from_bytes(b[:8], byteorder="big") ^ ((n * j) + i + 1)
+ ).to_bytes(length=8, byteorder="big")
+ r[i] = b[-8:]
+
+ assert encryptor.finalize() == b""
+
+ return a + b"".join(r)
+
+
+def aes_key_wrap(
+ wrapping_key: bytes,
+ key_to_wrap: bytes,
+ backend: typing.Any = None,
+) -> bytes:
+ if len(wrapping_key) not in [16, 24, 32]:
+ raise ValueError("The wrapping key must be a valid AES key length")
+
+ if len(key_to_wrap) < 16:
+ raise ValueError("The key to wrap must be at least 16 bytes")
+
+ if len(key_to_wrap) % 8 != 0:
+ raise ValueError("The key to wrap must be a multiple of 8 bytes")
+
+ a = b"\xa6\xa6\xa6\xa6\xa6\xa6\xa6\xa6"
+ r = [key_to_wrap[i : i + 8] for i in range(0, len(key_to_wrap), 8)]
+ return _wrap_core(wrapping_key, a, r)
+
+
+def _unwrap_core(
+ wrapping_key: bytes,
+ a: bytes,
+ r: typing.List[bytes],
+) -> typing.Tuple[bytes, typing.List[bytes]]:
+ # Implement RFC 3394 Key Unwrap - 2.2.2 (index method)
+ decryptor = Cipher(AES(wrapping_key), ECB()).decryptor()
+ n = len(r)
+ for j in reversed(range(6)):
+ for i in reversed(range(n)):
+ atr = (
+ int.from_bytes(a, byteorder="big") ^ ((n * j) + i + 1)
+ ).to_bytes(length=8, byteorder="big") + r[i]
+ # every decryption operation is a discrete 16 byte chunk so
+ # it is safe to reuse the decryptor for the entire operation
+ b = decryptor.update(atr)
+ a = b[:8]
+ r[i] = b[-8:]
+
+ assert decryptor.finalize() == b""
+ return a, r
+
+
+def aes_key_wrap_with_padding(
+ wrapping_key: bytes,
+ key_to_wrap: bytes,
+ backend: typing.Any = None,
+) -> bytes:
+ if len(wrapping_key) not in [16, 24, 32]:
+ raise ValueError("The wrapping key must be a valid AES key length")
+
+ aiv = b"\xA6\x59\x59\xA6" + len(key_to_wrap).to_bytes(
+ length=4, byteorder="big"
+ )
+ # pad the key to wrap if necessary
+ pad = (8 - (len(key_to_wrap) % 8)) % 8
+ key_to_wrap = key_to_wrap + b"\x00" * pad
+ if len(key_to_wrap) == 8:
+ # RFC 5649 - 4.1 - exactly 8 octets after padding
+ encryptor = Cipher(AES(wrapping_key), ECB()).encryptor()
+ b = encryptor.update(aiv + key_to_wrap)
+ assert encryptor.finalize() == b""
+ return b
+ else:
+ r = [key_to_wrap[i : i + 8] for i in range(0, len(key_to_wrap), 8)]
+ return _wrap_core(wrapping_key, aiv, r)
+
+
+def aes_key_unwrap_with_padding(
+ wrapping_key: bytes,
+ wrapped_key: bytes,
+ backend: typing.Any = None,
+) -> bytes:
+ if len(wrapped_key) < 16:
+ raise InvalidUnwrap("Must be at least 16 bytes")
+
+ if len(wrapping_key) not in [16, 24, 32]:
+ raise ValueError("The wrapping key must be a valid AES key length")
+
+ if len(wrapped_key) == 16:
+ # RFC 5649 - 4.2 - exactly two 64-bit blocks
+ decryptor = Cipher(AES(wrapping_key), ECB()).decryptor()
+ out = decryptor.update(wrapped_key)
+ assert decryptor.finalize() == b""
+ a = out[:8]
+ data = out[8:]
+ n = 1
+ else:
+ r = [wrapped_key[i : i + 8] for i in range(0, len(wrapped_key), 8)]
+ encrypted_aiv = r.pop(0)
+ n = len(r)
+ a, r = _unwrap_core(wrapping_key, encrypted_aiv, r)
+ data = b"".join(r)
+
+ # 1) Check that MSB(32,A) = A65959A6.
+ # 2) Check that 8*(n-1) < LSB(32,A) <= 8*n. If so, let
+ # MLI = LSB(32,A).
+ # 3) Let b = (8*n)-MLI, and then check that the rightmost b octets of
+ # the output data are zero.
+ mli = int.from_bytes(a[4:], byteorder="big")
+ b = (8 * n) - mli
+ if (
+ not bytes_eq(a[:4], b"\xa6\x59\x59\xa6")
+ or not 8 * (n - 1) < mli <= 8 * n
+ or (b != 0 and not bytes_eq(data[-b:], b"\x00" * b))
+ ):
+ raise InvalidUnwrap()
+
+ if b == 0:
+ return data
+ else:
+ return data[:-b]
+
+
+def aes_key_unwrap(
+ wrapping_key: bytes,
+ wrapped_key: bytes,
+ backend: typing.Any = None,
+) -> bytes:
+ if len(wrapped_key) < 24:
+ raise InvalidUnwrap("Must be at least 24 bytes")
+
+ if len(wrapped_key) % 8 != 0:
+ raise InvalidUnwrap("The wrapped key must be a multiple of 8 bytes")
+
+ if len(wrapping_key) not in [16, 24, 32]:
+ raise ValueError("The wrapping key must be a valid AES key length")
+
+ aiv = b"\xa6\xa6\xa6\xa6\xa6\xa6\xa6\xa6"
+ r = [wrapped_key[i : i + 8] for i in range(0, len(wrapped_key), 8)]
+ a = r.pop(0)
+ a, r = _unwrap_core(wrapping_key, a, r)
+ if not bytes_eq(a, aiv):
+ raise InvalidUnwrap()
+
+ return b"".join(r)
+
+
+class InvalidUnwrap(Exception):
+ pass
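A round-trip sketch of RFC 3394 key wrap and the RFC 5649 padded variant, which also accepts keys that are not multiples of 8 bytes; the keys below are illustrative:

import os
from cryptography.hazmat.primitives import keywrap

kek = os.urandom(32)  # key-encryption key
cek = os.urandom(32)  # content key to protect at rest

wrapped = keywrap.aes_key_wrap(kek, cek)
assert keywrap.aes_key_unwrap(kek, wrapped) == cek  # raises InvalidUnwrap on tampering

wrapped = keywrap.aes_key_wrap_with_padding(kek, b"short")
assert keywrap.aes_key_unwrap_with_padding(kek, wrapped) == b"short"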
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/padding.py b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/padding.py
new file mode 100644
index 00000000..fde3094b
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/padding.py
@@ -0,0 +1,225 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import abc
+import typing
+
+from cryptography import utils
+from cryptography.exceptions import AlreadyFinalized
+from cryptography.hazmat.bindings._rust import (
+ check_ansix923_padding,
+ check_pkcs7_padding,
+)
+
+
+class PaddingContext(metaclass=abc.ABCMeta):
+ @abc.abstractmethod
+ def update(self, data: bytes) -> bytes:
+ """
+ Pads the provided bytes and returns any available data as bytes.
+ """
+
+ @abc.abstractmethod
+ def finalize(self) -> bytes:
+ """
+ Finalize the padding, returns bytes.
+ """
+
+
+def _byte_padding_check(block_size: int) -> None:
+ if not (0 <= block_size <= 2040):
+ raise ValueError("block_size must be in range(0, 2041).")
+
+ if block_size % 8 != 0:
+ raise ValueError("block_size must be a multiple of 8.")
+
+
+def _byte_padding_update(
+ buffer_: typing.Optional[bytes], data: bytes, block_size: int
+) -> typing.Tuple[bytes, bytes]:
+ if buffer_ is None:
+ raise AlreadyFinalized("Context was already finalized.")
+
+ utils._check_byteslike("data", data)
+
+ buffer_ += bytes(data)
+
+ finished_blocks = len(buffer_) // (block_size // 8)
+
+ result = buffer_[: finished_blocks * (block_size // 8)]
+ buffer_ = buffer_[finished_blocks * (block_size // 8) :]
+
+ return buffer_, result
+
+
+def _byte_padding_pad(
+ buffer_: typing.Optional[bytes],
+ block_size: int,
+ paddingfn: typing.Callable[[int], bytes],
+) -> bytes:
+ if buffer_ is None:
+ raise AlreadyFinalized("Context was already finalized.")
+
+ pad_size = block_size // 8 - len(buffer_)
+ return buffer_ + paddingfn(pad_size)
+
+
+def _byte_unpadding_update(
+ buffer_: typing.Optional[bytes], data: bytes, block_size: int
+) -> typing.Tuple[bytes, bytes]:
+ if buffer_ is None:
+ raise AlreadyFinalized("Context was already finalized.")
+
+ utils._check_byteslike("data", data)
+
+ buffer_ += bytes(data)
+
+ finished_blocks = max(len(buffer_) // (block_size // 8) - 1, 0)
+
+ result = buffer_[: finished_blocks * (block_size // 8)]
+ buffer_ = buffer_[finished_blocks * (block_size // 8) :]
+
+ return buffer_, result
+
+
+def _byte_unpadding_check(
+ buffer_: typing.Optional[bytes],
+ block_size: int,
+ checkfn: typing.Callable[[bytes], int],
+) -> bytes:
+ if buffer_ is None:
+ raise AlreadyFinalized("Context was already finalized.")
+
+ if len(buffer_) != block_size // 8:
+ raise ValueError("Invalid padding bytes.")
+
+ valid = checkfn(buffer_)
+
+ if not valid:
+ raise ValueError("Invalid padding bytes.")
+
+ pad_size = buffer_[-1]
+ return buffer_[:-pad_size]
+
+
+class PKCS7:
+ def __init__(self, block_size: int):
+ _byte_padding_check(block_size)
+ self.block_size = block_size
+
+ def padder(self) -> PaddingContext:
+ return _PKCS7PaddingContext(self.block_size)
+
+ def unpadder(self) -> PaddingContext:
+ return _PKCS7UnpaddingContext(self.block_size)
+
+
+class _PKCS7PaddingContext(PaddingContext):
+ _buffer: typing.Optional[bytes]
+
+ def __init__(self, block_size: int):
+ self.block_size = block_size
+ # TODO: more copies than necessary, we should use zero-buffer (#193)
+ self._buffer = b""
+
+ def update(self, data: bytes) -> bytes:
+ self._buffer, result = _byte_padding_update(
+ self._buffer, data, self.block_size
+ )
+ return result
+
+ def _padding(self, size: int) -> bytes:
+ return bytes([size]) * size
+
+ def finalize(self) -> bytes:
+ result = _byte_padding_pad(
+ self._buffer, self.block_size, self._padding
+ )
+ self._buffer = None
+ return result
+
+
+class _PKCS7UnpaddingContext(PaddingContext):
+ _buffer: typing.Optional[bytes]
+
+ def __init__(self, block_size: int):
+ self.block_size = block_size
+ # TODO: more copies than necessary, we should use zero-buffer (#193)
+ self._buffer = b""
+
+ def update(self, data: bytes) -> bytes:
+ self._buffer, result = _byte_unpadding_update(
+ self._buffer, data, self.block_size
+ )
+ return result
+
+ def finalize(self) -> bytes:
+ result = _byte_unpadding_check(
+ self._buffer, self.block_size, check_pkcs7_padding
+ )
+ self._buffer = None
+ return result
+
+
+class ANSIX923:
+ def __init__(self, block_size: int):
+ _byte_padding_check(block_size)
+ self.block_size = block_size
+
+ def padder(self) -> PaddingContext:
+ return _ANSIX923PaddingContext(self.block_size)
+
+ def unpadder(self) -> PaddingContext:
+ return _ANSIX923UnpaddingContext(self.block_size)
+
+
+class _ANSIX923PaddingContext(PaddingContext):
+ _buffer: typing.Optional[bytes]
+
+ def __init__(self, block_size: int):
+ self.block_size = block_size
+ # TODO: more copies than necessary, we should use zero-buffer (#193)
+ self._buffer = b""
+
+ def update(self, data: bytes) -> bytes:
+ self._buffer, result = _byte_padding_update(
+ self._buffer, data, self.block_size
+ )
+ return result
+
+ def _padding(self, size: int) -> bytes:
+ return bytes([0]) * (size - 1) + bytes([size])
+
+ def finalize(self) -> bytes:
+ result = _byte_padding_pad(
+ self._buffer, self.block_size, self._padding
+ )
+ self._buffer = None
+ return result
+
+
+class _ANSIX923UnpaddingContext(PaddingContext):
+ _buffer: typing.Optional[bytes]
+
+ def __init__(self, block_size: int):
+ self.block_size = block_size
+ # TODO: more copies than necessary, we should use zero-buffer (#193)
+ self._buffer = b""
+
+ def update(self, data: bytes) -> bytes:
+ self._buffer, result = _byte_unpadding_update(
+ self._buffer, data, self.block_size
+ )
+ return result
+
+ def finalize(self) -> bytes:
+ result = _byte_unpadding_check(
+ self._buffer,
+ self.block_size,
+ check_ansix923_padding,
+ )
+ self._buffer = None
+ return result
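Block sizes are given in bits, and both contexts stream data through update() before finalize(). A PKCS7 round-trip sketch for AES's 128-bit blocks:

from cryptography.hazmat.primitives import padding

padder = padding.PKCS7(128).padder()
padded = padder.update(b"some plaintext") + padder.finalize()
assert len(padded) % 16 == 0  # padded out to a whole block

unpadder = padding.PKCS7(128).unpadder()
assert unpadder.update(padded) + unpadder.finalize() == b"some plaintext"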
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/poly1305.py b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/poly1305.py
new file mode 100644
index 00000000..7f5a77a5
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/poly1305.py
@@ -0,0 +1,11 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+from cryptography.hazmat.bindings._rust import openssl as rust_openssl
+
+__all__ = ["Poly1305"]
+
+Poly1305 = rust_openssl.poly1305.Poly1305
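The Rust-backed Poly1305 exposes one-shot classmethods in addition to the update/finalize interface. A sketch; note that a Poly1305 key must never authenticate more than one message:

import os
from cryptography.hazmat.primitives import poly1305

key = os.urandom(32)  # 32-byte one-time key
tag = poly1305.Poly1305.generate_tag(key, b"message")
poly1305.Poly1305.verify_tag(key, b"message", tag)  # raises InvalidSignature on mismatch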
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/__init__.py b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/__init__.py
new file mode 100644
index 00000000..b6c9a5cd
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/__init__.py
@@ -0,0 +1,63 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+from cryptography.hazmat.primitives._serialization import (
+ BestAvailableEncryption,
+ Encoding,
+ KeySerializationEncryption,
+ NoEncryption,
+ ParameterFormat,
+ PrivateFormat,
+ PublicFormat,
+ _KeySerializationEncryption,
+)
+from cryptography.hazmat.primitives.serialization.base import (
+ load_der_parameters,
+ load_der_private_key,
+ load_der_public_key,
+ load_pem_parameters,
+ load_pem_private_key,
+ load_pem_public_key,
+)
+from cryptography.hazmat.primitives.serialization.ssh import (
+ SSHCertificate,
+ SSHCertificateBuilder,
+ SSHCertificateType,
+ SSHCertPrivateKeyTypes,
+ SSHCertPublicKeyTypes,
+ SSHPrivateKeyTypes,
+ SSHPublicKeyTypes,
+ load_ssh_private_key,
+ load_ssh_public_identity,
+ load_ssh_public_key,
+)
+
+__all__ = [
+ "load_der_parameters",
+ "load_der_private_key",
+ "load_der_public_key",
+ "load_pem_parameters",
+ "load_pem_private_key",
+ "load_pem_public_key",
+ "load_ssh_private_key",
+ "load_ssh_public_identity",
+ "load_ssh_public_key",
+ "Encoding",
+ "PrivateFormat",
+ "PublicFormat",
+ "ParameterFormat",
+ "KeySerializationEncryption",
+ "BestAvailableEncryption",
+ "NoEncryption",
+ "_KeySerializationEncryption",
+ "SSHCertificateBuilder",
+ "SSHCertificate",
+ "SSHCertificateType",
+ "SSHCertPublicKeyTypes",
+ "SSHCertPrivateKeyTypes",
+ "SSHPrivateKeyTypes",
+ "SSHPublicKeyTypes",
+]
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/__init__.cpython-312.pyc
new file mode 100644
index 00000000..d9650ebc
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/__init__.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/base.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/base.cpython-312.pyc
new file mode 100644
index 00000000..a6ca4b1a
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/base.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/pkcs12.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/pkcs12.cpython-312.pyc
new file mode 100644
index 00000000..b5f803e8
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/pkcs12.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/pkcs7.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/pkcs7.cpython-312.pyc
new file mode 100644
index 00000000..9f923d0a
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/pkcs7.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/ssh.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/ssh.cpython-312.pyc
new file mode 100644
index 00000000..1555abf7
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/ssh.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/base.py b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/base.py
new file mode 100644
index 00000000..18a96ccf
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/base.py
@@ -0,0 +1,73 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import typing
+
+from cryptography.hazmat.primitives.asymmetric import dh
+from cryptography.hazmat.primitives.asymmetric.types import (
+ PrivateKeyTypes,
+ PublicKeyTypes,
+)
+
+
+def load_pem_private_key(
+ data: bytes,
+ password: typing.Optional[bytes],
+ backend: typing.Any = None,
+ *,
+ unsafe_skip_rsa_key_validation: bool = False,
+) -> PrivateKeyTypes:
+ from cryptography.hazmat.backends.openssl.backend import backend as ossl
+
+ return ossl.load_pem_private_key(
+ data, password, unsafe_skip_rsa_key_validation
+ )
+
+
+def load_pem_public_key(
+ data: bytes, backend: typing.Any = None
+) -> PublicKeyTypes:
+ from cryptography.hazmat.backends.openssl.backend import backend as ossl
+
+ return ossl.load_pem_public_key(data)
+
+
+def load_pem_parameters(
+ data: bytes, backend: typing.Any = None
+) -> dh.DHParameters:
+ from cryptography.hazmat.backends.openssl.backend import backend as ossl
+
+ return ossl.load_pem_parameters(data)
+
+
+def load_der_private_key(
+ data: bytes,
+ password: typing.Optional[bytes],
+ backend: typing.Any = None,
+ *,
+ unsafe_skip_rsa_key_validation: bool = False,
+) -> PrivateKeyTypes:
+ from cryptography.hazmat.backends.openssl.backend import backend as ossl
+
+ return ossl.load_der_private_key(
+ data, password, unsafe_skip_rsa_key_validation
+ )
+
+
+def load_der_public_key(
+ data: bytes, backend: typing.Any = None
+) -> PublicKeyTypes:
+ from cryptography.hazmat.backends.openssl.backend import backend as ossl
+
+ return ossl.load_der_public_key(data)
+
+
+def load_der_parameters(
+ data: bytes, backend: typing.Any = None
+) -> dh.DHParameters:
+ from cryptography.hazmat.backends.openssl.backend import backend as ossl
+
+ return ossl.load_der_parameters(data)
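All loaders dispatch to the OpenSSL backend and return the key type matching the input. A sketch of loading a PEM private key and re-serializing its public half (the file path is illustrative):

from cryptography.hazmat.primitives import serialization

with open("key.pem", "rb") as f:  # illustrative path
    private_key = serialization.load_pem_private_key(f.read(), password=None)

public_pem = private_key.public_key().public_bytes(
    encoding=serialization.Encoding.PEM,
    format=serialization.PublicFormat.SubjectPublicKeyInfo,
)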
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/pkcs12.py b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/pkcs12.py
new file mode 100644
index 00000000..27133a3f
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/pkcs12.py
@@ -0,0 +1,229 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import typing
+
+from cryptography import x509
+from cryptography.hazmat.primitives import serialization
+from cryptography.hazmat.primitives._serialization import PBES as PBES
+from cryptography.hazmat.primitives.asymmetric import (
+ dsa,
+ ec,
+ ed448,
+ ed25519,
+ rsa,
+)
+from cryptography.hazmat.primitives.asymmetric.types import PrivateKeyTypes
+
+__all__ = [
+ "PBES",
+ "PKCS12PrivateKeyTypes",
+ "PKCS12Certificate",
+ "PKCS12KeyAndCertificates",
+ "load_key_and_certificates",
+ "load_pkcs12",
+ "serialize_key_and_certificates",
+]
+
+PKCS12PrivateKeyTypes = typing.Union[
+ rsa.RSAPrivateKey,
+ dsa.DSAPrivateKey,
+ ec.EllipticCurvePrivateKey,
+ ed25519.Ed25519PrivateKey,
+ ed448.Ed448PrivateKey,
+]
+
+
+class PKCS12Certificate:
+ def __init__(
+ self,
+ cert: x509.Certificate,
+ friendly_name: typing.Optional[bytes],
+ ):
+ if not isinstance(cert, x509.Certificate):
+ raise TypeError("Expecting x509.Certificate object")
+ if friendly_name is not None and not isinstance(friendly_name, bytes):
+ raise TypeError("friendly_name must be bytes or None")
+ self._cert = cert
+ self._friendly_name = friendly_name
+
+ @property
+ def friendly_name(self) -> typing.Optional[bytes]:
+ return self._friendly_name
+
+ @property
+ def certificate(self) -> x509.Certificate:
+ return self._cert
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, PKCS12Certificate):
+ return NotImplemented
+
+ return (
+ self.certificate == other.certificate
+ and self.friendly_name == other.friendly_name
+ )
+
+ def __hash__(self) -> int:
+ return hash((self.certificate, self.friendly_name))
+
+ def __repr__(self) -> str:
+        return "<PKCS12Certificate({}, friendly_name={!r})>".format(
+ self.certificate, self.friendly_name
+ )
+
+
+class PKCS12KeyAndCertificates:
+ def __init__(
+ self,
+ key: typing.Optional[PrivateKeyTypes],
+ cert: typing.Optional[PKCS12Certificate],
+ additional_certs: typing.List[PKCS12Certificate],
+ ):
+ if key is not None and not isinstance(
+ key,
+ (
+ rsa.RSAPrivateKey,
+ dsa.DSAPrivateKey,
+ ec.EllipticCurvePrivateKey,
+ ed25519.Ed25519PrivateKey,
+ ed448.Ed448PrivateKey,
+ ),
+ ):
+ raise TypeError(
+ "Key must be RSA, DSA, EllipticCurve, ED25519, or ED448"
+ " private key, or None."
+ )
+ if cert is not None and not isinstance(cert, PKCS12Certificate):
+ raise TypeError("cert must be a PKCS12Certificate object or None")
+ if not all(
+ isinstance(add_cert, PKCS12Certificate)
+ for add_cert in additional_certs
+ ):
+ raise TypeError(
+ "all values in additional_certs must be PKCS12Certificate"
+ " objects"
+ )
+ self._key = key
+ self._cert = cert
+ self._additional_certs = additional_certs
+
+ @property
+ def key(self) -> typing.Optional[PrivateKeyTypes]:
+ return self._key
+
+ @property
+ def cert(self) -> typing.Optional[PKCS12Certificate]:
+ return self._cert
+
+ @property
+ def additional_certs(self) -> typing.List[PKCS12Certificate]:
+ return self._additional_certs
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, PKCS12KeyAndCertificates):
+ return NotImplemented
+
+ return (
+ self.key == other.key
+ and self.cert == other.cert
+ and self.additional_certs == other.additional_certs
+ )
+
+ def __hash__(self) -> int:
+ return hash((self.key, self.cert, tuple(self.additional_certs)))
+
+ def __repr__(self) -> str:
+ fmt = (
+ ""
+ )
+ return fmt.format(self.key, self.cert, self.additional_certs)
+
+
+def load_key_and_certificates(
+ data: bytes,
+ password: typing.Optional[bytes],
+ backend: typing.Any = None,
+) -> typing.Tuple[
+ typing.Optional[PrivateKeyTypes],
+ typing.Optional[x509.Certificate],
+ typing.List[x509.Certificate],
+]:
+ from cryptography.hazmat.backends.openssl.backend import backend as ossl
+
+ return ossl.load_key_and_certificates_from_pkcs12(data, password)
+
+
+def load_pkcs12(
+ data: bytes,
+ password: typing.Optional[bytes],
+ backend: typing.Any = None,
+) -> PKCS12KeyAndCertificates:
+ from cryptography.hazmat.backends.openssl.backend import backend as ossl
+
+ return ossl.load_pkcs12(data, password)
+
+
+_PKCS12CATypes = typing.Union[
+ x509.Certificate,
+ PKCS12Certificate,
+]
+
+
+def serialize_key_and_certificates(
+ name: typing.Optional[bytes],
+ key: typing.Optional[PKCS12PrivateKeyTypes],
+ cert: typing.Optional[x509.Certificate],
+ cas: typing.Optional[typing.Iterable[_PKCS12CATypes]],
+ encryption_algorithm: serialization.KeySerializationEncryption,
+) -> bytes:
+ if key is not None and not isinstance(
+ key,
+ (
+ rsa.RSAPrivateKey,
+ dsa.DSAPrivateKey,
+ ec.EllipticCurvePrivateKey,
+ ed25519.Ed25519PrivateKey,
+ ed448.Ed448PrivateKey,
+ ),
+ ):
+ raise TypeError(
+ "Key must be RSA, DSA, EllipticCurve, ED25519, or ED448"
+ " private key, or None."
+ )
+ if cert is not None and not isinstance(cert, x509.Certificate):
+ raise TypeError("cert must be a certificate or None")
+
+ if cas is not None:
+ cas = list(cas)
+ if not all(
+ isinstance(
+ val,
+ (
+ x509.Certificate,
+ PKCS12Certificate,
+ ),
+ )
+ for val in cas
+ ):
+ raise TypeError("all values in cas must be certificates")
+
+ if not isinstance(
+ encryption_algorithm, serialization.KeySerializationEncryption
+ ):
+ raise TypeError(
+ "Key encryption algorithm must be a "
+ "KeySerializationEncryption instance"
+ )
+
+ if key is None and cert is None and not cas:
+ raise ValueError("You must supply at least one of key, cert, or cas")
+
+ from cryptography.hazmat.backends.openssl.backend import backend
+
+ return backend.serialize_key_and_certificates_to_pkcs12(
+ name, key, cert, cas, encryption_algorithm
+ )
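+
+
+def _example_pkcs12_round_trip() -> None:  # pragma: no cover
+    """Illustrative sketch only -- not part of the library API.
+
+    Serialize a key/certificate pair to an encrypted PKCS#12 blob and load
+    it back. Assumes ``key`` and ``cert`` were obtained elsewhere, e.g. via
+    serialization.load_pem_private_key() and
+    x509.load_pem_x509_certificate().
+    """
+    key: PKCS12PrivateKeyTypes = ...  # type: ignore[assignment]
+    cert: x509.Certificate = ...  # type: ignore[assignment]
+    p12 = serialize_key_and_certificates(
+        name=b"my-identity",
+        key=key,
+        cert=cert,
+        cas=None,
+        encryption_algorithm=serialization.BestAvailableEncryption(b"secret"),
+    )
+    loaded_key, loaded_cert, extra_certs = load_key_and_certificates(
+        p12, b"secret"
+    )
+    assert loaded_key is not None and loaded_cert == cert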
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/pkcs7.py b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/pkcs7.py
new file mode 100644
index 00000000..9998bcaa
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/pkcs7.py
@@ -0,0 +1,235 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import email.base64mime
+import email.generator
+import email.message
+import email.policy
+import io
+import typing
+
+from cryptography import utils, x509
+from cryptography.hazmat.bindings._rust import pkcs7 as rust_pkcs7
+from cryptography.hazmat.primitives import hashes, serialization
+from cryptography.hazmat.primitives.asymmetric import ec, rsa
+from cryptography.utils import _check_byteslike
+
+
+def load_pem_pkcs7_certificates(data: bytes) -> typing.List[x509.Certificate]:
+ from cryptography.hazmat.backends.openssl.backend import backend
+
+ return backend.load_pem_pkcs7_certificates(data)
+
+
+def load_der_pkcs7_certificates(data: bytes) -> typing.List[x509.Certificate]:
+ from cryptography.hazmat.backends.openssl.backend import backend
+
+ return backend.load_der_pkcs7_certificates(data)
+
+
+def serialize_certificates(
+ certs: typing.List[x509.Certificate],
+ encoding: serialization.Encoding,
+) -> bytes:
+ return rust_pkcs7.serialize_certificates(certs, encoding)
+
+
+PKCS7HashTypes = typing.Union[
+ hashes.SHA224,
+ hashes.SHA256,
+ hashes.SHA384,
+ hashes.SHA512,
+]
+
+PKCS7PrivateKeyTypes = typing.Union[
+ rsa.RSAPrivateKey, ec.EllipticCurvePrivateKey
+]
+
+
+class PKCS7Options(utils.Enum):
+ Text = "Add text/plain MIME type"
+ Binary = "Don't translate input data into canonical MIME format"
+ DetachedSignature = "Don't embed data in the PKCS7 structure"
+ NoCapabilities = "Don't embed SMIME capabilities"
+ NoAttributes = "Don't embed authenticatedAttributes"
+ NoCerts = "Don't embed signer certificate"
+
+
+class PKCS7SignatureBuilder:
+ def __init__(
+ self,
+ data: typing.Optional[bytes] = None,
+ signers: typing.List[
+ typing.Tuple[
+ x509.Certificate,
+ PKCS7PrivateKeyTypes,
+ PKCS7HashTypes,
+ ]
+ ] = [],
+ additional_certs: typing.List[x509.Certificate] = [],
+ ):
+ self._data = data
+ self._signers = signers
+ self._additional_certs = additional_certs
+
+ def set_data(self, data: bytes) -> PKCS7SignatureBuilder:
+ _check_byteslike("data", data)
+ if self._data is not None:
+ raise ValueError("data may only be set once")
+
+        # carry any certificates added before set_data() forward as well
+        return PKCS7SignatureBuilder(
+            data, self._signers, self._additional_certs
+        )
+
+ def add_signer(
+ self,
+ certificate: x509.Certificate,
+ private_key: PKCS7PrivateKeyTypes,
+ hash_algorithm: PKCS7HashTypes,
+ ) -> PKCS7SignatureBuilder:
+ if not isinstance(
+ hash_algorithm,
+ (
+ hashes.SHA224,
+ hashes.SHA256,
+ hashes.SHA384,
+ hashes.SHA512,
+ ),
+ ):
+ raise TypeError(
+ "hash_algorithm must be one of hashes.SHA224, "
+ "SHA256, SHA384, or SHA512"
+ )
+ if not isinstance(certificate, x509.Certificate):
+ raise TypeError("certificate must be a x509.Certificate")
+
+ if not isinstance(
+ private_key, (rsa.RSAPrivateKey, ec.EllipticCurvePrivateKey)
+ ):
+ raise TypeError("Only RSA & EC keys are supported at this time.")
+
+ return PKCS7SignatureBuilder(
+ self._data,
+ self._signers + [(certificate, private_key, hash_algorithm)],
+ )
+
+ def add_certificate(
+ self, certificate: x509.Certificate
+ ) -> PKCS7SignatureBuilder:
+ if not isinstance(certificate, x509.Certificate):
+ raise TypeError("certificate must be a x509.Certificate")
+
+ return PKCS7SignatureBuilder(
+ self._data, self._signers, self._additional_certs + [certificate]
+ )
+
+ def sign(
+ self,
+ encoding: serialization.Encoding,
+ options: typing.Iterable[PKCS7Options],
+ backend: typing.Any = None,
+ ) -> bytes:
+ if len(self._signers) == 0:
+ raise ValueError("Must have at least one signer")
+ if self._data is None:
+ raise ValueError("You must add data to sign")
+ options = list(options)
+ if not all(isinstance(x, PKCS7Options) for x in options):
+ raise ValueError("options must be from the PKCS7Options enum")
+ if encoding not in (
+ serialization.Encoding.PEM,
+ serialization.Encoding.DER,
+ serialization.Encoding.SMIME,
+ ):
+ raise ValueError(
+ "Must be PEM, DER, or SMIME from the Encoding enum"
+ )
+
+ # Text is a meaningless option unless it is accompanied by
+ # DetachedSignature
+ if (
+ PKCS7Options.Text in options
+ and PKCS7Options.DetachedSignature not in options
+ ):
+ raise ValueError(
+ "When passing the Text option you must also pass "
+ "DetachedSignature"
+ )
+
+ if PKCS7Options.Text in options and encoding in (
+ serialization.Encoding.DER,
+ serialization.Encoding.PEM,
+ ):
+ raise ValueError(
+ "The Text option is only available for SMIME serialization"
+ )
+
+ # No attributes implies no capabilities so we'll error if you try to
+ # pass both.
+ if (
+ PKCS7Options.NoAttributes in options
+ and PKCS7Options.NoCapabilities in options
+ ):
+ raise ValueError(
+ "NoAttributes is a superset of NoCapabilities. Do not pass "
+ "both values."
+ )
+
+ return rust_pkcs7.sign_and_serialize(self, encoding, options)
+
+
+def _smime_encode(
+ data: bytes, signature: bytes, micalg: str, text_mode: bool
+) -> bytes:
+ # This function works pretty hard to replicate what OpenSSL does
+ # precisely. For good and for ill.
+
+ m = email.message.Message()
+ m.add_header("MIME-Version", "1.0")
+ m.add_header(
+ "Content-Type",
+ "multipart/signed",
+ protocol="application/x-pkcs7-signature",
+ micalg=micalg,
+ )
+
+ m.preamble = "This is an S/MIME signed message\n"
+
+ msg_part = OpenSSLMimePart()
+ msg_part.set_payload(data)
+ if text_mode:
+ msg_part.add_header("Content-Type", "text/plain")
+ m.attach(msg_part)
+
+ sig_part = email.message.MIMEPart()
+ sig_part.add_header(
+ "Content-Type", "application/x-pkcs7-signature", name="smime.p7s"
+ )
+ sig_part.add_header("Content-Transfer-Encoding", "base64")
+ sig_part.add_header(
+ "Content-Disposition", "attachment", filename="smime.p7s"
+ )
+ sig_part.set_payload(
+ email.base64mime.body_encode(signature, maxlinelen=65)
+ )
+ del sig_part["MIME-Version"]
+ m.attach(sig_part)
+
+ fp = io.BytesIO()
+ g = email.generator.BytesGenerator(
+ fp,
+ maxheaderlen=0,
+ mangle_from_=False,
+ policy=m.policy.clone(linesep="\r\n"),
+ )
+ g.flatten(m)
+ return fp.getvalue()
+
+
+class OpenSSLMimePart(email.message.MIMEPart):
+ # A MIMEPart subclass that replicates OpenSSL's behavior of not including
+ # a newline if there are no headers.
+ def _write_headers(self, generator) -> None:
+ if list(self.raw_items()):
+ generator._write_headers(self)
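+
+
+def _example_pkcs7_detached_smime() -> bytes:  # pragma: no cover
+    """Illustrative sketch only -- not part of the library API.
+
+    Produce a detached S/MIME signature over a payload. Assumes
+    ``signer_cert`` and ``signer_key`` come from elsewhere, e.g. loaded via
+    x509.load_pem_x509_certificate() / serialization.load_pem_private_key().
+    """
+    signer_cert: x509.Certificate = ...  # type: ignore[assignment]
+    signer_key: PKCS7PrivateKeyTypes = ...  # type: ignore[assignment]
+    return (
+        PKCS7SignatureBuilder()
+        .set_data(b"hello world")
+        .add_signer(signer_cert, signer_key, hashes.SHA256())
+        .sign(serialization.Encoding.SMIME, [PKCS7Options.DetachedSignature])
+    )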
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/ssh.py b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/ssh.py
new file mode 100644
index 00000000..35e53c10
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/serialization/ssh.py
@@ -0,0 +1,1534 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import binascii
+import enum
+import os
+import re
+import typing
+import warnings
+from base64 import encodebytes as _base64_encode
+from dataclasses import dataclass
+
+from cryptography import utils
+from cryptography.exceptions import UnsupportedAlgorithm
+from cryptography.hazmat.primitives import hashes
+from cryptography.hazmat.primitives.asymmetric import (
+ dsa,
+ ec,
+ ed25519,
+ padding,
+ rsa,
+)
+from cryptography.hazmat.primitives.asymmetric import utils as asym_utils
+from cryptography.hazmat.primitives.ciphers import (
+ AEADDecryptionContext,
+ Cipher,
+ algorithms,
+ modes,
+)
+from cryptography.hazmat.primitives.serialization import (
+ Encoding,
+ KeySerializationEncryption,
+ NoEncryption,
+ PrivateFormat,
+ PublicFormat,
+ _KeySerializationEncryption,
+)
+
+try:
+ from bcrypt import kdf as _bcrypt_kdf
+
+ _bcrypt_supported = True
+except ImportError:
+ _bcrypt_supported = False
+
+ def _bcrypt_kdf(
+ password: bytes,
+ salt: bytes,
+ desired_key_bytes: int,
+ rounds: int,
+ ignore_few_rounds: bool = False,
+ ) -> bytes:
+ raise UnsupportedAlgorithm("Need bcrypt module")
+
+
+_SSH_ED25519 = b"ssh-ed25519"
+_SSH_RSA = b"ssh-rsa"
+_SSH_DSA = b"ssh-dss"
+_ECDSA_NISTP256 = b"ecdsa-sha2-nistp256"
+_ECDSA_NISTP384 = b"ecdsa-sha2-nistp384"
+_ECDSA_NISTP521 = b"ecdsa-sha2-nistp521"
+_CERT_SUFFIX = b"-cert-v01@openssh.com"
+
+# These are not key types, only algorithms, so they cannot appear
+# as a public key type
+_SSH_RSA_SHA256 = b"rsa-sha2-256"
+_SSH_RSA_SHA512 = b"rsa-sha2-512"
+
+_SSH_PUBKEY_RC = re.compile(rb"\A(\S+)[ \t]+(\S+)")
+_SK_MAGIC = b"openssh-key-v1\0"
+_SK_START = b"-----BEGIN OPENSSH PRIVATE KEY-----"
+_SK_END = b"-----END OPENSSH PRIVATE KEY-----"
+_BCRYPT = b"bcrypt"
+_NONE = b"none"
+_DEFAULT_CIPHER = b"aes256-ctr"
+_DEFAULT_ROUNDS = 16
+
+# re is the only way to work on bytes-like data
+_PEM_RC = re.compile(_SK_START + b"(.*?)" + _SK_END, re.DOTALL)
+
+# padding for max blocksize
+_PADDING = memoryview(bytearray(range(1, 1 + 16)))
+
+
+@dataclass
+class _SSHCipher:
+ alg: typing.Type[algorithms.AES]
+ key_len: int
+ mode: typing.Union[
+ typing.Type[modes.CTR],
+ typing.Type[modes.CBC],
+ typing.Type[modes.GCM],
+ ]
+ block_len: int
+ iv_len: int
+ tag_len: typing.Optional[int]
+ is_aead: bool
+
+
+# ciphers that are actually used in key wrapping
+_SSH_CIPHERS: typing.Dict[bytes, _SSHCipher] = {
+ b"aes256-ctr": _SSHCipher(
+ alg=algorithms.AES,
+ key_len=32,
+ mode=modes.CTR,
+ block_len=16,
+ iv_len=16,
+ tag_len=None,
+ is_aead=False,
+ ),
+ b"aes256-cbc": _SSHCipher(
+ alg=algorithms.AES,
+ key_len=32,
+ mode=modes.CBC,
+ block_len=16,
+ iv_len=16,
+ tag_len=None,
+ is_aead=False,
+ ),
+ b"aes256-gcm@openssh.com": _SSHCipher(
+ alg=algorithms.AES,
+ key_len=32,
+ mode=modes.GCM,
+ block_len=16,
+ iv_len=12,
+ tag_len=16,
+ is_aead=True,
+ ),
+}
+
+# map local curve name to key type
+_ECDSA_KEY_TYPE = {
+ "secp256r1": _ECDSA_NISTP256,
+ "secp384r1": _ECDSA_NISTP384,
+ "secp521r1": _ECDSA_NISTP521,
+}
+
+
+def _get_ssh_key_type(
+ key: typing.Union[SSHPrivateKeyTypes, SSHPublicKeyTypes]
+) -> bytes:
+ if isinstance(key, ec.EllipticCurvePrivateKey):
+ key_type = _ecdsa_key_type(key.public_key())
+ elif isinstance(key, ec.EllipticCurvePublicKey):
+ key_type = _ecdsa_key_type(key)
+ elif isinstance(key, (rsa.RSAPrivateKey, rsa.RSAPublicKey)):
+ key_type = _SSH_RSA
+ elif isinstance(key, (dsa.DSAPrivateKey, dsa.DSAPublicKey)):
+ key_type = _SSH_DSA
+ elif isinstance(
+ key, (ed25519.Ed25519PrivateKey, ed25519.Ed25519PublicKey)
+ ):
+ key_type = _SSH_ED25519
+ else:
+ raise ValueError("Unsupported key type")
+
+ return key_type
+
+
+def _ecdsa_key_type(public_key: ec.EllipticCurvePublicKey) -> bytes:
+ """Return SSH key_type and curve_name for private key."""
+ curve = public_key.curve
+ if curve.name not in _ECDSA_KEY_TYPE:
+ raise ValueError(
+ f"Unsupported curve for ssh private key: {curve.name!r}"
+ )
+ return _ECDSA_KEY_TYPE[curve.name]
+
+
+def _ssh_pem_encode(
+ data: bytes,
+ prefix: bytes = _SK_START + b"\n",
+ suffix: bytes = _SK_END + b"\n",
+) -> bytes:
+ return b"".join([prefix, _base64_encode(data), suffix])
+
+
+def _check_block_size(data: bytes, block_len: int) -> None:
+ """Require data to be full blocks"""
+ if not data or len(data) % block_len != 0:
+ raise ValueError("Corrupt data: missing padding")
+
+
+def _check_empty(data: bytes) -> None:
+ """All data should have been parsed."""
+ if data:
+ raise ValueError("Corrupt data: unparsed data")
+
+
+def _init_cipher(
+ ciphername: bytes,
+ password: typing.Optional[bytes],
+ salt: bytes,
+ rounds: int,
+) -> Cipher[typing.Union[modes.CBC, modes.CTR, modes.GCM]]:
+ """Generate key + iv and return cipher."""
+ if not password:
+ raise ValueError("Key is password-protected.")
+
+ ciph = _SSH_CIPHERS[ciphername]
+ seed = _bcrypt_kdf(
+ password, salt, ciph.key_len + ciph.iv_len, rounds, True
+ )
+ return Cipher(
+ ciph.alg(seed[: ciph.key_len]),
+ ciph.mode(seed[ciph.key_len :]),
+ )
+
+
+def _get_u32(data: memoryview) -> typing.Tuple[int, memoryview]:
+ """Uint32"""
+ if len(data) < 4:
+ raise ValueError("Invalid data")
+ return int.from_bytes(data[:4], byteorder="big"), data[4:]
+
+
+def _get_u64(data: memoryview) -> typing.Tuple[int, memoryview]:
+ """Uint64"""
+ if len(data) < 8:
+ raise ValueError("Invalid data")
+ return int.from_bytes(data[:8], byteorder="big"), data[8:]
+
+
+def _get_sshstr(data: memoryview) -> typing.Tuple[memoryview, memoryview]:
+ """Bytes with u32 length prefix"""
+ n, data = _get_u32(data)
+ if n > len(data):
+ raise ValueError("Invalid data")
+ return data[:n], data[n:]
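+
+# For example, parsing b"\x00\x00\x00\x03abcXY" yields a view over the
+# 3-byte payload and the unconsumed tail:
+#
+#     val, rest = _get_sshstr(memoryview(b"\x00\x00\x00\x03abcXY"))
+#     assert bytes(val) == b"abc" and bytes(rest) == b"XY"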
+
+
+def _get_mpint(data: memoryview) -> typing.Tuple[int, memoryview]:
+ """Big integer."""
+ val, data = _get_sshstr(data)
+ if val and val[0] > 0x7F:
+ raise ValueError("Invalid data")
+ return int.from_bytes(val, "big"), data
+
+
+def _to_mpint(val: int) -> bytes:
+ """Storage format for signed bigint."""
+ if val < 0:
+ raise ValueError("negative mpint not allowed")
+ if not val:
+ return b""
+ nbytes = (val.bit_length() + 8) // 8
+ return utils.int_to_bytes(val, nbytes)
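+
+# Worked example: the sign bit forces a leading zero byte, so
+# _to_mpint(0x80) == b"\x00\x80" while _to_mpint(0x7F) == b"\x7f";
+# correspondingly, _get_mpint() rejects encodings whose first byte has the
+# high bit set.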
+
+
+class _FragList:
+ """Build recursive structure without data copy."""
+
+ flist: typing.List[bytes]
+
+ def __init__(
+ self, init: typing.Optional[typing.List[bytes]] = None
+ ) -> None:
+ self.flist = []
+ if init:
+ self.flist.extend(init)
+
+ def put_raw(self, val: bytes) -> None:
+ """Add plain bytes"""
+ self.flist.append(val)
+
+ def put_u32(self, val: int) -> None:
+ """Big-endian uint32"""
+ self.flist.append(val.to_bytes(length=4, byteorder="big"))
+
+ def put_u64(self, val: int) -> None:
+ """Big-endian uint64"""
+ self.flist.append(val.to_bytes(length=8, byteorder="big"))
+
+ def put_sshstr(self, val: typing.Union[bytes, _FragList]) -> None:
+ """Bytes prefixed with u32 length"""
+ if isinstance(val, (bytes, memoryview, bytearray)):
+ self.put_u32(len(val))
+ self.flist.append(val)
+ else:
+ self.put_u32(val.size())
+ self.flist.extend(val.flist)
+
+ def put_mpint(self, val: int) -> None:
+ """Big-endian bigint prefixed with u32 length"""
+ self.put_sshstr(_to_mpint(val))
+
+ def size(self) -> int:
+ """Current number of bytes"""
+ return sum(map(len, self.flist))
+
+ def render(self, dstbuf: memoryview, pos: int = 0) -> int:
+ """Write into bytearray"""
+ for frag in self.flist:
+ flen = len(frag)
+ start, pos = pos, pos + flen
+ dstbuf[start:pos] = frag
+ return pos
+
+ def tobytes(self) -> bytes:
+ """Return as bytes"""
+ buf = memoryview(bytearray(self.size()))
+ self.render(buf)
+ return buf.tobytes()
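+
+# Illustrative sketch: an SSH public-key blob is just length-prefixed
+# fragments, e.g.
+#
+#     f = _FragList()
+#     f.put_sshstr(b"ssh-rsa")
+#     f.put_mpint(65537)
+#     assert f.tobytes() == (
+#         b"\x00\x00\x00\x07ssh-rsa" b"\x00\x00\x00\x03\x01\x00\x01"
+#     )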
+
+
+class _SSHFormatRSA:
+ """Format for RSA keys.
+
+ Public:
+ mpint e, n
+ Private:
+ mpint n, e, d, iqmp, p, q
+ """
+
+ def get_public(self, data: memoryview):
+ """RSA public fields"""
+ e, data = _get_mpint(data)
+ n, data = _get_mpint(data)
+ return (e, n), data
+
+ def load_public(
+ self, data: memoryview
+ ) -> typing.Tuple[rsa.RSAPublicKey, memoryview]:
+ """Make RSA public key from data."""
+ (e, n), data = self.get_public(data)
+ public_numbers = rsa.RSAPublicNumbers(e, n)
+ public_key = public_numbers.public_key()
+ return public_key, data
+
+ def load_private(
+ self, data: memoryview, pubfields
+ ) -> typing.Tuple[rsa.RSAPrivateKey, memoryview]:
+ """Make RSA private key from data."""
+ n, data = _get_mpint(data)
+ e, data = _get_mpint(data)
+ d, data = _get_mpint(data)
+ iqmp, data = _get_mpint(data)
+ p, data = _get_mpint(data)
+ q, data = _get_mpint(data)
+
+ if (e, n) != pubfields:
+ raise ValueError("Corrupt data: rsa field mismatch")
+ dmp1 = rsa.rsa_crt_dmp1(d, p)
+ dmq1 = rsa.rsa_crt_dmq1(d, q)
+ public_numbers = rsa.RSAPublicNumbers(e, n)
+ private_numbers = rsa.RSAPrivateNumbers(
+ p, q, d, dmp1, dmq1, iqmp, public_numbers
+ )
+ private_key = private_numbers.private_key()
+ return private_key, data
+
+ def encode_public(
+ self, public_key: rsa.RSAPublicKey, f_pub: _FragList
+ ) -> None:
+ """Write RSA public key"""
+ pubn = public_key.public_numbers()
+ f_pub.put_mpint(pubn.e)
+ f_pub.put_mpint(pubn.n)
+
+ def encode_private(
+ self, private_key: rsa.RSAPrivateKey, f_priv: _FragList
+ ) -> None:
+ """Write RSA private key"""
+ private_numbers = private_key.private_numbers()
+ public_numbers = private_numbers.public_numbers
+
+ f_priv.put_mpint(public_numbers.n)
+ f_priv.put_mpint(public_numbers.e)
+
+ f_priv.put_mpint(private_numbers.d)
+ f_priv.put_mpint(private_numbers.iqmp)
+ f_priv.put_mpint(private_numbers.p)
+ f_priv.put_mpint(private_numbers.q)
+
+
+class _SSHFormatDSA:
+ """Format for DSA keys.
+
+ Public:
+ mpint p, q, g, y
+ Private:
+ mpint p, q, g, y, x
+ """
+
+ def get_public(
+ self, data: memoryview
+ ) -> typing.Tuple[typing.Tuple, memoryview]:
+ """DSA public fields"""
+ p, data = _get_mpint(data)
+ q, data = _get_mpint(data)
+ g, data = _get_mpint(data)
+ y, data = _get_mpint(data)
+ return (p, q, g, y), data
+
+ def load_public(
+ self, data: memoryview
+ ) -> typing.Tuple[dsa.DSAPublicKey, memoryview]:
+ """Make DSA public key from data."""
+ (p, q, g, y), data = self.get_public(data)
+ parameter_numbers = dsa.DSAParameterNumbers(p, q, g)
+ public_numbers = dsa.DSAPublicNumbers(y, parameter_numbers)
+ self._validate(public_numbers)
+ public_key = public_numbers.public_key()
+ return public_key, data
+
+ def load_private(
+ self, data: memoryview, pubfields
+ ) -> typing.Tuple[dsa.DSAPrivateKey, memoryview]:
+ """Make DSA private key from data."""
+ (p, q, g, y), data = self.get_public(data)
+ x, data = _get_mpint(data)
+
+ if (p, q, g, y) != pubfields:
+ raise ValueError("Corrupt data: dsa field mismatch")
+ parameter_numbers = dsa.DSAParameterNumbers(p, q, g)
+ public_numbers = dsa.DSAPublicNumbers(y, parameter_numbers)
+ self._validate(public_numbers)
+ private_numbers = dsa.DSAPrivateNumbers(x, public_numbers)
+ private_key = private_numbers.private_key()
+ return private_key, data
+
+ def encode_public(
+ self, public_key: dsa.DSAPublicKey, f_pub: _FragList
+ ) -> None:
+ """Write DSA public key"""
+ public_numbers = public_key.public_numbers()
+ parameter_numbers = public_numbers.parameter_numbers
+ self._validate(public_numbers)
+
+ f_pub.put_mpint(parameter_numbers.p)
+ f_pub.put_mpint(parameter_numbers.q)
+ f_pub.put_mpint(parameter_numbers.g)
+ f_pub.put_mpint(public_numbers.y)
+
+ def encode_private(
+ self, private_key: dsa.DSAPrivateKey, f_priv: _FragList
+ ) -> None:
+ """Write DSA private key"""
+ self.encode_public(private_key.public_key(), f_priv)
+ f_priv.put_mpint(private_key.private_numbers().x)
+
+ def _validate(self, public_numbers: dsa.DSAPublicNumbers) -> None:
+ parameter_numbers = public_numbers.parameter_numbers
+ if parameter_numbers.p.bit_length() != 1024:
+ raise ValueError("SSH supports only 1024 bit DSA keys")
+
+
+class _SSHFormatECDSA:
+ """Format for ECDSA keys.
+
+ Public:
+ str curve
+ bytes point
+ Private:
+ str curve
+ bytes point
+ mpint secret
+ """
+
+ def __init__(self, ssh_curve_name: bytes, curve: ec.EllipticCurve):
+ self.ssh_curve_name = ssh_curve_name
+ self.curve = curve
+
+ def get_public(
+ self, data: memoryview
+ ) -> typing.Tuple[typing.Tuple, memoryview]:
+ """ECDSA public fields"""
+ curve, data = _get_sshstr(data)
+ point, data = _get_sshstr(data)
+ if curve != self.ssh_curve_name:
+ raise ValueError("Curve name mismatch")
+ if point[0] != 4:
+ raise NotImplementedError("Need uncompressed point")
+ return (curve, point), data
+
+ def load_public(
+ self, data: memoryview
+ ) -> typing.Tuple[ec.EllipticCurvePublicKey, memoryview]:
+ """Make ECDSA public key from data."""
+ (curve_name, point), data = self.get_public(data)
+ public_key = ec.EllipticCurvePublicKey.from_encoded_point(
+ self.curve, point.tobytes()
+ )
+ return public_key, data
+
+ def load_private(
+ self, data: memoryview, pubfields
+ ) -> typing.Tuple[ec.EllipticCurvePrivateKey, memoryview]:
+ """Make ECDSA private key from data."""
+ (curve_name, point), data = self.get_public(data)
+ secret, data = _get_mpint(data)
+
+ if (curve_name, point) != pubfields:
+ raise ValueError("Corrupt data: ecdsa field mismatch")
+ private_key = ec.derive_private_key(secret, self.curve)
+ return private_key, data
+
+ def encode_public(
+ self, public_key: ec.EllipticCurvePublicKey, f_pub: _FragList
+ ) -> None:
+ """Write ECDSA public key"""
+ point = public_key.public_bytes(
+ Encoding.X962, PublicFormat.UncompressedPoint
+ )
+ f_pub.put_sshstr(self.ssh_curve_name)
+ f_pub.put_sshstr(point)
+
+ def encode_private(
+ self, private_key: ec.EllipticCurvePrivateKey, f_priv: _FragList
+ ) -> None:
+ """Write ECDSA private key"""
+ public_key = private_key.public_key()
+ private_numbers = private_key.private_numbers()
+
+ self.encode_public(public_key, f_priv)
+ f_priv.put_mpint(private_numbers.private_value)
+
+
+class _SSHFormatEd25519:
+ """Format for Ed25519 keys.
+
+ Public:
+ bytes point
+ Private:
+ bytes point
+ bytes secret_and_point
+ """
+
+ def get_public(
+ self, data: memoryview
+ ) -> typing.Tuple[typing.Tuple, memoryview]:
+ """Ed25519 public fields"""
+ point, data = _get_sshstr(data)
+ return (point,), data
+
+ def load_public(
+ self, data: memoryview
+ ) -> typing.Tuple[ed25519.Ed25519PublicKey, memoryview]:
+ """Make Ed25519 public key from data."""
+ (point,), data = self.get_public(data)
+ public_key = ed25519.Ed25519PublicKey.from_public_bytes(
+ point.tobytes()
+ )
+ return public_key, data
+
+ def load_private(
+ self, data: memoryview, pubfields
+ ) -> typing.Tuple[ed25519.Ed25519PrivateKey, memoryview]:
+ """Make Ed25519 private key from data."""
+ (point,), data = self.get_public(data)
+ keypair, data = _get_sshstr(data)
+
+ secret = keypair[:32]
+ point2 = keypair[32:]
+ if point != point2 or (point,) != pubfields:
+ raise ValueError("Corrupt data: ed25519 field mismatch")
+ private_key = ed25519.Ed25519PrivateKey.from_private_bytes(secret)
+ return private_key, data
+
+ def encode_public(
+ self, public_key: ed25519.Ed25519PublicKey, f_pub: _FragList
+ ) -> None:
+ """Write Ed25519 public key"""
+ raw_public_key = public_key.public_bytes(
+ Encoding.Raw, PublicFormat.Raw
+ )
+ f_pub.put_sshstr(raw_public_key)
+
+ def encode_private(
+ self, private_key: ed25519.Ed25519PrivateKey, f_priv: _FragList
+ ) -> None:
+ """Write Ed25519 private key"""
+ public_key = private_key.public_key()
+ raw_private_key = private_key.private_bytes(
+ Encoding.Raw, PrivateFormat.Raw, NoEncryption()
+ )
+ raw_public_key = public_key.public_bytes(
+ Encoding.Raw, PublicFormat.Raw
+ )
+ f_keypair = _FragList([raw_private_key, raw_public_key])
+
+ self.encode_public(public_key, f_priv)
+ f_priv.put_sshstr(f_keypair)
+
+
+_KEY_FORMATS = {
+ _SSH_RSA: _SSHFormatRSA(),
+ _SSH_DSA: _SSHFormatDSA(),
+ _SSH_ED25519: _SSHFormatEd25519(),
+ _ECDSA_NISTP256: _SSHFormatECDSA(b"nistp256", ec.SECP256R1()),
+ _ECDSA_NISTP384: _SSHFormatECDSA(b"nistp384", ec.SECP384R1()),
+ _ECDSA_NISTP521: _SSHFormatECDSA(b"nistp521", ec.SECP521R1()),
+}
+
+
+def _lookup_kformat(key_type: bytes):
+ """Return valid format or throw error"""
+ if not isinstance(key_type, bytes):
+ key_type = memoryview(key_type).tobytes()
+ if key_type in _KEY_FORMATS:
+ return _KEY_FORMATS[key_type]
+ raise UnsupportedAlgorithm(f"Unsupported key type: {key_type!r}")
+
+
+SSHPrivateKeyTypes = typing.Union[
+ ec.EllipticCurvePrivateKey,
+ rsa.RSAPrivateKey,
+ dsa.DSAPrivateKey,
+ ed25519.Ed25519PrivateKey,
+]
+
+
+def load_ssh_private_key(
+ data: bytes,
+ password: typing.Optional[bytes],
+ backend: typing.Any = None,
+) -> SSHPrivateKeyTypes:
+ """Load private key from OpenSSH custom encoding."""
+ utils._check_byteslike("data", data)
+ if password is not None:
+ utils._check_bytes("password", password)
+
+ m = _PEM_RC.search(data)
+ if not m:
+ raise ValueError("Not OpenSSH private key format")
+ p1 = m.start(1)
+ p2 = m.end(1)
+ data = binascii.a2b_base64(memoryview(data)[p1:p2])
+ if not data.startswith(_SK_MAGIC):
+ raise ValueError("Not OpenSSH private key format")
+ data = memoryview(data)[len(_SK_MAGIC) :]
+
+ # parse header
+ ciphername, data = _get_sshstr(data)
+ kdfname, data = _get_sshstr(data)
+ kdfoptions, data = _get_sshstr(data)
+ nkeys, data = _get_u32(data)
+ if nkeys != 1:
+ raise ValueError("Only one key supported")
+
+ # load public key data
+ pubdata, data = _get_sshstr(data)
+ pub_key_type, pubdata = _get_sshstr(pubdata)
+ kformat = _lookup_kformat(pub_key_type)
+ pubfields, pubdata = kformat.get_public(pubdata)
+ _check_empty(pubdata)
+
+ if (ciphername, kdfname) != (_NONE, _NONE):
+ ciphername_bytes = ciphername.tobytes()
+ if ciphername_bytes not in _SSH_CIPHERS:
+ raise UnsupportedAlgorithm(
+ f"Unsupported cipher: {ciphername_bytes!r}"
+ )
+ if kdfname != _BCRYPT:
+ raise UnsupportedAlgorithm(f"Unsupported KDF: {kdfname!r}")
+ blklen = _SSH_CIPHERS[ciphername_bytes].block_len
+ tag_len = _SSH_CIPHERS[ciphername_bytes].tag_len
+ # load secret data
+ edata, data = _get_sshstr(data)
+ # see https://bugzilla.mindrot.org/show_bug.cgi?id=3553 for
+ # information about how OpenSSH handles AEAD tags
+ if _SSH_CIPHERS[ciphername_bytes].is_aead:
+ tag = bytes(data)
+ if len(tag) != tag_len:
+ raise ValueError("Corrupt data: invalid tag length for cipher")
+ else:
+ _check_empty(data)
+ _check_block_size(edata, blklen)
+ salt, kbuf = _get_sshstr(kdfoptions)
+ rounds, kbuf = _get_u32(kbuf)
+ _check_empty(kbuf)
+ ciph = _init_cipher(ciphername_bytes, password, salt.tobytes(), rounds)
+ dec = ciph.decryptor()
+ edata = memoryview(dec.update(edata))
+ if _SSH_CIPHERS[ciphername_bytes].is_aead:
+ assert isinstance(dec, AEADDecryptionContext)
+ _check_empty(dec.finalize_with_tag(tag))
+ else:
+ # _check_block_size requires data to be a full block so there
+ # should be no output from finalize
+ _check_empty(dec.finalize())
+ else:
+ # load secret data
+ edata, data = _get_sshstr(data)
+ _check_empty(data)
+ blklen = 8
+ _check_block_size(edata, blklen)
+ ck1, edata = _get_u32(edata)
+ ck2, edata = _get_u32(edata)
+ if ck1 != ck2:
+ raise ValueError("Corrupt data: broken checksum")
+
+ # load per-key struct
+ key_type, edata = _get_sshstr(edata)
+ if key_type != pub_key_type:
+ raise ValueError("Corrupt data: key type mismatch")
+ private_key, edata = kformat.load_private(edata, pubfields)
+ comment, edata = _get_sshstr(edata)
+
+    # Yes, SSH performs the padding check *after* all other parsing is done;
+    # we need to follow suit, as OpenSSH may write zero-length padding too.
+ if edata != _PADDING[: len(edata)]:
+ raise ValueError("Corrupt data: invalid padding")
+
+ if isinstance(private_key, dsa.DSAPrivateKey):
+ warnings.warn(
+ "SSH DSA keys are deprecated and will be removed in a future "
+ "release.",
+ utils.DeprecatedIn40,
+ stacklevel=2,
+ )
+
+ return private_key
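+
+# Illustrative usage sketch (names are hypothetical): given the bytes of an
+# "-----BEGIN OPENSSH PRIVATE KEY-----" file,
+#
+#     key = load_ssh_private_key(pem_bytes, password=b"hunter2")
+#
+# password may be None for unencrypted keys; an encrypted key with a
+# missing password, or corrupted input, raises ValueError.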
+
+
+def _serialize_ssh_private_key(
+ private_key: SSHPrivateKeyTypes,
+ password: bytes,
+ encryption_algorithm: KeySerializationEncryption,
+) -> bytes:
+ """Serialize private key with OpenSSH custom encoding."""
+ utils._check_bytes("password", password)
+ if isinstance(private_key, dsa.DSAPrivateKey):
+ warnings.warn(
+ "SSH DSA key support is deprecated and will be "
+ "removed in a future release",
+ utils.DeprecatedIn40,
+ stacklevel=4,
+ )
+
+ key_type = _get_ssh_key_type(private_key)
+ kformat = _lookup_kformat(key_type)
+
+ # setup parameters
+ f_kdfoptions = _FragList()
+ if password:
+ ciphername = _DEFAULT_CIPHER
+ blklen = _SSH_CIPHERS[ciphername].block_len
+ kdfname = _BCRYPT
+ rounds = _DEFAULT_ROUNDS
+ if (
+ isinstance(encryption_algorithm, _KeySerializationEncryption)
+ and encryption_algorithm._kdf_rounds is not None
+ ):
+ rounds = encryption_algorithm._kdf_rounds
+ salt = os.urandom(16)
+ f_kdfoptions.put_sshstr(salt)
+ f_kdfoptions.put_u32(rounds)
+ ciph = _init_cipher(ciphername, password, salt, rounds)
+ else:
+ ciphername = kdfname = _NONE
+ blklen = 8
+ ciph = None
+ nkeys = 1
+ checkval = os.urandom(4)
+ comment = b""
+
+ # encode public and private parts together
+ f_public_key = _FragList()
+ f_public_key.put_sshstr(key_type)
+ kformat.encode_public(private_key.public_key(), f_public_key)
+
+ f_secrets = _FragList([checkval, checkval])
+ f_secrets.put_sshstr(key_type)
+ kformat.encode_private(private_key, f_secrets)
+ f_secrets.put_sshstr(comment)
+ f_secrets.put_raw(_PADDING[: blklen - (f_secrets.size() % blklen)])
+
+ # top-level structure
+ f_main = _FragList()
+ f_main.put_raw(_SK_MAGIC)
+ f_main.put_sshstr(ciphername)
+ f_main.put_sshstr(kdfname)
+ f_main.put_sshstr(f_kdfoptions)
+ f_main.put_u32(nkeys)
+ f_main.put_sshstr(f_public_key)
+ f_main.put_sshstr(f_secrets)
+
+    # copy result into bytearray
+ slen = f_secrets.size()
+ mlen = f_main.size()
+ buf = memoryview(bytearray(mlen + blklen))
+ f_main.render(buf)
+ ofs = mlen - slen
+
+ # encrypt in-place
+ if ciph is not None:
+ ciph.encryptor().update_into(buf[ofs:mlen], buf[ofs:])
+
+ return _ssh_pem_encode(buf[:mlen])
+
+
+SSHPublicKeyTypes = typing.Union[
+ ec.EllipticCurvePublicKey,
+ rsa.RSAPublicKey,
+ dsa.DSAPublicKey,
+ ed25519.Ed25519PublicKey,
+]
+
+SSHCertPublicKeyTypes = typing.Union[
+ ec.EllipticCurvePublicKey,
+ rsa.RSAPublicKey,
+ ed25519.Ed25519PublicKey,
+]
+
+
+class SSHCertificateType(enum.Enum):
+ USER = 1
+ HOST = 2
+
+
+class SSHCertificate:
+ def __init__(
+ self,
+ _nonce: memoryview,
+ _public_key: SSHPublicKeyTypes,
+ _serial: int,
+ _cctype: int,
+ _key_id: memoryview,
+ _valid_principals: typing.List[bytes],
+ _valid_after: int,
+ _valid_before: int,
+ _critical_options: typing.Dict[bytes, bytes],
+ _extensions: typing.Dict[bytes, bytes],
+ _sig_type: memoryview,
+ _sig_key: memoryview,
+ _inner_sig_type: memoryview,
+ _signature: memoryview,
+ _tbs_cert_body: memoryview,
+ _cert_key_type: bytes,
+ _cert_body: memoryview,
+ ):
+ self._nonce = _nonce
+ self._public_key = _public_key
+ self._serial = _serial
+ try:
+ self._type = SSHCertificateType(_cctype)
+ except ValueError:
+ raise ValueError("Invalid certificate type")
+ self._key_id = _key_id
+ self._valid_principals = _valid_principals
+ self._valid_after = _valid_after
+ self._valid_before = _valid_before
+ self._critical_options = _critical_options
+ self._extensions = _extensions
+ self._sig_type = _sig_type
+ self._sig_key = _sig_key
+ self._inner_sig_type = _inner_sig_type
+ self._signature = _signature
+ self._cert_key_type = _cert_key_type
+ self._cert_body = _cert_body
+ self._tbs_cert_body = _tbs_cert_body
+
+ @property
+ def nonce(self) -> bytes:
+ return bytes(self._nonce)
+
+ def public_key(self) -> SSHCertPublicKeyTypes:
+ # make mypy happy until we remove DSA support entirely and
+ # the underlying union won't have a disallowed type
+ return typing.cast(SSHCertPublicKeyTypes, self._public_key)
+
+ @property
+ def serial(self) -> int:
+ return self._serial
+
+ @property
+ def type(self) -> SSHCertificateType:
+ return self._type
+
+ @property
+ def key_id(self) -> bytes:
+ return bytes(self._key_id)
+
+ @property
+ def valid_principals(self) -> typing.List[bytes]:
+ return self._valid_principals
+
+ @property
+ def valid_before(self) -> int:
+ return self._valid_before
+
+ @property
+ def valid_after(self) -> int:
+ return self._valid_after
+
+ @property
+ def critical_options(self) -> typing.Dict[bytes, bytes]:
+ return self._critical_options
+
+ @property
+ def extensions(self) -> typing.Dict[bytes, bytes]:
+ return self._extensions
+
+ def signature_key(self) -> SSHCertPublicKeyTypes:
+ sigformat = _lookup_kformat(self._sig_type)
+ signature_key, sigkey_rest = sigformat.load_public(self._sig_key)
+ _check_empty(sigkey_rest)
+ return signature_key
+
+ def public_bytes(self) -> bytes:
+ return (
+ bytes(self._cert_key_type)
+ + b" "
+ + binascii.b2a_base64(bytes(self._cert_body), newline=False)
+ )
+
+ def verify_cert_signature(self) -> None:
+ signature_key = self.signature_key()
+ if isinstance(signature_key, ed25519.Ed25519PublicKey):
+ signature_key.verify(
+ bytes(self._signature), bytes(self._tbs_cert_body)
+ )
+ elif isinstance(signature_key, ec.EllipticCurvePublicKey):
+ # The signature is encoded as a pair of big-endian integers
+ r, data = _get_mpint(self._signature)
+ s, data = _get_mpint(data)
+ _check_empty(data)
+ computed_sig = asym_utils.encode_dss_signature(r, s)
+ hash_alg = _get_ec_hash_alg(signature_key.curve)
+ signature_key.verify(
+ computed_sig, bytes(self._tbs_cert_body), ec.ECDSA(hash_alg)
+ )
+ else:
+ assert isinstance(signature_key, rsa.RSAPublicKey)
+ if self._inner_sig_type == _SSH_RSA:
+ hash_alg = hashes.SHA1()
+ elif self._inner_sig_type == _SSH_RSA_SHA256:
+ hash_alg = hashes.SHA256()
+ else:
+ assert self._inner_sig_type == _SSH_RSA_SHA512
+ hash_alg = hashes.SHA512()
+ signature_key.verify(
+ bytes(self._signature),
+ bytes(self._tbs_cert_body),
+ padding.PKCS1v15(),
+ hash_alg,
+ )
+
+
+def _get_ec_hash_alg(curve: ec.EllipticCurve) -> hashes.HashAlgorithm:
+ if isinstance(curve, ec.SECP256R1):
+ return hashes.SHA256()
+ elif isinstance(curve, ec.SECP384R1):
+ return hashes.SHA384()
+ else:
+ assert isinstance(curve, ec.SECP521R1)
+ return hashes.SHA512()
+
+
+def _load_ssh_public_identity(
+ data: bytes,
+ _legacy_dsa_allowed=False,
+) -> typing.Union[SSHCertificate, SSHPublicKeyTypes]:
+ utils._check_byteslike("data", data)
+
+ m = _SSH_PUBKEY_RC.match(data)
+ if not m:
+ raise ValueError("Invalid line format")
+ key_type = orig_key_type = m.group(1)
+ key_body = m.group(2)
+ with_cert = False
+ if key_type.endswith(_CERT_SUFFIX):
+ with_cert = True
+ key_type = key_type[: -len(_CERT_SUFFIX)]
+ if key_type == _SSH_DSA and not _legacy_dsa_allowed:
+ raise UnsupportedAlgorithm(
+ "DSA keys aren't supported in SSH certificates"
+ )
+ kformat = _lookup_kformat(key_type)
+
+ try:
+ rest = memoryview(binascii.a2b_base64(key_body))
+ except (TypeError, binascii.Error):
+ raise ValueError("Invalid format")
+
+ if with_cert:
+ cert_body = rest
+ inner_key_type, rest = _get_sshstr(rest)
+ if inner_key_type != orig_key_type:
+ raise ValueError("Invalid key format")
+ if with_cert:
+ nonce, rest = _get_sshstr(rest)
+ public_key, rest = kformat.load_public(rest)
+ if with_cert:
+ serial, rest = _get_u64(rest)
+ cctype, rest = _get_u32(rest)
+ key_id, rest = _get_sshstr(rest)
+ principals, rest = _get_sshstr(rest)
+ valid_principals = []
+ while principals:
+ principal, principals = _get_sshstr(principals)
+ valid_principals.append(bytes(principal))
+ valid_after, rest = _get_u64(rest)
+ valid_before, rest = _get_u64(rest)
+ crit_options, rest = _get_sshstr(rest)
+ critical_options = _parse_exts_opts(crit_options)
+ exts, rest = _get_sshstr(rest)
+ extensions = _parse_exts_opts(exts)
+ # Get the reserved field, which is unused.
+ _, rest = _get_sshstr(rest)
+ sig_key_raw, rest = _get_sshstr(rest)
+ sig_type, sig_key = _get_sshstr(sig_key_raw)
+ if sig_type == _SSH_DSA and not _legacy_dsa_allowed:
+ raise UnsupportedAlgorithm(
+ "DSA signatures aren't supported in SSH certificates"
+ )
+ # Get the entire cert body and subtract the signature
+ tbs_cert_body = cert_body[: -len(rest)]
+ signature_raw, rest = _get_sshstr(rest)
+ _check_empty(rest)
+ inner_sig_type, sig_rest = _get_sshstr(signature_raw)
+ # RSA certs can have multiple algorithm types
+ if (
+ sig_type == _SSH_RSA
+ and inner_sig_type
+ not in [_SSH_RSA_SHA256, _SSH_RSA_SHA512, _SSH_RSA]
+ ) or (sig_type != _SSH_RSA and inner_sig_type != sig_type):
+ raise ValueError("Signature key type does not match")
+ signature, sig_rest = _get_sshstr(sig_rest)
+ _check_empty(sig_rest)
+ return SSHCertificate(
+ nonce,
+ public_key,
+ serial,
+ cctype,
+ key_id,
+ valid_principals,
+ valid_after,
+ valid_before,
+ critical_options,
+ extensions,
+ sig_type,
+ sig_key,
+ inner_sig_type,
+ signature,
+ tbs_cert_body,
+ orig_key_type,
+ cert_body,
+ )
+ else:
+ _check_empty(rest)
+ return public_key
+
+
+def load_ssh_public_identity(
+ data: bytes,
+) -> typing.Union[SSHCertificate, SSHPublicKeyTypes]:
+ return _load_ssh_public_identity(data)
+
+
+def _parse_exts_opts(exts_opts: memoryview) -> typing.Dict[bytes, bytes]:
+ result: typing.Dict[bytes, bytes] = {}
+ last_name = None
+ while exts_opts:
+ name, exts_opts = _get_sshstr(exts_opts)
+ bname: bytes = bytes(name)
+ if bname in result:
+ raise ValueError("Duplicate name")
+ if last_name is not None and bname < last_name:
+ raise ValueError("Fields not lexically sorted")
+ value, exts_opts = _get_sshstr(exts_opts)
+ if len(value) > 0:
+ try:
+ value, extra = _get_sshstr(value)
+ except ValueError:
+ warnings.warn(
+ "This certificate has an incorrect encoding for critical "
+ "options or extensions. This will be an exception in "
+ "cryptography 42",
+ utils.DeprecatedIn41,
+ stacklevel=4,
+ )
+ else:
+ if len(extra) > 0:
+ raise ValueError("Unexpected extra data after value")
+ result[bname] = bytes(value)
+ last_name = bname
+ return result
+
+
+def load_ssh_public_key(
+ data: bytes, backend: typing.Any = None
+) -> SSHPublicKeyTypes:
+ cert_or_key = _load_ssh_public_identity(data, _legacy_dsa_allowed=True)
+ public_key: SSHPublicKeyTypes
+ if isinstance(cert_or_key, SSHCertificate):
+ public_key = cert_or_key.public_key()
+ else:
+ public_key = cert_or_key
+
+ if isinstance(public_key, dsa.DSAPublicKey):
+ warnings.warn(
+ "SSH DSA keys are deprecated and will be removed in a future "
+ "release.",
+ utils.DeprecatedIn40,
+ stacklevel=2,
+ )
+ return public_key
+
+
+def serialize_ssh_public_key(public_key: SSHPublicKeyTypes) -> bytes:
+ """One-line public key format for OpenSSH"""
+ if isinstance(public_key, dsa.DSAPublicKey):
+ warnings.warn(
+ "SSH DSA key support is deprecated and will be "
+ "removed in a future release",
+ utils.DeprecatedIn40,
+ stacklevel=4,
+ )
+ key_type = _get_ssh_key_type(public_key)
+ kformat = _lookup_kformat(key_type)
+
+ f_pub = _FragList()
+ f_pub.put_sshstr(key_type)
+ kformat.encode_public(public_key, f_pub)
+
+ pub = binascii.b2a_base64(f_pub.tobytes()).strip()
+ return b"".join([key_type, b" ", pub])
+
+
+SSHCertPrivateKeyTypes = typing.Union[
+ ec.EllipticCurvePrivateKey,
+ rsa.RSAPrivateKey,
+ ed25519.Ed25519PrivateKey,
+]
+
+
+# This is an undocumented limit enforced in the openssh codebase for sshd and
+# ssh-keygen, but it is undefined in the ssh certificates spec.
+_SSHKEY_CERT_MAX_PRINCIPALS = 256
+
+
+class SSHCertificateBuilder:
+ def __init__(
+ self,
+ _public_key: typing.Optional[SSHCertPublicKeyTypes] = None,
+ _serial: typing.Optional[int] = None,
+ _type: typing.Optional[SSHCertificateType] = None,
+ _key_id: typing.Optional[bytes] = None,
+ _valid_principals: typing.List[bytes] = [],
+ _valid_for_all_principals: bool = False,
+ _valid_before: typing.Optional[int] = None,
+ _valid_after: typing.Optional[int] = None,
+ _critical_options: typing.List[typing.Tuple[bytes, bytes]] = [],
+ _extensions: typing.List[typing.Tuple[bytes, bytes]] = [],
+ ):
+ self._public_key = _public_key
+ self._serial = _serial
+ self._type = _type
+ self._key_id = _key_id
+ self._valid_principals = _valid_principals
+ self._valid_for_all_principals = _valid_for_all_principals
+ self._valid_before = _valid_before
+ self._valid_after = _valid_after
+ self._critical_options = _critical_options
+ self._extensions = _extensions
+
+ def public_key(
+ self, public_key: SSHCertPublicKeyTypes
+ ) -> SSHCertificateBuilder:
+ if not isinstance(
+ public_key,
+ (
+ ec.EllipticCurvePublicKey,
+ rsa.RSAPublicKey,
+ ed25519.Ed25519PublicKey,
+ ),
+ ):
+ raise TypeError("Unsupported key type")
+ if self._public_key is not None:
+ raise ValueError("public_key already set")
+
+ return SSHCertificateBuilder(
+ _public_key=public_key,
+ _serial=self._serial,
+ _type=self._type,
+ _key_id=self._key_id,
+ _valid_principals=self._valid_principals,
+ _valid_for_all_principals=self._valid_for_all_principals,
+ _valid_before=self._valid_before,
+ _valid_after=self._valid_after,
+ _critical_options=self._critical_options,
+ _extensions=self._extensions,
+ )
+
+ def serial(self, serial: int) -> SSHCertificateBuilder:
+ if not isinstance(serial, int):
+ raise TypeError("serial must be an integer")
+ if not 0 <= serial < 2**64:
+ raise ValueError("serial must be between 0 and 2**64")
+ if self._serial is not None:
+ raise ValueError("serial already set")
+
+ return SSHCertificateBuilder(
+ _public_key=self._public_key,
+ _serial=serial,
+ _type=self._type,
+ _key_id=self._key_id,
+ _valid_principals=self._valid_principals,
+ _valid_for_all_principals=self._valid_for_all_principals,
+ _valid_before=self._valid_before,
+ _valid_after=self._valid_after,
+ _critical_options=self._critical_options,
+ _extensions=self._extensions,
+ )
+
+ def type(self, type: SSHCertificateType) -> SSHCertificateBuilder:
+ if not isinstance(type, SSHCertificateType):
+ raise TypeError("type must be an SSHCertificateType")
+ if self._type is not None:
+ raise ValueError("type already set")
+
+ return SSHCertificateBuilder(
+ _public_key=self._public_key,
+ _serial=self._serial,
+ _type=type,
+ _key_id=self._key_id,
+ _valid_principals=self._valid_principals,
+ _valid_for_all_principals=self._valid_for_all_principals,
+ _valid_before=self._valid_before,
+ _valid_after=self._valid_after,
+ _critical_options=self._critical_options,
+ _extensions=self._extensions,
+ )
+
+ def key_id(self, key_id: bytes) -> SSHCertificateBuilder:
+ if not isinstance(key_id, bytes):
+ raise TypeError("key_id must be bytes")
+ if self._key_id is not None:
+ raise ValueError("key_id already set")
+
+ return SSHCertificateBuilder(
+ _public_key=self._public_key,
+ _serial=self._serial,
+ _type=self._type,
+ _key_id=key_id,
+ _valid_principals=self._valid_principals,
+ _valid_for_all_principals=self._valid_for_all_principals,
+ _valid_before=self._valid_before,
+ _valid_after=self._valid_after,
+ _critical_options=self._critical_options,
+ _extensions=self._extensions,
+ )
+
+ def valid_principals(
+ self, valid_principals: typing.List[bytes]
+ ) -> SSHCertificateBuilder:
+ if self._valid_for_all_principals:
+ raise ValueError(
+ "Principals can't be set because the cert is valid "
+ "for all principals"
+ )
+ if (
+ not all(isinstance(x, bytes) for x in valid_principals)
+ or not valid_principals
+ ):
+ raise TypeError(
+ "principals must be a list of bytes and can't be empty"
+ )
+ if self._valid_principals:
+ raise ValueError("valid_principals already set")
+
+ if len(valid_principals) > _SSHKEY_CERT_MAX_PRINCIPALS:
+ raise ValueError(
+ "Reached or exceeded the maximum number of valid_principals"
+ )
+
+ return SSHCertificateBuilder(
+ _public_key=self._public_key,
+ _serial=self._serial,
+ _type=self._type,
+ _key_id=self._key_id,
+ _valid_principals=valid_principals,
+ _valid_for_all_principals=self._valid_for_all_principals,
+ _valid_before=self._valid_before,
+ _valid_after=self._valid_after,
+ _critical_options=self._critical_options,
+ _extensions=self._extensions,
+ )
+
+ def valid_for_all_principals(self):
+ if self._valid_principals:
+ raise ValueError(
+ "valid_principals already set, can't set "
+ "valid_for_all_principals"
+ )
+ if self._valid_for_all_principals:
+ raise ValueError("valid_for_all_principals already set")
+
+ return SSHCertificateBuilder(
+ _public_key=self._public_key,
+ _serial=self._serial,
+ _type=self._type,
+ _key_id=self._key_id,
+ _valid_principals=self._valid_principals,
+ _valid_for_all_principals=True,
+ _valid_before=self._valid_before,
+ _valid_after=self._valid_after,
+ _critical_options=self._critical_options,
+ _extensions=self._extensions,
+ )
+
+ def valid_before(
+ self, valid_before: typing.Union[int, float]
+ ) -> SSHCertificateBuilder:
+ if not isinstance(valid_before, (int, float)):
+ raise TypeError("valid_before must be an int or float")
+ valid_before = int(valid_before)
+ if valid_before < 0 or valid_before >= 2**64:
+ raise ValueError("valid_before must [0, 2**64)")
+ if self._valid_before is not None:
+ raise ValueError("valid_before already set")
+
+ return SSHCertificateBuilder(
+ _public_key=self._public_key,
+ _serial=self._serial,
+ _type=self._type,
+ _key_id=self._key_id,
+ _valid_principals=self._valid_principals,
+ _valid_for_all_principals=self._valid_for_all_principals,
+ _valid_before=valid_before,
+ _valid_after=self._valid_after,
+ _critical_options=self._critical_options,
+ _extensions=self._extensions,
+ )
+
+ def valid_after(
+ self, valid_after: typing.Union[int, float]
+ ) -> SSHCertificateBuilder:
+ if not isinstance(valid_after, (int, float)):
+ raise TypeError("valid_after must be an int or float")
+ valid_after = int(valid_after)
+ if valid_after < 0 or valid_after >= 2**64:
+ raise ValueError("valid_after must [0, 2**64)")
+ if self._valid_after is not None:
+ raise ValueError("valid_after already set")
+
+ return SSHCertificateBuilder(
+ _public_key=self._public_key,
+ _serial=self._serial,
+ _type=self._type,
+ _key_id=self._key_id,
+ _valid_principals=self._valid_principals,
+ _valid_for_all_principals=self._valid_for_all_principals,
+ _valid_before=self._valid_before,
+ _valid_after=valid_after,
+ _critical_options=self._critical_options,
+ _extensions=self._extensions,
+ )
+
+ def add_critical_option(
+ self, name: bytes, value: bytes
+ ) -> SSHCertificateBuilder:
+ if not isinstance(name, bytes) or not isinstance(value, bytes):
+ raise TypeError("name and value must be bytes")
+ # This is O(n**2)
+ if name in [name for name, _ in self._critical_options]:
+ raise ValueError("Duplicate critical option name")
+
+ return SSHCertificateBuilder(
+ _public_key=self._public_key,
+ _serial=self._serial,
+ _type=self._type,
+ _key_id=self._key_id,
+ _valid_principals=self._valid_principals,
+ _valid_for_all_principals=self._valid_for_all_principals,
+ _valid_before=self._valid_before,
+ _valid_after=self._valid_after,
+ _critical_options=self._critical_options + [(name, value)],
+ _extensions=self._extensions,
+ )
+
+ def add_extension(
+ self, name: bytes, value: bytes
+ ) -> SSHCertificateBuilder:
+ if not isinstance(name, bytes) or not isinstance(value, bytes):
+ raise TypeError("name and value must be bytes")
+ # This is O(n**2)
+ if name in [name for name, _ in self._extensions]:
+ raise ValueError("Duplicate extension name")
+
+ return SSHCertificateBuilder(
+ _public_key=self._public_key,
+ _serial=self._serial,
+ _type=self._type,
+ _key_id=self._key_id,
+ _valid_principals=self._valid_principals,
+ _valid_for_all_principals=self._valid_for_all_principals,
+ _valid_before=self._valid_before,
+ _valid_after=self._valid_after,
+ _critical_options=self._critical_options,
+ _extensions=self._extensions + [(name, value)],
+ )
+
+ def sign(self, private_key: SSHCertPrivateKeyTypes) -> SSHCertificate:
+ if not isinstance(
+ private_key,
+ (
+ ec.EllipticCurvePrivateKey,
+ rsa.RSAPrivateKey,
+ ed25519.Ed25519PrivateKey,
+ ),
+ ):
+ raise TypeError("Unsupported private key type")
+
+ if self._public_key is None:
+ raise ValueError("public_key must be set")
+
+ # Not required
+ serial = 0 if self._serial is None else self._serial
+
+ if self._type is None:
+ raise ValueError("type must be set")
+
+ # Not required
+ key_id = b"" if self._key_id is None else self._key_id
+
+ # A zero length list is valid, but means the certificate
+ # is valid for any principal of the specified type. We require
+ # the user to explicitly set valid_for_all_principals to get
+ # that behavior.
+ if not self._valid_principals and not self._valid_for_all_principals:
+ raise ValueError(
+ "valid_principals must be set if valid_for_all_principals "
+ "is False"
+ )
+
+ if self._valid_before is None:
+ raise ValueError("valid_before must be set")
+
+ if self._valid_after is None:
+ raise ValueError("valid_after must be set")
+
+ if self._valid_after > self._valid_before:
+ raise ValueError("valid_after must be earlier than valid_before")
+
+ # lexically sort our byte strings
+ self._critical_options.sort(key=lambda x: x[0])
+ self._extensions.sort(key=lambda x: x[0])
+
+ key_type = _get_ssh_key_type(self._public_key)
+ cert_prefix = key_type + _CERT_SUFFIX
+
+ # Marshal the bytes to be signed
+ nonce = os.urandom(32)
+ kformat = _lookup_kformat(key_type)
+ f = _FragList()
+ f.put_sshstr(cert_prefix)
+ f.put_sshstr(nonce)
+ kformat.encode_public(self._public_key, f)
+ f.put_u64(serial)
+ f.put_u32(self._type.value)
+ f.put_sshstr(key_id)
+ fprincipals = _FragList()
+ for p in self._valid_principals:
+ fprincipals.put_sshstr(p)
+ f.put_sshstr(fprincipals.tobytes())
+ f.put_u64(self._valid_after)
+ f.put_u64(self._valid_before)
+ fcrit = _FragList()
+ for name, value in self._critical_options:
+ fcrit.put_sshstr(name)
+ if len(value) > 0:
+ foptval = _FragList()
+ foptval.put_sshstr(value)
+ fcrit.put_sshstr(foptval.tobytes())
+ else:
+ fcrit.put_sshstr(value)
+ f.put_sshstr(fcrit.tobytes())
+ fext = _FragList()
+ for name, value in self._extensions:
+ fext.put_sshstr(name)
+ if len(value) > 0:
+ fextval = _FragList()
+ fextval.put_sshstr(value)
+ fext.put_sshstr(fextval.tobytes())
+ else:
+ fext.put_sshstr(value)
+ f.put_sshstr(fext.tobytes())
+ f.put_sshstr(b"") # RESERVED FIELD
+ # encode CA public key
+ ca_type = _get_ssh_key_type(private_key)
+ caformat = _lookup_kformat(ca_type)
+ caf = _FragList()
+ caf.put_sshstr(ca_type)
+ caformat.encode_public(private_key.public_key(), caf)
+ f.put_sshstr(caf.tobytes())
+ # Sigs according to the rules defined for the CA's public key
+ # (RFC4253 section 6.6 for ssh-rsa, RFC5656 for ECDSA,
+ # and RFC8032 for Ed25519).
+ if isinstance(private_key, ed25519.Ed25519PrivateKey):
+ signature = private_key.sign(f.tobytes())
+ fsig = _FragList()
+ fsig.put_sshstr(ca_type)
+ fsig.put_sshstr(signature)
+ f.put_sshstr(fsig.tobytes())
+ elif isinstance(private_key, ec.EllipticCurvePrivateKey):
+ hash_alg = _get_ec_hash_alg(private_key.curve)
+ signature = private_key.sign(f.tobytes(), ec.ECDSA(hash_alg))
+ r, s = asym_utils.decode_dss_signature(signature)
+ fsig = _FragList()
+ fsig.put_sshstr(ca_type)
+ fsigblob = _FragList()
+ fsigblob.put_mpint(r)
+ fsigblob.put_mpint(s)
+ fsig.put_sshstr(fsigblob.tobytes())
+ f.put_sshstr(fsig.tobytes())
+
+ else:
+ assert isinstance(private_key, rsa.RSAPrivateKey)
+ # Just like Golang, we're going to use SHA512 for RSA
+ # https://cs.opensource.google/go/x/crypto/+/refs/tags/
+ # v0.4.0:ssh/certs.go;l=445
+ # RFC 8332 defines SHA256 and 512 as options
+ fsig = _FragList()
+ fsig.put_sshstr(_SSH_RSA_SHA512)
+ signature = private_key.sign(
+ f.tobytes(), padding.PKCS1v15(), hashes.SHA512()
+ )
+ fsig.put_sshstr(signature)
+ f.put_sshstr(fsig.tobytes())
+
+ cert_data = binascii.b2a_base64(f.tobytes()).strip()
+ # load_ssh_public_identity returns a union, but this is
+ # guaranteed to be an SSHCertificate, so we cast to make
+ # mypy happy.
+ return typing.cast(
+ SSHCertificate,
+ load_ssh_public_identity(b"".join([cert_prefix, b" ", cert_data])),
+ )
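+
+
+def _example_ssh_user_certificate() -> SSHCertificate:  # pragma: no cover
+    # Illustrative sketch only, not part of the library API: issue a
+    # one-hour user certificate for an Ed25519 key with an Ed25519 CA.
+    # Validity bounds are Unix timestamps.
+    import time
+
+    ca_key = ed25519.Ed25519PrivateKey.generate()
+    user_key = ed25519.Ed25519PrivateKey.generate()
+    now = int(time.time())
+    cert = (
+        SSHCertificateBuilder()
+        .public_key(user_key.public_key())
+        .type(SSHCertificateType.USER)
+        .key_id(b"alice")
+        .valid_principals([b"alice"])
+        .valid_after(now)
+        .valid_before(now + 3600)
+        .sign(ca_key)
+    )
+    cert.verify_cert_signature()
+    return cert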
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/twofactor/__init__.py b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/twofactor/__init__.py
new file mode 100644
index 00000000..c1af4230
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/twofactor/__init__.py
@@ -0,0 +1,9 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+
+class InvalidToken(Exception):
+ pass
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/__init__.cpython-312.pyc
new file mode 100644
index 00000000..6a31640b
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/__init__.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/hotp.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/hotp.cpython-312.pyc
new file mode 100644
index 00000000..8f0d9e17
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/hotp.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/totp.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/totp.cpython-312.pyc
new file mode 100644
index 00000000..a4df6144
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/totp.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/twofactor/hotp.py b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/twofactor/hotp.py
new file mode 100644
index 00000000..2067108a
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/twofactor/hotp.py
@@ -0,0 +1,92 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import base64
+import typing
+from urllib.parse import quote, urlencode
+
+from cryptography.hazmat.primitives import constant_time, hmac
+from cryptography.hazmat.primitives.hashes import SHA1, SHA256, SHA512
+from cryptography.hazmat.primitives.twofactor import InvalidToken
+
+HOTPHashTypes = typing.Union[SHA1, SHA256, SHA512]
+
+
+def _generate_uri(
+ hotp: HOTP,
+ type_name: str,
+ account_name: str,
+ issuer: typing.Optional[str],
+ extra_parameters: typing.List[typing.Tuple[str, int]],
+) -> str:
+ parameters = [
+ ("digits", hotp._length),
+ ("secret", base64.b32encode(hotp._key)),
+ ("algorithm", hotp._algorithm.name.upper()),
+ ]
+
+ if issuer is not None:
+ parameters.append(("issuer", issuer))
+
+ parameters.extend(extra_parameters)
+
+ label = (
+ f"{quote(issuer)}:{quote(account_name)}"
+ if issuer
+ else quote(account_name)
+ )
+ return f"otpauth://{type_name}/{label}?{urlencode(parameters)}"
+
+
+class HOTP:
+ def __init__(
+ self,
+ key: bytes,
+ length: int,
+ algorithm: HOTPHashTypes,
+ backend: typing.Any = None,
+ enforce_key_length: bool = True,
+ ) -> None:
+ if len(key) < 16 and enforce_key_length is True:
+ raise ValueError("Key length has to be at least 128 bits.")
+
+ if not isinstance(length, int):
+ raise TypeError("Length parameter must be an integer type.")
+
+ if length < 6 or length > 8:
+ raise ValueError("Length of HOTP has to be between 6 and 8.")
+
+ if not isinstance(algorithm, (SHA1, SHA256, SHA512)):
+ raise TypeError("Algorithm must be SHA1, SHA256 or SHA512.")
+
+ self._key = key
+ self._length = length
+ self._algorithm = algorithm
+
+ def generate(self, counter: int) -> bytes:
+ truncated_value = self._dynamic_truncate(counter)
+ hotp = truncated_value % (10**self._length)
+ return "{0:0{1}}".format(hotp, self._length).encode()
+
+ def verify(self, hotp: bytes, counter: int) -> None:
+ if not constant_time.bytes_eq(self.generate(counter), hotp):
+ raise InvalidToken("Supplied HOTP value does not match.")
+
+ def _dynamic_truncate(self, counter: int) -> int:
+ ctx = hmac.HMAC(self._key, self._algorithm)
+ ctx.update(counter.to_bytes(length=8, byteorder="big"))
+ hmac_value = ctx.finalize()
+
+ offset = hmac_value[len(hmac_value) - 1] & 0b1111
+ p = hmac_value[offset : offset + 4]
+ return int.from_bytes(p, byteorder="big") & 0x7FFFFFFF
+
+ def get_provisioning_uri(
+ self, account_name: str, counter: int, issuer: typing.Optional[str]
+ ) -> str:
+ return _generate_uri(
+ self, "hotp", account_name, issuer, [("counter", int(counter))]
+ )
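
A minimal usage sketch for the HOTP class above (annotation, not part of the
diff; the key and expected output are the RFC 4226 Appendix D test vector for
counter 0):

# Sketch only, not part of the committed file.
from cryptography.hazmat.primitives.hashes import SHA1
from cryptography.hazmat.primitives.twofactor import InvalidToken
from cryptography.hazmat.primitives.twofactor.hotp import HOTP

hotp = HOTP(b"12345678901234567890", 6, SHA1())  # 20-byte ASCII test key

assert hotp.generate(0) == b"755224"  # first RFC 4226 Appendix D vector

try:
    hotp.verify(b"000000", 0)  # wrong value for counter 0
except InvalidToken:
    print("token rejected")
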
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/twofactor/totp.py b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/twofactor/totp.py
new file mode 100644
index 00000000..daddcea2
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/hazmat/primitives/twofactor/totp.py
@@ -0,0 +1,50 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import typing
+
+from cryptography.hazmat.primitives import constant_time
+from cryptography.hazmat.primitives.twofactor import InvalidToken
+from cryptography.hazmat.primitives.twofactor.hotp import (
+ HOTP,
+ HOTPHashTypes,
+ _generate_uri,
+)
+
+
+class TOTP:
+ def __init__(
+ self,
+ key: bytes,
+ length: int,
+ algorithm: HOTPHashTypes,
+ time_step: int,
+ backend: typing.Any = None,
+ enforce_key_length: bool = True,
+ ):
+ self._time_step = time_step
+ self._hotp = HOTP(
+ key, length, algorithm, enforce_key_length=enforce_key_length
+ )
+
+ def generate(self, time: typing.Union[int, float]) -> bytes:
+ counter = int(time / self._time_step)
+ return self._hotp.generate(counter)
+
+ def verify(self, totp: bytes, time: int) -> None:
+ if not constant_time.bytes_eq(self.generate(time), totp):
+ raise InvalidToken("Supplied TOTP value does not match.")
+
+ def get_provisioning_uri(
+ self, account_name: str, issuer: typing.Optional[str]
+ ) -> str:
+ return _generate_uri(
+ self._hotp,
+ "totp",
+ account_name,
+ issuer,
+ [("period", int(self._time_step))],
+ )
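
A matching TOTP sketch (annotation, not part of the diff): the counter is
floor(time / time_step), so the 8-digit SHA-1 vector from RFC 6238 Appendix B
reproduces directly.

# Sketch only, not part of the committed file.
import time

from cryptography.hazmat.primitives.hashes import SHA1
from cryptography.hazmat.primitives.twofactor.totp import TOTP

totp = TOTP(b"12345678901234567890", 8, SHA1(), 30)

assert totp.generate(59) == b"94287082"  # RFC 6238 Appendix B, T=59

token = totp.generate(time.time())    # value for the current 30-second window
totp.verify(token, int(time.time()))  # raises InvalidToken on mismatch
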
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/py.typed b/Backend/venv/lib/python3.12/site-packages/cryptography/py.typed
new file mode 100644
index 00000000..e69de29b
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/utils.py b/Backend/venv/lib/python3.12/site-packages/cryptography/utils.py
new file mode 100644
index 00000000..71916816
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/utils.py
@@ -0,0 +1,130 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import enum
+import sys
+import types
+import typing
+import warnings
+
+
+# We use a UserWarning subclass, instead of DeprecationWarning, because CPython
+# decided deprecation warnings should be invisible by default.
+class CryptographyDeprecationWarning(UserWarning):
+ pass
+
+
+# Several APIs were deprecated with no specific end-of-life date because of the
+# ubiquity of their use. They should not be removed until we agree on when that
+# cycle ends.
+DeprecatedIn36 = CryptographyDeprecationWarning
+DeprecatedIn37 = CryptographyDeprecationWarning
+DeprecatedIn40 = CryptographyDeprecationWarning
+DeprecatedIn41 = CryptographyDeprecationWarning
+
+
+def _check_bytes(name: str, value: bytes) -> None:
+ if not isinstance(value, bytes):
+ raise TypeError(f"{name} must be bytes")
+
+
+def _check_byteslike(name: str, value: bytes) -> None:
+ try:
+ memoryview(value)
+ except TypeError:
+ raise TypeError(f"{name} must be bytes-like")
+
+
+def int_to_bytes(integer: int, length: typing.Optional[int] = None) -> bytes:
+ return integer.to_bytes(
+ length or (integer.bit_length() + 7) // 8 or 1, "big"
+ )
+
+
+def _extract_buffer_length(obj: typing.Any) -> typing.Tuple[typing.Any, int]:
+ from cryptography.hazmat.bindings._rust import _openssl
+
+ buf = _openssl.ffi.from_buffer(obj)
+ return buf, int(_openssl.ffi.cast("uintptr_t", buf))
+
+
+class InterfaceNotImplemented(Exception):
+ pass
+
+
+class _DeprecatedValue:
+ def __init__(self, value: object, message: str, warning_class):
+ self.value = value
+ self.message = message
+ self.warning_class = warning_class
+
+
+class _ModuleWithDeprecations(types.ModuleType):
+ def __init__(self, module: types.ModuleType):
+ super().__init__(module.__name__)
+ self.__dict__["_module"] = module
+
+ def __getattr__(self, attr: str) -> object:
+ obj = getattr(self._module, attr)
+ if isinstance(obj, _DeprecatedValue):
+ warnings.warn(obj.message, obj.warning_class, stacklevel=2)
+ obj = obj.value
+ return obj
+
+ def __setattr__(self, attr: str, value: object) -> None:
+ setattr(self._module, attr, value)
+
+ def __delattr__(self, attr: str) -> None:
+ obj = getattr(self._module, attr)
+ if isinstance(obj, _DeprecatedValue):
+ warnings.warn(obj.message, obj.warning_class, stacklevel=2)
+
+ delattr(self._module, attr)
+
+ def __dir__(self) -> typing.Sequence[str]:
+ return ["_module"] + dir(self._module)
+
+
+def deprecated(
+ value: object,
+ module_name: str,
+ message: str,
+ warning_class: typing.Type[Warning],
+ name: typing.Optional[str] = None,
+) -> _DeprecatedValue:
+ module = sys.modules[module_name]
+ if not isinstance(module, _ModuleWithDeprecations):
+ sys.modules[module_name] = module = _ModuleWithDeprecations(module)
+ dv = _DeprecatedValue(value, message, warning_class)
+ # Maintain backwards compatibility with `name is None` for pyOpenSSL.
+ if name is not None:
+ setattr(module, name, dv)
+ return dv
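
How the shim behaves, as a minimal sketch (annotation, not part of the diff;
OLD_CONSTANT and its message are made up): deprecated() swaps the module in
sys.modules for a _ModuleWithDeprecations proxy, and attribute access through
the proxy warns and unwraps the _DeprecatedValue.

# Sketch only, not part of the committed file.
import sys
import warnings

from cryptography import utils

OLD_CONSTANT = utils.deprecated(
    42,
    __name__,
    "OLD_CONSTANT is deprecated.",  # hypothetical message
    utils.CryptographyDeprecationWarning,
    name="OLD_CONSTANT",
)

proxy = sys.modules[__name__]  # now the _ModuleWithDeprecations wrapper
with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    assert proxy.OLD_CONSTANT == 42  # unwrapped, and a warning is recorded
assert caught[0].category is utils.CryptographyDeprecationWarning
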
+
+
+def cached_property(func: typing.Callable) -> property:
+ cached_name = f"_cached_{func}"
+ sentinel = object()
+
+ def inner(instance: object):
+ cache = getattr(instance, cached_name, sentinel)
+ if cache is not sentinel:
+ return cache
+ result = func(instance)
+ setattr(instance, cached_name, result)
+ return result
+
+ return property(inner)
+
+
+# Python 3.10 changed representation of enums. We use well-defined object
+# representation and string representation from Python 3.9.
+class Enum(enum.Enum):
+ def __repr__(self) -> str:
+ return f"<{self.__class__.__name__}.{self._name_}: {self._value_!r}>"
+
+ def __str__(self) -> str:
+ return f"{self.__class__.__name__}.{self._name_}"
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/x509/__init__.py b/Backend/venv/lib/python3.12/site-packages/cryptography/x509/__init__.py
new file mode 100644
index 00000000..d77694a2
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/x509/__init__.py
@@ -0,0 +1,255 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+from cryptography.x509 import certificate_transparency
+from cryptography.x509.base import (
+ Attribute,
+ AttributeNotFound,
+ Attributes,
+ Certificate,
+ CertificateBuilder,
+ CertificateRevocationList,
+ CertificateRevocationListBuilder,
+ CertificateSigningRequest,
+ CertificateSigningRequestBuilder,
+ InvalidVersion,
+ RevokedCertificate,
+ RevokedCertificateBuilder,
+ Version,
+ load_der_x509_certificate,
+ load_der_x509_crl,
+ load_der_x509_csr,
+ load_pem_x509_certificate,
+ load_pem_x509_certificates,
+ load_pem_x509_crl,
+ load_pem_x509_csr,
+ random_serial_number,
+)
+from cryptography.x509.extensions import (
+ AccessDescription,
+ AuthorityInformationAccess,
+ AuthorityKeyIdentifier,
+ BasicConstraints,
+ CertificateIssuer,
+ CertificatePolicies,
+ CRLDistributionPoints,
+ CRLNumber,
+ CRLReason,
+ DeltaCRLIndicator,
+ DistributionPoint,
+ DuplicateExtension,
+ ExtendedKeyUsage,
+ Extension,
+ ExtensionNotFound,
+ Extensions,
+ ExtensionType,
+ FreshestCRL,
+ GeneralNames,
+ InhibitAnyPolicy,
+ InvalidityDate,
+ IssuerAlternativeName,
+ IssuingDistributionPoint,
+ KeyUsage,
+ MSCertificateTemplate,
+ NameConstraints,
+ NoticeReference,
+ OCSPAcceptableResponses,
+ OCSPNoCheck,
+ OCSPNonce,
+ PolicyConstraints,
+ PolicyInformation,
+ PrecertificateSignedCertificateTimestamps,
+ PrecertPoison,
+ ReasonFlags,
+ SignedCertificateTimestamps,
+ SubjectAlternativeName,
+ SubjectInformationAccess,
+ SubjectKeyIdentifier,
+ TLSFeature,
+ TLSFeatureType,
+ UnrecognizedExtension,
+ UserNotice,
+)
+from cryptography.x509.general_name import (
+ DirectoryName,
+ DNSName,
+ GeneralName,
+ IPAddress,
+ OtherName,
+ RegisteredID,
+ RFC822Name,
+ UniformResourceIdentifier,
+ UnsupportedGeneralNameType,
+)
+from cryptography.x509.name import (
+ Name,
+ NameAttribute,
+ RelativeDistinguishedName,
+)
+from cryptography.x509.oid import (
+ AuthorityInformationAccessOID,
+ CertificatePoliciesOID,
+ CRLEntryExtensionOID,
+ ExtendedKeyUsageOID,
+ ExtensionOID,
+ NameOID,
+ ObjectIdentifier,
+ SignatureAlgorithmOID,
+)
+
+OID_AUTHORITY_INFORMATION_ACCESS = ExtensionOID.AUTHORITY_INFORMATION_ACCESS
+OID_AUTHORITY_KEY_IDENTIFIER = ExtensionOID.AUTHORITY_KEY_IDENTIFIER
+OID_BASIC_CONSTRAINTS = ExtensionOID.BASIC_CONSTRAINTS
+OID_CERTIFICATE_POLICIES = ExtensionOID.CERTIFICATE_POLICIES
+OID_CRL_DISTRIBUTION_POINTS = ExtensionOID.CRL_DISTRIBUTION_POINTS
+OID_EXTENDED_KEY_USAGE = ExtensionOID.EXTENDED_KEY_USAGE
+OID_FRESHEST_CRL = ExtensionOID.FRESHEST_CRL
+OID_INHIBIT_ANY_POLICY = ExtensionOID.INHIBIT_ANY_POLICY
+OID_ISSUER_ALTERNATIVE_NAME = ExtensionOID.ISSUER_ALTERNATIVE_NAME
+OID_KEY_USAGE = ExtensionOID.KEY_USAGE
+OID_NAME_CONSTRAINTS = ExtensionOID.NAME_CONSTRAINTS
+OID_OCSP_NO_CHECK = ExtensionOID.OCSP_NO_CHECK
+OID_POLICY_CONSTRAINTS = ExtensionOID.POLICY_CONSTRAINTS
+OID_POLICY_MAPPINGS = ExtensionOID.POLICY_MAPPINGS
+OID_SUBJECT_ALTERNATIVE_NAME = ExtensionOID.SUBJECT_ALTERNATIVE_NAME
+OID_SUBJECT_DIRECTORY_ATTRIBUTES = ExtensionOID.SUBJECT_DIRECTORY_ATTRIBUTES
+OID_SUBJECT_INFORMATION_ACCESS = ExtensionOID.SUBJECT_INFORMATION_ACCESS
+OID_SUBJECT_KEY_IDENTIFIER = ExtensionOID.SUBJECT_KEY_IDENTIFIER
+
+OID_DSA_WITH_SHA1 = SignatureAlgorithmOID.DSA_WITH_SHA1
+OID_DSA_WITH_SHA224 = SignatureAlgorithmOID.DSA_WITH_SHA224
+OID_DSA_WITH_SHA256 = SignatureAlgorithmOID.DSA_WITH_SHA256
+OID_ECDSA_WITH_SHA1 = SignatureAlgorithmOID.ECDSA_WITH_SHA1
+OID_ECDSA_WITH_SHA224 = SignatureAlgorithmOID.ECDSA_WITH_SHA224
+OID_ECDSA_WITH_SHA256 = SignatureAlgorithmOID.ECDSA_WITH_SHA256
+OID_ECDSA_WITH_SHA384 = SignatureAlgorithmOID.ECDSA_WITH_SHA384
+OID_ECDSA_WITH_SHA512 = SignatureAlgorithmOID.ECDSA_WITH_SHA512
+OID_RSA_WITH_MD5 = SignatureAlgorithmOID.RSA_WITH_MD5
+OID_RSA_WITH_SHA1 = SignatureAlgorithmOID.RSA_WITH_SHA1
+OID_RSA_WITH_SHA224 = SignatureAlgorithmOID.RSA_WITH_SHA224
+OID_RSA_WITH_SHA256 = SignatureAlgorithmOID.RSA_WITH_SHA256
+OID_RSA_WITH_SHA384 = SignatureAlgorithmOID.RSA_WITH_SHA384
+OID_RSA_WITH_SHA512 = SignatureAlgorithmOID.RSA_WITH_SHA512
+OID_RSASSA_PSS = SignatureAlgorithmOID.RSASSA_PSS
+
+OID_COMMON_NAME = NameOID.COMMON_NAME
+OID_COUNTRY_NAME = NameOID.COUNTRY_NAME
+OID_DOMAIN_COMPONENT = NameOID.DOMAIN_COMPONENT
+OID_DN_QUALIFIER = NameOID.DN_QUALIFIER
+OID_EMAIL_ADDRESS = NameOID.EMAIL_ADDRESS
+OID_GENERATION_QUALIFIER = NameOID.GENERATION_QUALIFIER
+OID_GIVEN_NAME = NameOID.GIVEN_NAME
+OID_LOCALITY_NAME = NameOID.LOCALITY_NAME
+OID_ORGANIZATIONAL_UNIT_NAME = NameOID.ORGANIZATIONAL_UNIT_NAME
+OID_ORGANIZATION_NAME = NameOID.ORGANIZATION_NAME
+OID_PSEUDONYM = NameOID.PSEUDONYM
+OID_SERIAL_NUMBER = NameOID.SERIAL_NUMBER
+OID_STATE_OR_PROVINCE_NAME = NameOID.STATE_OR_PROVINCE_NAME
+OID_SURNAME = NameOID.SURNAME
+OID_TITLE = NameOID.TITLE
+
+OID_CLIENT_AUTH = ExtendedKeyUsageOID.CLIENT_AUTH
+OID_CODE_SIGNING = ExtendedKeyUsageOID.CODE_SIGNING
+OID_EMAIL_PROTECTION = ExtendedKeyUsageOID.EMAIL_PROTECTION
+OID_OCSP_SIGNING = ExtendedKeyUsageOID.OCSP_SIGNING
+OID_SERVER_AUTH = ExtendedKeyUsageOID.SERVER_AUTH
+OID_TIME_STAMPING = ExtendedKeyUsageOID.TIME_STAMPING
+
+OID_ANY_POLICY = CertificatePoliciesOID.ANY_POLICY
+OID_CPS_QUALIFIER = CertificatePoliciesOID.CPS_QUALIFIER
+OID_CPS_USER_NOTICE = CertificatePoliciesOID.CPS_USER_NOTICE
+
+OID_CERTIFICATE_ISSUER = CRLEntryExtensionOID.CERTIFICATE_ISSUER
+OID_CRL_REASON = CRLEntryExtensionOID.CRL_REASON
+OID_INVALIDITY_DATE = CRLEntryExtensionOID.INVALIDITY_DATE
+
+OID_CA_ISSUERS = AuthorityInformationAccessOID.CA_ISSUERS
+OID_OCSP = AuthorityInformationAccessOID.OCSP
+
+__all__ = [
+ "certificate_transparency",
+ "load_pem_x509_certificate",
+ "load_pem_x509_certificates",
+ "load_der_x509_certificate",
+ "load_pem_x509_csr",
+ "load_der_x509_csr",
+ "load_pem_x509_crl",
+ "load_der_x509_crl",
+ "random_serial_number",
+ "Attribute",
+ "AttributeNotFound",
+ "Attributes",
+ "InvalidVersion",
+ "DeltaCRLIndicator",
+ "DuplicateExtension",
+ "ExtensionNotFound",
+ "UnsupportedGeneralNameType",
+ "NameAttribute",
+ "Name",
+ "RelativeDistinguishedName",
+ "ObjectIdentifier",
+ "ExtensionType",
+ "Extensions",
+ "Extension",
+ "ExtendedKeyUsage",
+ "FreshestCRL",
+ "IssuingDistributionPoint",
+ "TLSFeature",
+ "TLSFeatureType",
+ "OCSPAcceptableResponses",
+ "OCSPNoCheck",
+ "BasicConstraints",
+ "CRLNumber",
+ "KeyUsage",
+ "AuthorityInformationAccess",
+ "SubjectInformationAccess",
+ "AccessDescription",
+ "CertificatePolicies",
+ "PolicyInformation",
+ "UserNotice",
+ "NoticeReference",
+ "SubjectKeyIdentifier",
+ "NameConstraints",
+ "CRLDistributionPoints",
+ "DistributionPoint",
+ "ReasonFlags",
+ "InhibitAnyPolicy",
+ "SubjectAlternativeName",
+ "IssuerAlternativeName",
+ "AuthorityKeyIdentifier",
+ "GeneralNames",
+ "GeneralName",
+ "RFC822Name",
+ "DNSName",
+ "UniformResourceIdentifier",
+ "RegisteredID",
+ "DirectoryName",
+ "IPAddress",
+ "OtherName",
+ "Certificate",
+ "CertificateRevocationList",
+ "CertificateRevocationListBuilder",
+ "CertificateSigningRequest",
+ "RevokedCertificate",
+ "RevokedCertificateBuilder",
+ "CertificateSigningRequestBuilder",
+ "CertificateBuilder",
+ "Version",
+ "OID_CA_ISSUERS",
+ "OID_OCSP",
+ "CertificateIssuer",
+ "CRLReason",
+ "InvalidityDate",
+ "UnrecognizedExtension",
+ "PolicyConstraints",
+ "PrecertificateSignedCertificateTimestamps",
+ "PrecertPoison",
+ "OCSPNonce",
+ "SignedCertificateTimestamps",
+ "SignatureAlgorithmOID",
+ "NameOID",
+ "MSCertificateTemplate",
+]
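
Everything re-exported above is reachable from the single cryptography.x509
namespace; a minimal loading sketch (annotation, not part of the diff;
server.pem is a hypothetical input file):

# Sketch only, not part of the committed file.
from cryptography import x509
from cryptography.hazmat.primitives import hashes

with open("server.pem", "rb") as f:  # hypothetical path
    cert = x509.load_pem_x509_certificate(f.read())

print(cert.subject.rfc4514_string())
print(cert.serial_number)
print(cert.fingerprint(hashes.SHA256()).hex())
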
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/x509/__pycache__/__init__.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cryptography/x509/__pycache__/__init__.cpython-312.pyc
new file mode 100644
index 00000000..5588a22a
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cryptography/x509/__pycache__/__init__.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/x509/__pycache__/base.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cryptography/x509/__pycache__/base.cpython-312.pyc
new file mode 100644
index 00000000..63eba30f
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cryptography/x509/__pycache__/base.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/x509/__pycache__/certificate_transparency.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cryptography/x509/__pycache__/certificate_transparency.cpython-312.pyc
new file mode 100644
index 00000000..7462f9f7
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cryptography/x509/__pycache__/certificate_transparency.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/x509/__pycache__/extensions.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cryptography/x509/__pycache__/extensions.cpython-312.pyc
new file mode 100644
index 00000000..c6427c1c
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cryptography/x509/__pycache__/extensions.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/x509/__pycache__/general_name.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cryptography/x509/__pycache__/general_name.cpython-312.pyc
new file mode 100644
index 00000000..ce969bbf
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cryptography/x509/__pycache__/general_name.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/x509/__pycache__/name.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cryptography/x509/__pycache__/name.cpython-312.pyc
new file mode 100644
index 00000000..c469c22f
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cryptography/x509/__pycache__/name.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/x509/__pycache__/ocsp.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cryptography/x509/__pycache__/ocsp.cpython-312.pyc
new file mode 100644
index 00000000..0292cc38
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cryptography/x509/__pycache__/ocsp.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/x509/__pycache__/oid.cpython-312.pyc b/Backend/venv/lib/python3.12/site-packages/cryptography/x509/__pycache__/oid.cpython-312.pyc
new file mode 100644
index 00000000..3ed64566
Binary files /dev/null and b/Backend/venv/lib/python3.12/site-packages/cryptography/x509/__pycache__/oid.cpython-312.pyc differ
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/x509/base.py b/Backend/venv/lib/python3.12/site-packages/cryptography/x509/base.py
new file mode 100644
index 00000000..576385e0
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/x509/base.py
@@ -0,0 +1,1173 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import abc
+import datetime
+import os
+import typing
+
+from cryptography import utils
+from cryptography.hazmat.bindings._rust import x509 as rust_x509
+from cryptography.hazmat.primitives import hashes, serialization
+from cryptography.hazmat.primitives.asymmetric import (
+ dsa,
+ ec,
+ ed448,
+ ed25519,
+ padding,
+ rsa,
+ x448,
+ x25519,
+)
+from cryptography.hazmat.primitives.asymmetric.types import (
+ CertificateIssuerPrivateKeyTypes,
+ CertificateIssuerPublicKeyTypes,
+ CertificatePublicKeyTypes,
+)
+from cryptography.x509.extensions import (
+ Extension,
+ Extensions,
+ ExtensionType,
+ _make_sequence_methods,
+)
+from cryptography.x509.name import Name, _ASN1Type
+from cryptography.x509.oid import ObjectIdentifier
+
+_EARLIEST_UTC_TIME = datetime.datetime(1950, 1, 1)
+
+# This must be kept in sync with sign.rs's list of allowable types in
+# identify_hash_type
+_AllowedHashTypes = typing.Union[
+ hashes.SHA224,
+ hashes.SHA256,
+ hashes.SHA384,
+ hashes.SHA512,
+ hashes.SHA3_224,
+ hashes.SHA3_256,
+ hashes.SHA3_384,
+ hashes.SHA3_512,
+]
+
+
+class AttributeNotFound(Exception):
+ def __init__(self, msg: str, oid: ObjectIdentifier) -> None:
+ super().__init__(msg)
+ self.oid = oid
+
+
+def _reject_duplicate_extension(
+ extension: Extension[ExtensionType],
+ extensions: typing.List[Extension[ExtensionType]],
+) -> None:
+ # This is quadratic in the number of extensions
+ for e in extensions:
+ if e.oid == extension.oid:
+ raise ValueError("This extension has already been set.")
+
+
+def _reject_duplicate_attribute(
+ oid: ObjectIdentifier,
+ attributes: typing.List[
+ typing.Tuple[ObjectIdentifier, bytes, typing.Optional[int]]
+ ],
+) -> None:
+ # This is quadratic in the number of attributes
+ for attr_oid, _, _ in attributes:
+ if attr_oid == oid:
+ raise ValueError("This attribute has already been set.")
+
+
+def _convert_to_naive_utc_time(time: datetime.datetime) -> datetime.datetime:
+ """Normalizes a datetime to a naive datetime in UTC.
+
+ time -- datetime to normalize. Assumed to be in UTC if not timezone
+ aware.
+ """
+ if time.tzinfo is not None:
+ offset = time.utcoffset()
+ offset = offset if offset else datetime.timedelta()
+ return time.replace(tzinfo=None) - offset
+ else:
+ return time
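
A quick check of the normalization above (annotation, not part of the diff):
an aware datetime is shifted to UTC and stripped of tzinfo, while a naive one
is passed through and assumed to already be UTC.

# Sketch only, not part of the committed file.
import datetime

tz = datetime.timezone(datetime.timedelta(hours=2))
aware = datetime.datetime(2024, 1, 1, 12, 0, tzinfo=tz)
naive = _convert_to_naive_utc_time(aware)
assert naive == datetime.datetime(2024, 1, 1, 10, 0)  # shifted to UTC
assert naive.tzinfo is None
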
+
+
+class Attribute:
+ def __init__(
+ self,
+ oid: ObjectIdentifier,
+ value: bytes,
+ _type: int = _ASN1Type.UTF8String.value,
+ ) -> None:
+ self._oid = oid
+ self._value = value
+ self._type = _type
+
+ @property
+ def oid(self) -> ObjectIdentifier:
+ return self._oid
+
+ @property
+ def value(self) -> bytes:
+ return self._value
+
+ def __repr__(self) -> str:
+ return f""
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, Attribute):
+ return NotImplemented
+
+ return (
+ self.oid == other.oid
+ and self.value == other.value
+ and self._type == other._type
+ )
+
+ def __hash__(self) -> int:
+ return hash((self.oid, self.value, self._type))
+
+
+class Attributes:
+ def __init__(
+ self,
+ attributes: typing.Iterable[Attribute],
+ ) -> None:
+ self._attributes = list(attributes)
+
+ __len__, __iter__, __getitem__ = _make_sequence_methods("_attributes")
+
+ def __repr__(self) -> str:
+ return f""
+
+ def get_attribute_for_oid(self, oid: ObjectIdentifier) -> Attribute:
+ for attr in self:
+ if attr.oid == oid:
+ return attr
+
+ raise AttributeNotFound(f"No {oid} attribute was found", oid)
+
+
+class Version(utils.Enum):
+ v1 = 0
+ v3 = 2
+
+
+class InvalidVersion(Exception):
+ def __init__(self, msg: str, parsed_version: int) -> None:
+ super().__init__(msg)
+ self.parsed_version = parsed_version
+
+
+class Certificate(metaclass=abc.ABCMeta):
+ @abc.abstractmethod
+ def fingerprint(self, algorithm: hashes.HashAlgorithm) -> bytes:
+ """
+        Returns the fingerprint, computed with the given hash algorithm.
+ """
+
+ @property
+ @abc.abstractmethod
+ def serial_number(self) -> int:
+ """
+ Returns certificate serial number
+ """
+
+ @property
+ @abc.abstractmethod
+ def version(self) -> Version:
+ """
+ Returns the certificate version
+ """
+
+ @abc.abstractmethod
+ def public_key(self) -> CertificatePublicKeyTypes:
+ """
+ Returns the public key
+ """
+
+ @property
+ @abc.abstractmethod
+ def not_valid_before(self) -> datetime.datetime:
+ """
+ Not before time (represented as UTC datetime)
+ """
+
+ @property
+ @abc.abstractmethod
+ def not_valid_after(self) -> datetime.datetime:
+ """
+ Not after time (represented as UTC datetime)
+ """
+
+ @property
+ @abc.abstractmethod
+ def issuer(self) -> Name:
+ """
+ Returns the issuer name object.
+ """
+
+ @property
+ @abc.abstractmethod
+ def subject(self) -> Name:
+ """
+ Returns the subject name object.
+ """
+
+ @property
+ @abc.abstractmethod
+ def signature_hash_algorithm(
+ self,
+ ) -> typing.Optional[hashes.HashAlgorithm]:
+ """
+ Returns a HashAlgorithm corresponding to the type of the digest signed
+ in the certificate.
+ """
+
+ @property
+ @abc.abstractmethod
+ def signature_algorithm_oid(self) -> ObjectIdentifier:
+ """
+ Returns the ObjectIdentifier of the signature algorithm.
+ """
+
+ @property
+ @abc.abstractmethod
+ def signature_algorithm_parameters(
+ self,
+ ) -> typing.Union[None, padding.PSS, padding.PKCS1v15, ec.ECDSA]:
+ """
+ Returns the signature algorithm parameters.
+ """
+
+ @property
+ @abc.abstractmethod
+ def extensions(self) -> Extensions:
+ """
+ Returns an Extensions object.
+ """
+
+ @property
+ @abc.abstractmethod
+ def signature(self) -> bytes:
+ """
+ Returns the signature bytes.
+ """
+
+ @property
+ @abc.abstractmethod
+ def tbs_certificate_bytes(self) -> bytes:
+ """
+ Returns the tbsCertificate payload bytes as defined in RFC 5280.
+ """
+
+ @property
+ @abc.abstractmethod
+ def tbs_precertificate_bytes(self) -> bytes:
+ """
+ Returns the tbsCertificate payload bytes with the SCT list extension
+ stripped.
+ """
+
+ @abc.abstractmethod
+ def __eq__(self, other: object) -> bool:
+ """
+ Checks equality.
+ """
+
+ @abc.abstractmethod
+ def __hash__(self) -> int:
+ """
+ Computes a hash.
+ """
+
+ @abc.abstractmethod
+ def public_bytes(self, encoding: serialization.Encoding) -> bytes:
+ """
+ Serializes the certificate to PEM or DER format.
+ """
+
+ @abc.abstractmethod
+ def verify_directly_issued_by(self, issuer: Certificate) -> None:
+ """
+        This method verifies that the certificate's issuer name matches
+        the issuer's subject name and that the certificate is signed by
+        the issuer's private key. No other validation is performed.
+ """
+
+
+# Runtime isinstance checks need this since the rust class is not a subclass.
+Certificate.register(rust_x509.Certificate)
+
+
+class RevokedCertificate(metaclass=abc.ABCMeta):
+ @property
+ @abc.abstractmethod
+ def serial_number(self) -> int:
+ """
+ Returns the serial number of the revoked certificate.
+ """
+
+ @property
+ @abc.abstractmethod
+ def revocation_date(self) -> datetime.datetime:
+ """
+        Returns the date when this certificate was revoked.
+ """
+
+ @property
+ @abc.abstractmethod
+ def extensions(self) -> Extensions:
+ """
+ Returns an Extensions object containing a list of Revoked extensions.
+ """
+
+
+# Runtime isinstance checks need this since the rust class is not a subclass.
+RevokedCertificate.register(rust_x509.RevokedCertificate)
+
+
+class _RawRevokedCertificate(RevokedCertificate):
+ def __init__(
+ self,
+ serial_number: int,
+ revocation_date: datetime.datetime,
+ extensions: Extensions,
+ ):
+ self._serial_number = serial_number
+ self._revocation_date = revocation_date
+ self._extensions = extensions
+
+ @property
+ def serial_number(self) -> int:
+ return self._serial_number
+
+ @property
+ def revocation_date(self) -> datetime.datetime:
+ return self._revocation_date
+
+ @property
+ def extensions(self) -> Extensions:
+ return self._extensions
+
+
+class CertificateRevocationList(metaclass=abc.ABCMeta):
+ @abc.abstractmethod
+ def public_bytes(self, encoding: serialization.Encoding) -> bytes:
+ """
+ Serializes the CRL to PEM or DER format.
+ """
+
+ @abc.abstractmethod
+ def fingerprint(self, algorithm: hashes.HashAlgorithm) -> bytes:
+ """
+        Returns the fingerprint, computed with the given hash algorithm.
+ """
+
+ @abc.abstractmethod
+ def get_revoked_certificate_by_serial_number(
+ self, serial_number: int
+ ) -> typing.Optional[RevokedCertificate]:
+ """
+ Returns an instance of RevokedCertificate or None if the serial_number
+ is not in the CRL.
+ """
+
+ @property
+ @abc.abstractmethod
+ def signature_hash_algorithm(
+ self,
+ ) -> typing.Optional[hashes.HashAlgorithm]:
+ """
+ Returns a HashAlgorithm corresponding to the type of the digest signed
+        in the CRL.
+ """
+
+ @property
+ @abc.abstractmethod
+ def signature_algorithm_oid(self) -> ObjectIdentifier:
+ """
+ Returns the ObjectIdentifier of the signature algorithm.
+ """
+
+ @property
+ @abc.abstractmethod
+ def issuer(self) -> Name:
+ """
+ Returns the X509Name with the issuer of this CRL.
+ """
+
+ @property
+ @abc.abstractmethod
+ def next_update(self) -> typing.Optional[datetime.datetime]:
+ """
+        Returns the date of the next update for this CRL.
+ """
+
+ @property
+ @abc.abstractmethod
+ def last_update(self) -> datetime.datetime:
+ """
+        Returns the date of the last update for this CRL.
+ """
+
+ @property
+ @abc.abstractmethod
+ def extensions(self) -> Extensions:
+ """
+ Returns an Extensions object containing a list of CRL extensions.
+ """
+
+ @property
+ @abc.abstractmethod
+ def signature(self) -> bytes:
+ """
+ Returns the signature bytes.
+ """
+
+ @property
+ @abc.abstractmethod
+ def tbs_certlist_bytes(self) -> bytes:
+ """
+ Returns the tbsCertList payload bytes as defined in RFC 5280.
+ """
+
+ @abc.abstractmethod
+ def __eq__(self, other: object) -> bool:
+ """
+ Checks equality.
+ """
+
+ @abc.abstractmethod
+ def __len__(self) -> int:
+ """
+ Number of revoked certificates in the CRL.
+ """
+
+ @typing.overload
+ def __getitem__(self, idx: int) -> RevokedCertificate:
+ ...
+
+ @typing.overload
+ def __getitem__(self, idx: slice) -> typing.List[RevokedCertificate]:
+ ...
+
+ @abc.abstractmethod
+ def __getitem__(
+ self, idx: typing.Union[int, slice]
+ ) -> typing.Union[RevokedCertificate, typing.List[RevokedCertificate]]:
+ """
+ Returns a revoked certificate (or slice of revoked certificates).
+ """
+
+ @abc.abstractmethod
+ def __iter__(self) -> typing.Iterator[RevokedCertificate]:
+ """
+ Iterator over the revoked certificates
+ """
+
+ @abc.abstractmethod
+ def is_signature_valid(
+ self, public_key: CertificateIssuerPublicKeyTypes
+ ) -> bool:
+ """
+        Verifies the signature of the revocation list against the given
+        public key.
+ """
+
+
+CertificateRevocationList.register(rust_x509.CertificateRevocationList)
+
+
+class CertificateSigningRequest(metaclass=abc.ABCMeta):
+ @abc.abstractmethod
+ def __eq__(self, other: object) -> bool:
+ """
+ Checks equality.
+ """
+
+ @abc.abstractmethod
+ def __hash__(self) -> int:
+ """
+ Computes a hash.
+ """
+
+ @abc.abstractmethod
+ def public_key(self) -> CertificatePublicKeyTypes:
+ """
+ Returns the public key
+ """
+
+ @property
+ @abc.abstractmethod
+ def subject(self) -> Name:
+ """
+ Returns the subject name object.
+ """
+
+ @property
+ @abc.abstractmethod
+ def signature_hash_algorithm(
+ self,
+ ) -> typing.Optional[hashes.HashAlgorithm]:
+ """
+ Returns a HashAlgorithm corresponding to the type of the digest signed
+        in the request.
+ """
+
+ @property
+ @abc.abstractmethod
+ def signature_algorithm_oid(self) -> ObjectIdentifier:
+ """
+ Returns the ObjectIdentifier of the signature algorithm.
+ """
+
+ @property
+ @abc.abstractmethod
+ def extensions(self) -> Extensions:
+ """
+ Returns the extensions in the signing request.
+ """
+
+ @property
+ @abc.abstractmethod
+ def attributes(self) -> Attributes:
+ """
+ Returns an Attributes object.
+ """
+
+ @abc.abstractmethod
+ def public_bytes(self, encoding: serialization.Encoding) -> bytes:
+ """
+ Encodes the request to PEM or DER format.
+ """
+
+ @property
+ @abc.abstractmethod
+ def signature(self) -> bytes:
+ """
+ Returns the signature bytes.
+ """
+
+ @property
+ @abc.abstractmethod
+ def tbs_certrequest_bytes(self) -> bytes:
+ """
+ Returns the PKCS#10 CertificationRequestInfo bytes as defined in RFC
+ 2986.
+ """
+
+ @property
+ @abc.abstractmethod
+ def is_signature_valid(self) -> bool:
+ """
+        Verifies the signature of the signing request.
+ """
+
+ @abc.abstractmethod
+ def get_attribute_for_oid(self, oid: ObjectIdentifier) -> bytes:
+ """
+ Get the attribute value for a given OID.
+ """
+
+
+# Runtime isinstance checks need this since the rust class is not a subclass.
+CertificateSigningRequest.register(rust_x509.CertificateSigningRequest)
+
+
+# Backend argument preserved for API compatibility, but ignored.
+def load_pem_x509_certificate(
+ data: bytes, backend: typing.Any = None
+) -> Certificate:
+ return rust_x509.load_pem_x509_certificate(data)
+
+
+def load_pem_x509_certificates(data: bytes) -> typing.List[Certificate]:
+ return rust_x509.load_pem_x509_certificates(data)
+
+
+# Backend argument preserved for API compatibility, but ignored.
+def load_der_x509_certificate(
+ data: bytes, backend: typing.Any = None
+) -> Certificate:
+ return rust_x509.load_der_x509_certificate(data)
+
+
+# Backend argument preserved for API compatibility, but ignored.
+def load_pem_x509_csr(
+ data: bytes, backend: typing.Any = None
+) -> CertificateSigningRequest:
+ return rust_x509.load_pem_x509_csr(data)
+
+
+# Backend argument preserved for API compatibility, but ignored.
+def load_der_x509_csr(
+ data: bytes, backend: typing.Any = None
+) -> CertificateSigningRequest:
+ return rust_x509.load_der_x509_csr(data)
+
+
+# Backend argument preserved for API compatibility, but ignored.
+def load_pem_x509_crl(
+ data: bytes, backend: typing.Any = None
+) -> CertificateRevocationList:
+ return rust_x509.load_pem_x509_crl(data)
+
+
+# Backend argument preserved for API compatibility, but ignored.
+def load_der_x509_crl(
+ data: bytes, backend: typing.Any = None
+) -> CertificateRevocationList:
+ return rust_x509.load_der_x509_crl(data)
+
+
+class CertificateSigningRequestBuilder:
+ def __init__(
+ self,
+ subject_name: typing.Optional[Name] = None,
+ extensions: typing.List[Extension[ExtensionType]] = [],
+ attributes: typing.List[
+ typing.Tuple[ObjectIdentifier, bytes, typing.Optional[int]]
+ ] = [],
+ ):
+ """
+ Creates an empty X.509 certificate request (v1).
+ """
+ self._subject_name = subject_name
+ self._extensions = extensions
+ self._attributes = attributes
+
+ def subject_name(self, name: Name) -> CertificateSigningRequestBuilder:
+ """
+ Sets the certificate requestor's distinguished name.
+ """
+ if not isinstance(name, Name):
+ raise TypeError("Expecting x509.Name object.")
+ if self._subject_name is not None:
+ raise ValueError("The subject name may only be set once.")
+ return CertificateSigningRequestBuilder(
+ name, self._extensions, self._attributes
+ )
+
+ def add_extension(
+ self, extval: ExtensionType, critical: bool
+ ) -> CertificateSigningRequestBuilder:
+ """
+ Adds an X.509 extension to the certificate request.
+ """
+ if not isinstance(extval, ExtensionType):
+ raise TypeError("extension must be an ExtensionType")
+
+ extension = Extension(extval.oid, critical, extval)
+ _reject_duplicate_extension(extension, self._extensions)
+
+ return CertificateSigningRequestBuilder(
+ self._subject_name,
+ self._extensions + [extension],
+ self._attributes,
+ )
+
+ def add_attribute(
+ self,
+ oid: ObjectIdentifier,
+ value: bytes,
+ *,
+ _tag: typing.Optional[_ASN1Type] = None,
+ ) -> CertificateSigningRequestBuilder:
+ """
+ Adds an X.509 attribute with an OID and associated value.
+ """
+ if not isinstance(oid, ObjectIdentifier):
+ raise TypeError("oid must be an ObjectIdentifier")
+
+ if not isinstance(value, bytes):
+ raise TypeError("value must be bytes")
+
+ if _tag is not None and not isinstance(_tag, _ASN1Type):
+ raise TypeError("tag must be _ASN1Type")
+
+ _reject_duplicate_attribute(oid, self._attributes)
+
+ if _tag is not None:
+ tag = _tag.value
+ else:
+ tag = None
+
+ return CertificateSigningRequestBuilder(
+ self._subject_name,
+ self._extensions,
+ self._attributes + [(oid, value, tag)],
+ )
+
+ def sign(
+ self,
+ private_key: CertificateIssuerPrivateKeyTypes,
+ algorithm: typing.Optional[_AllowedHashTypes],
+ backend: typing.Any = None,
+ ) -> CertificateSigningRequest:
+ """
+ Signs the request using the requestor's private key.
+ """
+ if self._subject_name is None:
+ raise ValueError("A CertificateSigningRequest must have a subject")
+ return rust_x509.create_x509_csr(self, private_key, algorithm)
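
A minimal CSR sketch (annotation, not part of the diff; the subject and SAN
below are made up): each setter returns a fresh builder, so calls chain.

# Sketch only, not part of the committed file.
from cryptography import x509
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import ec
from cryptography.x509.oid import NameOID

key = ec.generate_private_key(ec.SECP256R1())
csr = (
    x509.CertificateSigningRequestBuilder()
    .subject_name(
        x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, "example.test")])
    )
    .add_extension(
        x509.SubjectAlternativeName([x509.DNSName("example.test")]),
        critical=False,
    )
    .sign(key, hashes.SHA256())
)
assert csr.is_signature_valid
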
+
+
+class CertificateBuilder:
+ _extensions: typing.List[Extension[ExtensionType]]
+
+ def __init__(
+ self,
+ issuer_name: typing.Optional[Name] = None,
+ subject_name: typing.Optional[Name] = None,
+ public_key: typing.Optional[CertificatePublicKeyTypes] = None,
+ serial_number: typing.Optional[int] = None,
+ not_valid_before: typing.Optional[datetime.datetime] = None,
+ not_valid_after: typing.Optional[datetime.datetime] = None,
+ extensions: typing.List[Extension[ExtensionType]] = [],
+ ) -> None:
+ self._version = Version.v3
+ self._issuer_name = issuer_name
+ self._subject_name = subject_name
+ self._public_key = public_key
+ self._serial_number = serial_number
+ self._not_valid_before = not_valid_before
+ self._not_valid_after = not_valid_after
+ self._extensions = extensions
+
+ def issuer_name(self, name: Name) -> CertificateBuilder:
+ """
+ Sets the CA's distinguished name.
+ """
+ if not isinstance(name, Name):
+ raise TypeError("Expecting x509.Name object.")
+ if self._issuer_name is not None:
+ raise ValueError("The issuer name may only be set once.")
+ return CertificateBuilder(
+ name,
+ self._subject_name,
+ self._public_key,
+ self._serial_number,
+ self._not_valid_before,
+ self._not_valid_after,
+ self._extensions,
+ )
+
+ def subject_name(self, name: Name) -> CertificateBuilder:
+ """
+ Sets the requestor's distinguished name.
+ """
+ if not isinstance(name, Name):
+ raise TypeError("Expecting x509.Name object.")
+ if self._subject_name is not None:
+ raise ValueError("The subject name may only be set once.")
+ return CertificateBuilder(
+ self._issuer_name,
+ name,
+ self._public_key,
+ self._serial_number,
+ self._not_valid_before,
+ self._not_valid_after,
+ self._extensions,
+ )
+
+ def public_key(
+ self,
+ key: CertificatePublicKeyTypes,
+ ) -> CertificateBuilder:
+ """
+ Sets the requestor's public key (as found in the signing request).
+ """
+ if not isinstance(
+ key,
+ (
+ dsa.DSAPublicKey,
+ rsa.RSAPublicKey,
+ ec.EllipticCurvePublicKey,
+ ed25519.Ed25519PublicKey,
+ ed448.Ed448PublicKey,
+ x25519.X25519PublicKey,
+ x448.X448PublicKey,
+ ),
+ ):
+ raise TypeError(
+ "Expecting one of DSAPublicKey, RSAPublicKey,"
+ " EllipticCurvePublicKey, Ed25519PublicKey,"
+ " Ed448PublicKey, X25519PublicKey, or "
+ "X448PublicKey."
+ )
+ if self._public_key is not None:
+ raise ValueError("The public key may only be set once.")
+ return CertificateBuilder(
+ self._issuer_name,
+ self._subject_name,
+ key,
+ self._serial_number,
+ self._not_valid_before,
+ self._not_valid_after,
+ self._extensions,
+ )
+
+ def serial_number(self, number: int) -> CertificateBuilder:
+ """
+ Sets the certificate serial number.
+ """
+ if not isinstance(number, int):
+ raise TypeError("Serial number must be of integral type.")
+ if self._serial_number is not None:
+ raise ValueError("The serial number may only be set once.")
+ if number <= 0:
+ raise ValueError("The serial number should be positive.")
+
+ # ASN.1 integers are always signed, so most significant bit must be
+ # zero.
+ if number.bit_length() >= 160: # As defined in RFC 5280
+ raise ValueError(
+ "The serial number should not be more than 159 " "bits."
+ )
+ return CertificateBuilder(
+ self._issuer_name,
+ self._subject_name,
+ self._public_key,
+ number,
+ self._not_valid_before,
+ self._not_valid_after,
+ self._extensions,
+ )
+
+ def not_valid_before(self, time: datetime.datetime) -> CertificateBuilder:
+ """
+ Sets the certificate activation time.
+ """
+ if not isinstance(time, datetime.datetime):
+ raise TypeError("Expecting datetime object.")
+ if self._not_valid_before is not None:
+ raise ValueError("The not valid before may only be set once.")
+ time = _convert_to_naive_utc_time(time)
+ if time < _EARLIEST_UTC_TIME:
+ raise ValueError(
+ "The not valid before date must be on or after"
+ " 1950 January 1)."
+ )
+ if self._not_valid_after is not None and time > self._not_valid_after:
+ raise ValueError(
+ "The not valid before date must be before the not valid after "
+ "date."
+ )
+ return CertificateBuilder(
+ self._issuer_name,
+ self._subject_name,
+ self._public_key,
+ self._serial_number,
+ time,
+ self._not_valid_after,
+ self._extensions,
+ )
+
+ def not_valid_after(self, time: datetime.datetime) -> CertificateBuilder:
+ """
+ Sets the certificate expiration time.
+ """
+ if not isinstance(time, datetime.datetime):
+ raise TypeError("Expecting datetime object.")
+ if self._not_valid_after is not None:
+ raise ValueError("The not valid after may only be set once.")
+ time = _convert_to_naive_utc_time(time)
+ if time < _EARLIEST_UTC_TIME:
+ raise ValueError(
+ "The not valid after date must be on or after"
+ " 1950 January 1."
+ )
+ if (
+ self._not_valid_before is not None
+ and time < self._not_valid_before
+ ):
+ raise ValueError(
+ "The not valid after date must be after the not valid before "
+ "date."
+ )
+ return CertificateBuilder(
+ self._issuer_name,
+ self._subject_name,
+ self._public_key,
+ self._serial_number,
+ self._not_valid_before,
+ time,
+ self._extensions,
+ )
+
+ def add_extension(
+ self, extval: ExtensionType, critical: bool
+ ) -> CertificateBuilder:
+ """
+ Adds an X.509 extension to the certificate.
+ """
+ if not isinstance(extval, ExtensionType):
+ raise TypeError("extension must be an ExtensionType")
+
+ extension = Extension(extval.oid, critical, extval)
+ _reject_duplicate_extension(extension, self._extensions)
+
+ return CertificateBuilder(
+ self._issuer_name,
+ self._subject_name,
+ self._public_key,
+ self._serial_number,
+ self._not_valid_before,
+ self._not_valid_after,
+ self._extensions + [extension],
+ )
+
+ def sign(
+ self,
+ private_key: CertificateIssuerPrivateKeyTypes,
+ algorithm: typing.Optional[_AllowedHashTypes],
+ backend: typing.Any = None,
+ *,
+ rsa_padding: typing.Optional[
+ typing.Union[padding.PSS, padding.PKCS1v15]
+ ] = None,
+ ) -> Certificate:
+ """
+ Signs the certificate using the CA's private key.
+ """
+ if self._subject_name is None:
+ raise ValueError("A certificate must have a subject name")
+
+ if self._issuer_name is None:
+ raise ValueError("A certificate must have an issuer name")
+
+ if self._serial_number is None:
+ raise ValueError("A certificate must have a serial number")
+
+ if self._not_valid_before is None:
+ raise ValueError("A certificate must have a not valid before time")
+
+ if self._not_valid_after is None:
+ raise ValueError("A certificate must have a not valid after time")
+
+ if self._public_key is None:
+ raise ValueError("A certificate must have a public key")
+
+ if rsa_padding is not None:
+ if not isinstance(rsa_padding, (padding.PSS, padding.PKCS1v15)):
+ raise TypeError("Padding must be PSS or PKCS1v15")
+ if not isinstance(private_key, rsa.RSAPrivateKey):
+ raise TypeError("Padding is only supported for RSA keys")
+
+ return rust_x509.create_x509_certificate(
+ self, private_key, algorithm, rsa_padding
+ )
+
+
+class CertificateRevocationListBuilder:
+ _extensions: typing.List[Extension[ExtensionType]]
+ _revoked_certificates: typing.List[RevokedCertificate]
+
+ def __init__(
+ self,
+ issuer_name: typing.Optional[Name] = None,
+ last_update: typing.Optional[datetime.datetime] = None,
+ next_update: typing.Optional[datetime.datetime] = None,
+ extensions: typing.List[Extension[ExtensionType]] = [],
+ revoked_certificates: typing.List[RevokedCertificate] = [],
+ ):
+ self._issuer_name = issuer_name
+ self._last_update = last_update
+ self._next_update = next_update
+ self._extensions = extensions
+ self._revoked_certificates = revoked_certificates
+
+ def issuer_name(
+ self, issuer_name: Name
+ ) -> CertificateRevocationListBuilder:
+ if not isinstance(issuer_name, Name):
+ raise TypeError("Expecting x509.Name object.")
+ if self._issuer_name is not None:
+ raise ValueError("The issuer name may only be set once.")
+ return CertificateRevocationListBuilder(
+ issuer_name,
+ self._last_update,
+ self._next_update,
+ self._extensions,
+ self._revoked_certificates,
+ )
+
+ def last_update(
+ self, last_update: datetime.datetime
+ ) -> CertificateRevocationListBuilder:
+ if not isinstance(last_update, datetime.datetime):
+ raise TypeError("Expecting datetime object.")
+ if self._last_update is not None:
+ raise ValueError("Last update may only be set once.")
+ last_update = _convert_to_naive_utc_time(last_update)
+ if last_update < _EARLIEST_UTC_TIME:
+ raise ValueError(
+ "The last update date must be on or after" " 1950 January 1."
+ )
+ if self._next_update is not None and last_update > self._next_update:
+ raise ValueError(
+ "The last update date must be before the next update date."
+ )
+ return CertificateRevocationListBuilder(
+ self._issuer_name,
+ last_update,
+ self._next_update,
+ self._extensions,
+ self._revoked_certificates,
+ )
+
+ def next_update(
+ self, next_update: datetime.datetime
+ ) -> CertificateRevocationListBuilder:
+ if not isinstance(next_update, datetime.datetime):
+ raise TypeError("Expecting datetime object.")
+ if self._next_update is not None:
+ raise ValueError("Last update may only be set once.")
+ next_update = _convert_to_naive_utc_time(next_update)
+ if next_update < _EARLIEST_UTC_TIME:
+ raise ValueError(
+ "The last update date must be on or after" " 1950 January 1."
+ )
+ if self._last_update is not None and next_update < self._last_update:
+ raise ValueError(
+ "The next update date must be after the last update date."
+ )
+ return CertificateRevocationListBuilder(
+ self._issuer_name,
+ self._last_update,
+ next_update,
+ self._extensions,
+ self._revoked_certificates,
+ )
+
+ def add_extension(
+ self, extval: ExtensionType, critical: bool
+ ) -> CertificateRevocationListBuilder:
+ """
+ Adds an X.509 extension to the certificate revocation list.
+ """
+ if not isinstance(extval, ExtensionType):
+ raise TypeError("extension must be an ExtensionType")
+
+ extension = Extension(extval.oid, critical, extval)
+ _reject_duplicate_extension(extension, self._extensions)
+ return CertificateRevocationListBuilder(
+ self._issuer_name,
+ self._last_update,
+ self._next_update,
+ self._extensions + [extension],
+ self._revoked_certificates,
+ )
+
+ def add_revoked_certificate(
+ self, revoked_certificate: RevokedCertificate
+ ) -> CertificateRevocationListBuilder:
+ """
+ Adds a revoked certificate to the CRL.
+ """
+ if not isinstance(revoked_certificate, RevokedCertificate):
+ raise TypeError("Must be an instance of RevokedCertificate")
+
+ return CertificateRevocationListBuilder(
+ self._issuer_name,
+ self._last_update,
+ self._next_update,
+ self._extensions,
+ self._revoked_certificates + [revoked_certificate],
+ )
+
+ def sign(
+ self,
+ private_key: CertificateIssuerPrivateKeyTypes,
+ algorithm: typing.Optional[_AllowedHashTypes],
+ backend: typing.Any = None,
+ ) -> CertificateRevocationList:
+ if self._issuer_name is None:
+ raise ValueError("A CRL must have an issuer name")
+
+ if self._last_update is None:
+ raise ValueError("A CRL must have a last update time")
+
+ if self._next_update is None:
+ raise ValueError("A CRL must have a next update time")
+
+ return rust_x509.create_x509_crl(self, private_key, algorithm)
+
+
+class RevokedCertificateBuilder:
+ def __init__(
+ self,
+ serial_number: typing.Optional[int] = None,
+ revocation_date: typing.Optional[datetime.datetime] = None,
+ extensions: typing.List[Extension[ExtensionType]] = [],
+ ):
+ self._serial_number = serial_number
+ self._revocation_date = revocation_date
+ self._extensions = extensions
+
+ def serial_number(self, number: int) -> RevokedCertificateBuilder:
+ if not isinstance(number, int):
+ raise TypeError("Serial number must be of integral type.")
+ if self._serial_number is not None:
+ raise ValueError("The serial number may only be set once.")
+ if number <= 0:
+ raise ValueError("The serial number should be positive")
+
+ # ASN.1 integers are always signed, so most significant bit must be
+ # zero.
+ if number.bit_length() >= 160: # As defined in RFC 5280
+ raise ValueError(
+ "The serial number should not be more than 159 " "bits."
+ )
+ return RevokedCertificateBuilder(
+ number, self._revocation_date, self._extensions
+ )
+
+ def revocation_date(
+ self, time: datetime.datetime
+ ) -> RevokedCertificateBuilder:
+ if not isinstance(time, datetime.datetime):
+ raise TypeError("Expecting datetime object.")
+ if self._revocation_date is not None:
+ raise ValueError("The revocation date may only be set once.")
+ time = _convert_to_naive_utc_time(time)
+ if time < _EARLIEST_UTC_TIME:
+ raise ValueError(
+ "The revocation date must be on or after" " 1950 January 1."
+ )
+ return RevokedCertificateBuilder(
+ self._serial_number, time, self._extensions
+ )
+
+ def add_extension(
+ self, extval: ExtensionType, critical: bool
+ ) -> RevokedCertificateBuilder:
+ if not isinstance(extval, ExtensionType):
+ raise TypeError("extension must be an ExtensionType")
+
+ extension = Extension(extval.oid, critical, extval)
+ _reject_duplicate_extension(extension, self._extensions)
+ return RevokedCertificateBuilder(
+ self._serial_number,
+ self._revocation_date,
+ self._extensions + [extension],
+ )
+
+ def build(self, backend: typing.Any = None) -> RevokedCertificate:
+ if self._serial_number is None:
+ raise ValueError("A revoked certificate must have a serial number")
+ if self._revocation_date is None:
+ raise ValueError(
+ "A revoked certificate must have a revocation date"
+ )
+ return _RawRevokedCertificate(
+ self._serial_number,
+ self._revocation_date,
+ Extensions(self._extensions),
+ )
+
+
+def random_serial_number() -> int:
+ return int.from_bytes(os.urandom(20), "big") >> 1
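
Pulling the builders together, a self-signed certificate sketch (annotation,
not part of the diff; every name and date below is made up):

# Sketch only, not part of the committed file.
import datetime

from cryptography import x509
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import ec
from cryptography.x509.oid import NameOID

key = ec.generate_private_key(ec.SECP256R1())
name = x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, "example.test")])
now = datetime.datetime.now(datetime.timezone.utc)

cert = (
    x509.CertificateBuilder()
    .subject_name(name)
    .issuer_name(name)  # self-signed: subject == issuer
    .public_key(key.public_key())
    .serial_number(x509.random_serial_number())
    .not_valid_before(now)
    .not_valid_after(now + datetime.timedelta(days=30))
    .add_extension(
        x509.BasicConstraints(ca=False, path_length=None), critical=True
    )
    .sign(key, hashes.SHA256())
)
cert.verify_directly_issued_by(cert)  # name match + signature check only
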
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/x509/certificate_transparency.py b/Backend/venv/lib/python3.12/site-packages/cryptography/x509/certificate_transparency.py
new file mode 100644
index 00000000..73647ee7
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/x509/certificate_transparency.py
@@ -0,0 +1,97 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import abc
+import datetime
+
+from cryptography import utils
+from cryptography.hazmat.bindings._rust import x509 as rust_x509
+from cryptography.hazmat.primitives.hashes import HashAlgorithm
+
+
+class LogEntryType(utils.Enum):
+ X509_CERTIFICATE = 0
+ PRE_CERTIFICATE = 1
+
+
+class Version(utils.Enum):
+ v1 = 0
+
+
+class SignatureAlgorithm(utils.Enum):
+ """
+ Signature algorithms that are valid for SCTs.
+
+ These are exactly the same as SignatureAlgorithm in RFC 5246 (TLS 1.2).
+
+    See: <https://datatracker.ietf.org/doc/html/rfc5246#section-7.4.1.4.1>
+ """
+
+ ANONYMOUS = 0
+ RSA = 1
+ DSA = 2
+ ECDSA = 3
+
+
+class SignedCertificateTimestamp(metaclass=abc.ABCMeta):
+ @property
+ @abc.abstractmethod
+ def version(self) -> Version:
+ """
+ Returns the SCT version.
+ """
+
+ @property
+ @abc.abstractmethod
+ def log_id(self) -> bytes:
+ """
+ Returns an identifier indicating which log this SCT is for.
+ """
+
+ @property
+ @abc.abstractmethod
+ def timestamp(self) -> datetime.datetime:
+ """
+ Returns the timestamp for this SCT.
+ """
+
+ @property
+ @abc.abstractmethod
+ def entry_type(self) -> LogEntryType:
+ """
+ Returns whether this is an SCT for a certificate or pre-certificate.
+ """
+
+ @property
+ @abc.abstractmethod
+ def signature_hash_algorithm(self) -> HashAlgorithm:
+ """
+ Returns the hash algorithm used for the SCT's signature.
+ """
+
+ @property
+ @abc.abstractmethod
+ def signature_algorithm(self) -> SignatureAlgorithm:
+ """
+ Returns the signing algorithm used for the SCT's signature.
+ """
+
+ @property
+ @abc.abstractmethod
+ def signature(self) -> bytes:
+ """
+ Returns the signature for this SCT.
+ """
+
+ @property
+ @abc.abstractmethod
+ def extension_bytes(self) -> bytes:
+ """
+ Returns the raw bytes of any extensions for this SCT.
+ """
+
+
+SignedCertificateTimestamp.register(rust_x509.Sct)
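
Reading embedded SCTs, as a sketch (annotation, not part of the diff; cert is
assumed to be a Certificate loaded elsewhere):

# Sketch only, not part of the committed file.
from cryptography import x509

try:
    scts = cert.extensions.get_extension_for_class(
        x509.PrecertificateSignedCertificateTimestamps
    ).value
except x509.ExtensionNotFound:
    scts = []

for sct in scts:
    print(sct.version, sct.log_id.hex(), sct.timestamp, sct.entry_type)
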
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/x509/extensions.py b/Backend/venv/lib/python3.12/site-packages/cryptography/x509/extensions.py
new file mode 100644
index 00000000..ac99592f
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/x509/extensions.py
@@ -0,0 +1,2215 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import abc
+import datetime
+import hashlib
+import ipaddress
+import typing
+
+from cryptography import utils
+from cryptography.hazmat.bindings._rust import asn1
+from cryptography.hazmat.bindings._rust import x509 as rust_x509
+from cryptography.hazmat.primitives import constant_time, serialization
+from cryptography.hazmat.primitives.asymmetric.ec import EllipticCurvePublicKey
+from cryptography.hazmat.primitives.asymmetric.rsa import RSAPublicKey
+from cryptography.hazmat.primitives.asymmetric.types import (
+ CertificateIssuerPublicKeyTypes,
+ CertificatePublicKeyTypes,
+)
+from cryptography.x509.certificate_transparency import (
+ SignedCertificateTimestamp,
+)
+from cryptography.x509.general_name import (
+ DirectoryName,
+ DNSName,
+ GeneralName,
+ IPAddress,
+ OtherName,
+ RegisteredID,
+ RFC822Name,
+ UniformResourceIdentifier,
+ _IPAddressTypes,
+)
+from cryptography.x509.name import Name, RelativeDistinguishedName
+from cryptography.x509.oid import (
+ CRLEntryExtensionOID,
+ ExtensionOID,
+ ObjectIdentifier,
+ OCSPExtensionOID,
+)
+
+ExtensionTypeVar = typing.TypeVar(
+ "ExtensionTypeVar", bound="ExtensionType", covariant=True
+)
+
+
+def _key_identifier_from_public_key(
+ public_key: CertificatePublicKeyTypes,
+) -> bytes:
+ if isinstance(public_key, RSAPublicKey):
+ data = public_key.public_bytes(
+ serialization.Encoding.DER,
+ serialization.PublicFormat.PKCS1,
+ )
+ elif isinstance(public_key, EllipticCurvePublicKey):
+ data = public_key.public_bytes(
+ serialization.Encoding.X962,
+ serialization.PublicFormat.UncompressedPoint,
+ )
+ else:
+ # This is a very slow way to do this.
+ serialized = public_key.public_bytes(
+ serialization.Encoding.DER,
+ serialization.PublicFormat.SubjectPublicKeyInfo,
+ )
+ data = asn1.parse_spki_for_data(serialized)
+
+ return hashlib.sha1(data).digest()
+
+
+def _make_sequence_methods(field_name: str):
+ def len_method(self) -> int:
+ return len(getattr(self, field_name))
+
+ def iter_method(self):
+ return iter(getattr(self, field_name))
+
+ def getitem_method(self, idx):
+ return getattr(self, field_name)[idx]
+
+ return len_method, iter_method, getitem_method
+
+
+class DuplicateExtension(Exception):
+ def __init__(self, msg: str, oid: ObjectIdentifier) -> None:
+ super().__init__(msg)
+ self.oid = oid
+
+
+class ExtensionNotFound(Exception):
+ def __init__(self, msg: str, oid: ObjectIdentifier) -> None:
+ super().__init__(msg)
+ self.oid = oid
+
+
+class ExtensionType(metaclass=abc.ABCMeta):
+ oid: typing.ClassVar[ObjectIdentifier]
+
+ def public_bytes(self) -> bytes:
+ """
+ Serializes the extension type to DER.
+ """
+ raise NotImplementedError(
+ "public_bytes is not implemented for extension type {!r}".format(
+ self
+ )
+ )
+
+
+class Extensions:
+ def __init__(
+ self, extensions: typing.Iterable[Extension[ExtensionType]]
+ ) -> None:
+ self._extensions = list(extensions)
+
+ def get_extension_for_oid(
+ self, oid: ObjectIdentifier
+ ) -> Extension[ExtensionType]:
+ for ext in self:
+ if ext.oid == oid:
+ return ext
+
+ raise ExtensionNotFound(f"No {oid} extension was found", oid)
+
+ def get_extension_for_class(
+ self, extclass: typing.Type[ExtensionTypeVar]
+ ) -> Extension[ExtensionTypeVar]:
+ if extclass is UnrecognizedExtension:
+ raise TypeError(
+ "UnrecognizedExtension can't be used with "
+ "get_extension_for_class because more than one instance of the"
+ " class may be present."
+ )
+
+ for ext in self:
+ if isinstance(ext.value, extclass):
+ return ext
+
+ raise ExtensionNotFound(
+ f"No {extclass} extension was found", extclass.oid
+ )
+
+ __len__, __iter__, __getitem__ = _make_sequence_methods("_extensions")
+
+ def __repr__(self) -> str:
+ return f""
+
+
+class CRLNumber(ExtensionType):
+ oid = ExtensionOID.CRL_NUMBER
+
+ def __init__(self, crl_number: int) -> None:
+ if not isinstance(crl_number, int):
+ raise TypeError("crl_number must be an integer")
+
+ self._crl_number = crl_number
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, CRLNumber):
+ return NotImplemented
+
+ return self.crl_number == other.crl_number
+
+ def __hash__(self) -> int:
+ return hash(self.crl_number)
+
+ def __repr__(self) -> str:
+ return f""
+
+ @property
+ def crl_number(self) -> int:
+ return self._crl_number
+
+ def public_bytes(self) -> bytes:
+ return rust_x509.encode_extension_value(self)
+
+
+class AuthorityKeyIdentifier(ExtensionType):
+ oid = ExtensionOID.AUTHORITY_KEY_IDENTIFIER
+
+ def __init__(
+ self,
+ key_identifier: typing.Optional[bytes],
+ authority_cert_issuer: typing.Optional[typing.Iterable[GeneralName]],
+ authority_cert_serial_number: typing.Optional[int],
+ ) -> None:
+ if (authority_cert_issuer is None) != (
+ authority_cert_serial_number is None
+ ):
+ raise ValueError(
+ "authority_cert_issuer and authority_cert_serial_number "
+ "must both be present or both None"
+ )
+
+ if authority_cert_issuer is not None:
+ authority_cert_issuer = list(authority_cert_issuer)
+ if not all(
+ isinstance(x, GeneralName) for x in authority_cert_issuer
+ ):
+ raise TypeError(
+ "authority_cert_issuer must be a list of GeneralName "
+ "objects"
+ )
+
+ if authority_cert_serial_number is not None and not isinstance(
+ authority_cert_serial_number, int
+ ):
+ raise TypeError("authority_cert_serial_number must be an integer")
+
+ self._key_identifier = key_identifier
+ self._authority_cert_issuer = authority_cert_issuer
+ self._authority_cert_serial_number = authority_cert_serial_number
+
+ # This takes a subset of CertificatePublicKeyTypes because an issuer
+ # cannot have an X25519/X448 key. This introduces some unfortunate
+ # asymmetry that requires typing users to explicitly
+ # narrow their type, but we should make this accurate and not just
+ # convenient.
+ @classmethod
+ def from_issuer_public_key(
+ cls, public_key: CertificateIssuerPublicKeyTypes
+ ) -> AuthorityKeyIdentifier:
+ digest = _key_identifier_from_public_key(public_key)
+ return cls(
+ key_identifier=digest,
+ authority_cert_issuer=None,
+ authority_cert_serial_number=None,
+ )
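+
+ # Illustrative sketch (assumed `issuer_cert` name): when minting a new
+ # certificate, this extension is typically derived from the issuer's key,
+ # e.g. AuthorityKeyIdentifier.from_issuer_public_key(issuer_cert.public_key());
+ # per the note above, that key must not be an X25519/X448 key.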
+
+ @classmethod
+ def from_issuer_subject_key_identifier(
+ cls, ski: SubjectKeyIdentifier
+ ) -> AuthorityKeyIdentifier:
+ return cls(
+ key_identifier=ski.digest,
+ authority_cert_issuer=None,
+ authority_cert_serial_number=None,
+ )
+
+ def __repr__(self) -> str:
+ return (
+ "".format(self)
+ )
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, AuthorityKeyIdentifier):
+ return NotImplemented
+
+ return (
+ self.key_identifier == other.key_identifier
+ and self.authority_cert_issuer == other.authority_cert_issuer
+ and self.authority_cert_serial_number
+ == other.authority_cert_serial_number
+ )
+
+ def __hash__(self) -> int:
+ if self.authority_cert_issuer is None:
+ aci = None
+ else:
+ aci = tuple(self.authority_cert_issuer)
+ return hash(
+ (self.key_identifier, aci, self.authority_cert_serial_number)
+ )
+
+ @property
+ def key_identifier(self) -> typing.Optional[bytes]:
+ return self._key_identifier
+
+ @property
+ def authority_cert_issuer(
+ self,
+ ) -> typing.Optional[typing.List[GeneralName]]:
+ return self._authority_cert_issuer
+
+ @property
+ def authority_cert_serial_number(self) -> typing.Optional[int]:
+ return self._authority_cert_serial_number
+
+ def public_bytes(self) -> bytes:
+ return rust_x509.encode_extension_value(self)
+
+
+class SubjectKeyIdentifier(ExtensionType):
+ oid = ExtensionOID.SUBJECT_KEY_IDENTIFIER
+
+ def __init__(self, digest: bytes) -> None:
+ self._digest = digest
+
+ @classmethod
+ def from_public_key(
+ cls, public_key: CertificatePublicKeyTypes
+ ) -> SubjectKeyIdentifier:
+ return cls(_key_identifier_from_public_key(public_key))
+
+ @property
+ def digest(self) -> bytes:
+ return self._digest
+
+ @property
+ def key_identifier(self) -> bytes:
+ return self._digest
+
+ def __repr__(self) -> str:
+ return f""
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, SubjectKeyIdentifier):
+ return NotImplemented
+
+ return constant_time.bytes_eq(self.digest, other.digest)
+
+ def __hash__(self) -> int:
+ return hash(self.digest)
+
+ def public_bytes(self) -> bytes:
+ return rust_x509.encode_extension_value(self)
+
+
+class AuthorityInformationAccess(ExtensionType):
+ oid = ExtensionOID.AUTHORITY_INFORMATION_ACCESS
+
+ def __init__(
+ self, descriptions: typing.Iterable[AccessDescription]
+ ) -> None:
+ descriptions = list(descriptions)
+ if not all(isinstance(x, AccessDescription) for x in descriptions):
+ raise TypeError(
+ "Every item in the descriptions list must be an "
+ "AccessDescription"
+ )
+
+ self._descriptions = descriptions
+
+ __len__, __iter__, __getitem__ = _make_sequence_methods("_descriptions")
+
+ def __repr__(self) -> str:
+ return f""
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, AuthorityInformationAccess):
+ return NotImplemented
+
+ return self._descriptions == other._descriptions
+
+ def __hash__(self) -> int:
+ return hash(tuple(self._descriptions))
+
+ def public_bytes(self) -> bytes:
+ return rust_x509.encode_extension_value(self)
+
+
+class SubjectInformationAccess(ExtensionType):
+ oid = ExtensionOID.SUBJECT_INFORMATION_ACCESS
+
+ def __init__(
+ self, descriptions: typing.Iterable[AccessDescription]
+ ) -> None:
+ descriptions = list(descriptions)
+ if not all(isinstance(x, AccessDescription) for x in descriptions):
+ raise TypeError(
+ "Every item in the descriptions list must be an "
+ "AccessDescription"
+ )
+
+ self._descriptions = descriptions
+
+ __len__, __iter__, __getitem__ = _make_sequence_methods("_descriptions")
+
+ def __repr__(self) -> str:
+ return f""
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, SubjectInformationAccess):
+ return NotImplemented
+
+ return self._descriptions == other._descriptions
+
+ def __hash__(self) -> int:
+ return hash(tuple(self._descriptions))
+
+ def public_bytes(self) -> bytes:
+ return rust_x509.encode_extension_value(self)
+
+
+class AccessDescription:
+ def __init__(
+ self, access_method: ObjectIdentifier, access_location: GeneralName
+ ) -> None:
+ if not isinstance(access_method, ObjectIdentifier):
+ raise TypeError("access_method must be an ObjectIdentifier")
+
+ if not isinstance(access_location, GeneralName):
+ raise TypeError("access_location must be a GeneralName")
+
+ self._access_method = access_method
+ self._access_location = access_location
+
+ def __repr__(self) -> str:
+ return (
+ "".format(self)
+ )
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, AccessDescription):
+ return NotImplemented
+
+ return (
+ self.access_method == other.access_method
+ and self.access_location == other.access_location
+ )
+
+ def __hash__(self) -> int:
+ return hash((self.access_method, self.access_location))
+
+ @property
+ def access_method(self) -> ObjectIdentifier:
+ return self._access_method
+
+ @property
+ def access_location(self) -> GeneralName:
+ return self._access_location
+
+
+class BasicConstraints(ExtensionType):
+ oid = ExtensionOID.BASIC_CONSTRAINTS
+
+ def __init__(self, ca: bool, path_length: typing.Optional[int]) -> None:
+ if not isinstance(ca, bool):
+ raise TypeError("ca must be a boolean value")
+
+ if path_length is not None and not ca:
+ raise ValueError("path_length must be None when ca is False")
+
+ if path_length is not None and (
+ not isinstance(path_length, int) or path_length < 0
+ ):
+ raise TypeError(
+ "path_length must be a non-negative integer or None"
+ )
+
+ self._ca = ca
+ self._path_length = path_length
+
+ @property
+ def ca(self) -> bool:
+ return self._ca
+
+ @property
+ def path_length(self) -> typing.Optional[int]:
+ return self._path_length
+
+ def __repr__(self) -> str:
+ return (
+ ""
+ ).format(self)
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, BasicConstraints):
+ return NotImplemented
+
+ return self.ca == other.ca and self.path_length == other.path_length
+
+ def __hash__(self) -> int:
+ return hash((self.ca, self.path_length))
+
+ def public_bytes(self) -> bytes:
+ return rust_x509.encode_extension_value(self)
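+
+ # Illustrative sketch: BasicConstraints(ca=True, path_length=0) describes a
+ # CA that may sign only end-entity certificates, while end-entity
+ # certificates use BasicConstraints(ca=False, path_length=None).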
+
+
+class DeltaCRLIndicator(ExtensionType):
+ oid = ExtensionOID.DELTA_CRL_INDICATOR
+
+ def __init__(self, crl_number: int) -> None:
+ if not isinstance(crl_number, int):
+ raise TypeError("crl_number must be an integer")
+
+ self._crl_number = crl_number
+
+ @property
+ def crl_number(self) -> int:
+ return self._crl_number
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, DeltaCRLIndicator):
+ return NotImplemented
+
+ return self.crl_number == other.crl_number
+
+ def __hash__(self) -> int:
+ return hash(self.crl_number)
+
+ def __repr__(self) -> str:
+ return f""
+
+ def public_bytes(self) -> bytes:
+ return rust_x509.encode_extension_value(self)
+
+
+class CRLDistributionPoints(ExtensionType):
+ oid = ExtensionOID.CRL_DISTRIBUTION_POINTS
+
+ def __init__(
+ self, distribution_points: typing.Iterable[DistributionPoint]
+ ) -> None:
+ distribution_points = list(distribution_points)
+ if not all(
+ isinstance(x, DistributionPoint) for x in distribution_points
+ ):
+ raise TypeError(
+ "distribution_points must be a list of DistributionPoint "
+ "objects"
+ )
+
+ self._distribution_points = distribution_points
+
+ __len__, __iter__, __getitem__ = _make_sequence_methods(
+ "_distribution_points"
+ )
+
+ def __repr__(self) -> str:
+ return f""
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, CRLDistributionPoints):
+ return NotImplemented
+
+ return self._distribution_points == other._distribution_points
+
+ def __hash__(self) -> int:
+ return hash(tuple(self._distribution_points))
+
+ def public_bytes(self) -> bytes:
+ return rust_x509.encode_extension_value(self)
+
+
+class FreshestCRL(ExtensionType):
+ oid = ExtensionOID.FRESHEST_CRL
+
+ def __init__(
+ self, distribution_points: typing.Iterable[DistributionPoint]
+ ) -> None:
+ distribution_points = list(distribution_points)
+ if not all(
+ isinstance(x, DistributionPoint) for x in distribution_points
+ ):
+ raise TypeError(
+ "distribution_points must be a list of DistributionPoint "
+ "objects"
+ )
+
+ self._distribution_points = distribution_points
+
+ __len__, __iter__, __getitem__ = _make_sequence_methods(
+ "_distribution_points"
+ )
+
+ def __repr__(self) -> str:
+ return f""
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, FreshestCRL):
+ return NotImplemented
+
+ return self._distribution_points == other._distribution_points
+
+ def __hash__(self) -> int:
+ return hash(tuple(self._distribution_points))
+
+ def public_bytes(self) -> bytes:
+ return rust_x509.encode_extension_value(self)
+
+
+class DistributionPoint:
+ def __init__(
+ self,
+ full_name: typing.Optional[typing.Iterable[GeneralName]],
+ relative_name: typing.Optional[RelativeDistinguishedName],
+ reasons: typing.Optional[typing.FrozenSet[ReasonFlags]],
+ crl_issuer: typing.Optional[typing.Iterable[GeneralName]],
+ ) -> None:
+ if full_name and relative_name:
+ raise ValueError(
+ "You cannot provide both full_name and relative_name, at "
+ "least one must be None."
+ )
+ if not full_name and not relative_name and not crl_issuer:
+ raise ValueError(
+ "Either full_name, relative_name or crl_issuer must be "
+ "provided."
+ )
+
+ if full_name is not None:
+ full_name = list(full_name)
+ if not all(isinstance(x, GeneralName) for x in full_name):
+ raise TypeError(
+ "full_name must be a list of GeneralName objects"
+ )
+
+ if relative_name:
+ if not isinstance(relative_name, RelativeDistinguishedName):
+ raise TypeError(
+ "relative_name must be a RelativeDistinguishedName"
+ )
+
+ if crl_issuer is not None:
+ crl_issuer = list(crl_issuer)
+ if not all(isinstance(x, GeneralName) for x in crl_issuer):
+ raise TypeError(
+ "crl_issuer must be None or a list of general names"
+ )
+
+ if reasons and (
+ not isinstance(reasons, frozenset)
+ or not all(isinstance(x, ReasonFlags) for x in reasons)
+ ):
+ raise TypeError("reasons must be None or frozenset of ReasonFlags")
+
+ if reasons and (
+ ReasonFlags.unspecified in reasons
+ or ReasonFlags.remove_from_crl in reasons
+ ):
+ raise ValueError(
+ "unspecified and remove_from_crl are not valid reasons in a "
+ "DistributionPoint"
+ )
+
+ self._full_name = full_name
+ self._relative_name = relative_name
+ self._reasons = reasons
+ self._crl_issuer = crl_issuer
+
+ def __repr__(self) -> str:
+ return (
+ "".format(self)
+ )
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, DistributionPoint):
+ return NotImplemented
+
+ return (
+ self.full_name == other.full_name
+ and self.relative_name == other.relative_name
+ and self.reasons == other.reasons
+ and self.crl_issuer == other.crl_issuer
+ )
+
+ def __hash__(self) -> int:
+ if self.full_name is not None:
+ fn: typing.Optional[typing.Tuple[GeneralName, ...]] = tuple(
+ self.full_name
+ )
+ else:
+ fn = None
+
+ if self.crl_issuer is not None:
+ crl_issuer: typing.Optional[
+ typing.Tuple[GeneralName, ...]
+ ] = tuple(self.crl_issuer)
+ else:
+ crl_issuer = None
+
+ return hash((fn, self.relative_name, self.reasons, crl_issuer))
+
+ @property
+ def full_name(self) -> typing.Optional[typing.List[GeneralName]]:
+ return self._full_name
+
+ @property
+ def relative_name(self) -> typing.Optional[RelativeDistinguishedName]:
+ return self._relative_name
+
+ @property
+ def reasons(self) -> typing.Optional[typing.FrozenSet[ReasonFlags]]:
+ return self._reasons
+
+ @property
+ def crl_issuer(self) -> typing.Optional[typing.List[GeneralName]]:
+ return self._crl_issuer
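+
+ # Illustrative sketch (hypothetical URL): a minimal HTTP distribution point
+ # carries only a full_name, e.g.
+ #
+ #     DistributionPoint(
+ #         full_name=[UniformResourceIdentifier("http://example.com/crl.der")],
+ #         relative_name=None,
+ #         reasons=None,
+ #         crl_issuer=None,
+ #     )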
+
+
+class ReasonFlags(utils.Enum):
+ unspecified = "unspecified"
+ key_compromise = "keyCompromise"
+ ca_compromise = "cACompromise"
+ affiliation_changed = "affiliationChanged"
+ superseded = "superseded"
+ cessation_of_operation = "cessationOfOperation"
+ certificate_hold = "certificateHold"
+ privilege_withdrawn = "privilegeWithdrawn"
+ aa_compromise = "aACompromise"
+ remove_from_crl = "removeFromCRL"
+
+
+# These are distribution point bit string mappings. Not to be confused with
+# CRLReason reason flags bit string mappings.
+# ReasonFlags ::= BIT STRING {
+# unused (0),
+# keyCompromise (1),
+# cACompromise (2),
+# affiliationChanged (3),
+# superseded (4),
+# cessationOfOperation (5),
+# certificateHold (6),
+# privilegeWithdrawn (7),
+# aACompromise (8) }
+_REASON_BIT_MAPPING = {
+ 1: ReasonFlags.key_compromise,
+ 2: ReasonFlags.ca_compromise,
+ 3: ReasonFlags.affiliation_changed,
+ 4: ReasonFlags.superseded,
+ 5: ReasonFlags.cessation_of_operation,
+ 6: ReasonFlags.certificate_hold,
+ 7: ReasonFlags.privilege_withdrawn,
+ 8: ReasonFlags.aa_compromise,
+}
+
+_CRLREASONFLAGS = {
+ ReasonFlags.key_compromise: 1,
+ ReasonFlags.ca_compromise: 2,
+ ReasonFlags.affiliation_changed: 3,
+ ReasonFlags.superseded: 4,
+ ReasonFlags.cessation_of_operation: 5,
+ ReasonFlags.certificate_hold: 6,
+ ReasonFlags.privilege_withdrawn: 7,
+ ReasonFlags.aa_compromise: 8,
+}
+
+
+class PolicyConstraints(ExtensionType):
+ oid = ExtensionOID.POLICY_CONSTRAINTS
+
+ def __init__(
+ self,
+ require_explicit_policy: typing.Optional[int],
+ inhibit_policy_mapping: typing.Optional[int],
+ ) -> None:
+ if require_explicit_policy is not None and not isinstance(
+ require_explicit_policy, int
+ ):
+ raise TypeError(
+ "require_explicit_policy must be a non-negative integer or "
+ "None"
+ )
+
+ if inhibit_policy_mapping is not None and not isinstance(
+ inhibit_policy_mapping, int
+ ):
+ raise TypeError(
+ "inhibit_policy_mapping must be a non-negative integer or None"
+ )
+
+ if inhibit_policy_mapping is None and require_explicit_policy is None:
+ raise ValueError(
+ "At least one of require_explicit_policy and "
+ "inhibit_policy_mapping must not be None"
+ )
+
+ self._require_explicit_policy = require_explicit_policy
+ self._inhibit_policy_mapping = inhibit_policy_mapping
+
+ def __repr__(self) -> str:
+ return (
+ "".format(self)
+ )
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, PolicyConstraints):
+ return NotImplemented
+
+ return (
+ self.require_explicit_policy == other.require_explicit_policy
+ and self.inhibit_policy_mapping == other.inhibit_policy_mapping
+ )
+
+ def __hash__(self) -> int:
+ return hash(
+ (self.require_explicit_policy, self.inhibit_policy_mapping)
+ )
+
+ @property
+ def require_explicit_policy(self) -> typing.Optional[int]:
+ return self._require_explicit_policy
+
+ @property
+ def inhibit_policy_mapping(self) -> typing.Optional[int]:
+ return self._inhibit_policy_mapping
+
+ def public_bytes(self) -> bytes:
+ return rust_x509.encode_extension_value(self)
+
+
+class CertificatePolicies(ExtensionType):
+ oid = ExtensionOID.CERTIFICATE_POLICIES
+
+ def __init__(self, policies: typing.Iterable[PolicyInformation]) -> None:
+ policies = list(policies)
+ if not all(isinstance(x, PolicyInformation) for x in policies):
+ raise TypeError(
+ "Every item in the policies list must be a "
+ "PolicyInformation"
+ )
+
+ self._policies = policies
+
+ __len__, __iter__, __getitem__ = _make_sequence_methods("_policies")
+
+ def __repr__(self) -> str:
+ return f""
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, CertificatePolicies):
+ return NotImplemented
+
+ return self._policies == other._policies
+
+ def __hash__(self) -> int:
+ return hash(tuple(self._policies))
+
+ def public_bytes(self) -> bytes:
+ return rust_x509.encode_extension_value(self)
+
+
+class PolicyInformation:
+ def __init__(
+ self,
+ policy_identifier: ObjectIdentifier,
+ policy_qualifiers: typing.Optional[
+ typing.Iterable[typing.Union[str, UserNotice]]
+ ],
+ ) -> None:
+ if not isinstance(policy_identifier, ObjectIdentifier):
+ raise TypeError("policy_identifier must be an ObjectIdentifier")
+
+ self._policy_identifier = policy_identifier
+
+ if policy_qualifiers is not None:
+ policy_qualifiers = list(policy_qualifiers)
+ if not all(
+ isinstance(x, (str, UserNotice)) for x in policy_qualifiers
+ ):
+ raise TypeError(
+ "policy_qualifiers must be a list of strings and/or "
+ "UserNotice objects or None"
+ )
+
+ self._policy_qualifiers = policy_qualifiers
+
+ def __repr__(self) -> str:
+ return (
+ "".format(self)
+ )
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, PolicyInformation):
+ return NotImplemented
+
+ return (
+ self.policy_identifier == other.policy_identifier
+ and self.policy_qualifiers == other.policy_qualifiers
+ )
+
+ def __hash__(self) -> int:
+ if self.policy_qualifiers is not None:
+ pq: typing.Optional[
+ typing.Tuple[typing.Union[str, UserNotice], ...]
+ ] = tuple(self.policy_qualifiers)
+ else:
+ pq = None
+
+ return hash((self.policy_identifier, pq))
+
+ @property
+ def policy_identifier(self) -> ObjectIdentifier:
+ return self._policy_identifier
+
+ @property
+ def policy_qualifiers(
+ self,
+ ) -> typing.Optional[typing.List[typing.Union[str, UserNotice]]]:
+ return self._policy_qualifiers
+
+
+class UserNotice:
+ def __init__(
+ self,
+ notice_reference: typing.Optional[NoticeReference],
+ explicit_text: typing.Optional[str],
+ ) -> None:
+ if notice_reference and not isinstance(
+ notice_reference, NoticeReference
+ ):
+ raise TypeError(
+ "notice_reference must be None or a NoticeReference"
+ )
+
+ self._notice_reference = notice_reference
+ self._explicit_text = explicit_text
+
+ def __repr__(self) -> str:
+ return (
+ "".format(self)
+ )
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, UserNotice):
+ return NotImplemented
+
+ return (
+ self.notice_reference == other.notice_reference
+ and self.explicit_text == other.explicit_text
+ )
+
+ def __hash__(self) -> int:
+ return hash((self.notice_reference, self.explicit_text))
+
+ @property
+ def notice_reference(self) -> typing.Optional[NoticeReference]:
+ return self._notice_reference
+
+ @property
+ def explicit_text(self) -> typing.Optional[str]:
+ return self._explicit_text
+
+
+class NoticeReference:
+ def __init__(
+ self,
+ organization: typing.Optional[str],
+ notice_numbers: typing.Iterable[int],
+ ) -> None:
+ self._organization = organization
+ notice_numbers = list(notice_numbers)
+ if not all(isinstance(x, int) for x in notice_numbers):
+ raise TypeError("notice_numbers must be a list of integers")
+
+ self._notice_numbers = notice_numbers
+
+ def __repr__(self) -> str:
+ return (
+ "".format(self)
+ )
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, NoticeReference):
+ return NotImplemented
+
+ return (
+ self.organization == other.organization
+ and self.notice_numbers == other.notice_numbers
+ )
+
+ def __hash__(self) -> int:
+ return hash((self.organization, tuple(self.notice_numbers)))
+
+ @property
+ def organization(self) -> typing.Optional[str]:
+ return self._organization
+
+ @property
+ def notice_numbers(self) -> typing.List[int]:
+ return self._notice_numbers
+
+
+class ExtendedKeyUsage(ExtensionType):
+ oid = ExtensionOID.EXTENDED_KEY_USAGE
+
+ def __init__(self, usages: typing.Iterable[ObjectIdentifier]) -> None:
+ usages = list(usages)
+ if not all(isinstance(x, ObjectIdentifier) for x in usages):
+ raise TypeError(
+ "Every item in the usages list must be an ObjectIdentifier"
+ )
+
+ self._usages = usages
+
+ __len__, __iter__, __getitem__ = _make_sequence_methods("_usages")
+
+ def __repr__(self) -> str:
+ return f""
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, ExtendedKeyUsage):
+ return NotImplemented
+
+ return self._usages == other._usages
+
+ def __hash__(self) -> int:
+ return hash(tuple(self._usages))
+
+ def public_bytes(self) -> bytes:
+ return rust_x509.encode_extension_value(self)
+
+
+class OCSPNoCheck(ExtensionType):
+ oid = ExtensionOID.OCSP_NO_CHECK
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, OCSPNoCheck):
+ return NotImplemented
+
+ return True
+
+ def __hash__(self) -> int:
+ return hash(OCSPNoCheck)
+
+ def __repr__(self) -> str:
+ return ""
+
+ def public_bytes(self) -> bytes:
+ return rust_x509.encode_extension_value(self)
+
+
+class PrecertPoison(ExtensionType):
+ oid = ExtensionOID.PRECERT_POISON
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, PrecertPoison):
+ return NotImplemented
+
+ return True
+
+ def __hash__(self) -> int:
+ return hash(PrecertPoison)
+
+ def __repr__(self) -> str:
+ return ""
+
+ def public_bytes(self) -> bytes:
+ return rust_x509.encode_extension_value(self)
+
+
+class TLSFeature(ExtensionType):
+ oid = ExtensionOID.TLS_FEATURE
+
+ def __init__(self, features: typing.Iterable[TLSFeatureType]) -> None:
+ features = list(features)
+ if (
+ not all(isinstance(x, TLSFeatureType) for x in features)
+ or len(features) == 0
+ ):
+ raise TypeError(
+ "features must be a list of elements from the TLSFeatureType "
+ "enum"
+ )
+
+ self._features = features
+
+ __len__, __iter__, __getitem__ = _make_sequence_methods("_features")
+
+ def __repr__(self) -> str:
+ return f""
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, TLSFeature):
+ return NotImplemented
+
+ return self._features == other._features
+
+ def __hash__(self) -> int:
+ return hash(tuple(self._features))
+
+ def public_bytes(self) -> bytes:
+ return rust_x509.encode_extension_value(self)
+
+
+class TLSFeatureType(utils.Enum):
+ # status_request is defined in RFC 6066 and is used for what is commonly
+ # called OCSP Must-Staple when present in the TLS Feature extension in an
+ # X.509 certificate.
+ status_request = 5
+ # status_request_v2 is defined in RFC 6961 and allows multiple OCSP
+ # responses to be provided. It is not currently in use by clients or
+ # servers.
+ status_request_v2 = 17
+
+
+_TLS_FEATURE_TYPE_TO_ENUM = {x.value: x for x in TLSFeatureType}
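+
+ # Illustrative sketch: the common "OCSP Must-Staple" marker described in the
+ # enum comments above is expressed as
+ # TLSFeature([TLSFeatureType.status_request]).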
+
+
+class InhibitAnyPolicy(ExtensionType):
+ oid = ExtensionOID.INHIBIT_ANY_POLICY
+
+ def __init__(self, skip_certs: int) -> None:
+ if not isinstance(skip_certs, int):
+ raise TypeError("skip_certs must be an integer")
+
+ if skip_certs < 0:
+ raise ValueError("skip_certs must be a non-negative integer")
+
+ self._skip_certs = skip_certs
+
+ def __repr__(self) -> str:
+ return f""
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, InhibitAnyPolicy):
+ return NotImplemented
+
+ return self.skip_certs == other.skip_certs
+
+ def __hash__(self) -> int:
+ return hash(self.skip_certs)
+
+ @property
+ def skip_certs(self) -> int:
+ return self._skip_certs
+
+ def public_bytes(self) -> bytes:
+ return rust_x509.encode_extension_value(self)
+
+
+class KeyUsage(ExtensionType):
+ oid = ExtensionOID.KEY_USAGE
+
+ def __init__(
+ self,
+ digital_signature: bool,
+ content_commitment: bool,
+ key_encipherment: bool,
+ data_encipherment: bool,
+ key_agreement: bool,
+ key_cert_sign: bool,
+ crl_sign: bool,
+ encipher_only: bool,
+ decipher_only: bool,
+ ) -> None:
+ if not key_agreement and (encipher_only or decipher_only):
+ raise ValueError(
+ "encipher_only and decipher_only can only be true when "
+ "key_agreement is true"
+ )
+
+ self._digital_signature = digital_signature
+ self._content_commitment = content_commitment
+ self._key_encipherment = key_encipherment
+ self._data_encipherment = data_encipherment
+ self._key_agreement = key_agreement
+ self._key_cert_sign = key_cert_sign
+ self._crl_sign = crl_sign
+ self._encipher_only = encipher_only
+ self._decipher_only = decipher_only
+
+ @property
+ def digital_signature(self) -> bool:
+ return self._digital_signature
+
+ @property
+ def content_commitment(self) -> bool:
+ return self._content_commitment
+
+ @property
+ def key_encipherment(self) -> bool:
+ return self._key_encipherment
+
+ @property
+ def data_encipherment(self) -> bool:
+ return self._data_encipherment
+
+ @property
+ def key_agreement(self) -> bool:
+ return self._key_agreement
+
+ @property
+ def key_cert_sign(self) -> bool:
+ return self._key_cert_sign
+
+ @property
+ def crl_sign(self) -> bool:
+ return self._crl_sign
+
+ @property
+ def encipher_only(self) -> bool:
+ if not self.key_agreement:
+ raise ValueError(
+ "encipher_only is undefined unless key_agreement is true"
+ )
+ else:
+ return self._encipher_only
+
+ @property
+ def decipher_only(self) -> bool:
+ if not self.key_agreement:
+ raise ValueError(
+ "decipher_only is undefined unless key_agreement is true"
+ )
+ else:
+ return self._decipher_only
+
+ def __repr__(self) -> str:
+ try:
+ encipher_only = self.encipher_only
+ decipher_only = self.decipher_only
+ except ValueError:
+ # Users found None confusing because even though encipher/decipher
+ # have no meaning unless key_agreement is true, to construct an
+ # instance of the class you still need to pass False.
+ encipher_only = False
+ decipher_only = False
+
+ return (
+ ""
+ ).format(self, encipher_only, decipher_only)
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, KeyUsage):
+ return NotImplemented
+
+ return (
+ self.digital_signature == other.digital_signature
+ and self.content_commitment == other.content_commitment
+ and self.key_encipherment == other.key_encipherment
+ and self.data_encipherment == other.data_encipherment
+ and self.key_agreement == other.key_agreement
+ and self.key_cert_sign == other.key_cert_sign
+ and self.crl_sign == other.crl_sign
+ and self._encipher_only == other._encipher_only
+ and self._decipher_only == other._decipher_only
+ )
+
+ def __hash__(self) -> int:
+ return hash(
+ (
+ self.digital_signature,
+ self.content_commitment,
+ self.key_encipherment,
+ self.data_encipherment,
+ self.key_agreement,
+ self.key_cert_sign,
+ self.crl_sign,
+ self._encipher_only,
+ self._decipher_only,
+ )
+ )
+
+ def public_bytes(self) -> bytes:
+ return rust_x509.encode_extension_value(self)
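+
+ # Illustrative sketch: a typical CA key-usage value (all names are the
+ # constructor's own keyword arguments):
+ #
+ #     KeyUsage(
+ #         digital_signature=True,
+ #         content_commitment=False,
+ #         key_encipherment=False,
+ #         data_encipherment=False,
+ #         key_agreement=False,
+ #         key_cert_sign=True,
+ #         crl_sign=True,
+ #         encipher_only=False,
+ #         decipher_only=False,
+ #     )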
+
+
+class NameConstraints(ExtensionType):
+ oid = ExtensionOID.NAME_CONSTRAINTS
+
+ def __init__(
+ self,
+ permitted_subtrees: typing.Optional[typing.Iterable[GeneralName]],
+ excluded_subtrees: typing.Optional[typing.Iterable[GeneralName]],
+ ) -> None:
+ if permitted_subtrees is not None:
+ permitted_subtrees = list(permitted_subtrees)
+ if not permitted_subtrees:
+ raise ValueError(
+ "permitted_subtrees must be a non-empty list or None"
+ )
+ if not all(isinstance(x, GeneralName) for x in permitted_subtrees):
+ raise TypeError(
+ "permitted_subtrees must be a list of GeneralName objects "
+ "or None"
+ )
+
+ self._validate_tree(permitted_subtrees)
+
+ if excluded_subtrees is not None:
+ excluded_subtrees = list(excluded_subtrees)
+ if not excluded_subtrees:
+ raise ValueError(
+ "excluded_subtrees must be a non-empty list or None"
+ )
+ if not all(isinstance(x, GeneralName) for x in excluded_subtrees):
+ raise TypeError(
+ "excluded_subtrees must be a list of GeneralName objects "
+ "or None"
+ )
+
+ self._validate_tree(excluded_subtrees)
+
+ if permitted_subtrees is None and excluded_subtrees is None:
+ raise ValueError(
+ "At least one of permitted_subtrees and excluded_subtrees "
+ "must not be None"
+ )
+
+ self._permitted_subtrees = permitted_subtrees
+ self._excluded_subtrees = excluded_subtrees
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, NameConstraints):
+ return NotImplemented
+
+ return (
+ self.excluded_subtrees == other.excluded_subtrees
+ and self.permitted_subtrees == other.permitted_subtrees
+ )
+
+ def _validate_tree(self, tree: typing.Iterable[GeneralName]) -> None:
+ self._validate_ip_name(tree)
+ self._validate_dns_name(tree)
+
+ def _validate_ip_name(self, tree: typing.Iterable[GeneralName]) -> None:
+ if any(
+ isinstance(name, IPAddress)
+ and not isinstance(
+ name.value, (ipaddress.IPv4Network, ipaddress.IPv6Network)
+ )
+ for name in tree
+ ):
+ raise TypeError(
+ "IPAddress name constraints must be an IPv4Network or"
+ " IPv6Network object"
+ )
+
+ def _validate_dns_name(self, tree: typing.Iterable[GeneralName]) -> None:
+ if any(
+ isinstance(name, DNSName) and "*" in name.value for name in tree
+ ):
+ raise ValueError(
+ "DNSName name constraints must not contain the '*' wildcard"
+ " character"
+ )
+
+ def __repr__(self) -> str:
+ return (
+ "".format(self)
+ )
+
+ def __hash__(self) -> int:
+ if self.permitted_subtrees is not None:
+ ps: typing.Optional[typing.Tuple[GeneralName, ...]] = tuple(
+ self.permitted_subtrees
+ )
+ else:
+ ps = None
+
+ if self.excluded_subtrees is not None:
+ es: typing.Optional[typing.Tuple[GeneralName, ...]] = tuple(
+ self.excluded_subtrees
+ )
+ else:
+ es = None
+
+ return hash((ps, es))
+
+ @property
+ def permitted_subtrees(
+ self,
+ ) -> typing.Optional[typing.List[GeneralName]]:
+ return self._permitted_subtrees
+
+ @property
+ def excluded_subtrees(
+ self,
+ ) -> typing.Optional[typing.List[GeneralName]]:
+ return self._excluded_subtrees
+
+ def public_bytes(self) -> bytes:
+ return rust_x509.encode_extension_value(self)
+
+
+class Extension(typing.Generic[ExtensionTypeVar]):
+ def __init__(
+ self, oid: ObjectIdentifier, critical: bool, value: ExtensionTypeVar
+ ) -> None:
+ if not isinstance(oid, ObjectIdentifier):
+ raise TypeError(
+ "oid argument must be an ObjectIdentifier instance."
+ )
+
+ if not isinstance(critical, bool):
+ raise TypeError("critical must be a boolean value")
+
+ self._oid = oid
+ self._critical = critical
+ self._value = value
+
+ @property
+ def oid(self) -> ObjectIdentifier:
+ return self._oid
+
+ @property
+ def critical(self) -> bool:
+ return self._critical
+
+ @property
+ def value(self) -> ExtensionTypeVar:
+ return self._value
+
+ def __repr__(self) -> str:
+ return (
+ ""
+ ).format(self)
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, Extension):
+ return NotImplemented
+
+ return (
+ self.oid == other.oid
+ and self.critical == other.critical
+ and self.value == other.value
+ )
+
+ def __hash__(self) -> int:
+ return hash((self.oid, self.critical, self.value))
+
+
+class GeneralNames:
+ def __init__(self, general_names: typing.Iterable[GeneralName]) -> None:
+ general_names = list(general_names)
+ if not all(isinstance(x, GeneralName) for x in general_names):
+ raise TypeError(
+ "Every item in the general_names list must be an "
+ "object conforming to the GeneralName interface"
+ )
+
+ self._general_names = general_names
+
+ __len__, __iter__, __getitem__ = _make_sequence_methods("_general_names")
+
+ @typing.overload
+ def get_values_for_type(
+ self,
+ type: typing.Union[
+ typing.Type[DNSName],
+ typing.Type[UniformResourceIdentifier],
+ typing.Type[RFC822Name],
+ ],
+ ) -> typing.List[str]:
+ ...
+
+ @typing.overload
+ def get_values_for_type(
+ self,
+ type: typing.Type[DirectoryName],
+ ) -> typing.List[Name]:
+ ...
+
+ @typing.overload
+ def get_values_for_type(
+ self,
+ type: typing.Type[RegisteredID],
+ ) -> typing.List[ObjectIdentifier]:
+ ...
+
+ @typing.overload
+ def get_values_for_type(
+ self, type: typing.Type[IPAddress]
+ ) -> typing.List[_IPAddressTypes]:
+ ...
+
+ @typing.overload
+ def get_values_for_type(
+ self, type: typing.Type[OtherName]
+ ) -> typing.List[OtherName]:
+ ...
+
+ def get_values_for_type(
+ self,
+ type: typing.Union[
+ typing.Type[DNSName],
+ typing.Type[DirectoryName],
+ typing.Type[IPAddress],
+ typing.Type[OtherName],
+ typing.Type[RFC822Name],
+ typing.Type[RegisteredID],
+ typing.Type[UniformResourceIdentifier],
+ ],
+ ) -> typing.Union[
+ typing.List[_IPAddressTypes],
+ typing.List[str],
+ typing.List[OtherName],
+ typing.List[Name],
+ typing.List[ObjectIdentifier],
+ ]:
+ # Return the value of each GeneralName, except for OtherName instances
+ # which we return directly because it has two important properties not
+ # just one value.
+ objs = (i for i in self if isinstance(i, type))
+ if type != OtherName:
+ return [i.value for i in objs]
+ return list(objs)
+
+ def __repr__(self) -> str:
+ return f""
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, GeneralNames):
+ return NotImplemented
+
+ return self._general_names == other._general_names
+
+ def __hash__(self) -> int:
+ return hash(tuple(self._general_names))
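+
+ # Illustrative sketch of get_values_for_type: per the comment above, inner
+ # values are unwrapped except for OtherName instances, e.g.
+ #
+ #     gns = GeneralNames([DNSName("example.com"), DNSName("www.example.com")])
+ #     assert gns.get_values_for_type(DNSName) == ["example.com", "www.example.com"]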
+
+
+class SubjectAlternativeName(ExtensionType):
+ oid = ExtensionOID.SUBJECT_ALTERNATIVE_NAME
+
+ def __init__(self, general_names: typing.Iterable[GeneralName]) -> None:
+ self._general_names = GeneralNames(general_names)
+
+ __len__, __iter__, __getitem__ = _make_sequence_methods("_general_names")
+
+ @typing.overload
+ def get_values_for_type(
+ self,
+ type: typing.Union[
+ typing.Type[DNSName],
+ typing.Type[UniformResourceIdentifier],
+ typing.Type[RFC822Name],
+ ],
+ ) -> typing.List[str]:
+ ...
+
+ @typing.overload
+ def get_values_for_type(
+ self,
+ type: typing.Type[DirectoryName],
+ ) -> typing.List[Name]:
+ ...
+
+ @typing.overload
+ def get_values_for_type(
+ self,
+ type: typing.Type[RegisteredID],
+ ) -> typing.List[ObjectIdentifier]:
+ ...
+
+ @typing.overload
+ def get_values_for_type(
+ self, type: typing.Type[IPAddress]
+ ) -> typing.List[_IPAddressTypes]:
+ ...
+
+ @typing.overload
+ def get_values_for_type(
+ self, type: typing.Type[OtherName]
+ ) -> typing.List[OtherName]:
+ ...
+
+ def get_values_for_type(
+ self,
+ type: typing.Union[
+ typing.Type[DNSName],
+ typing.Type[DirectoryName],
+ typing.Type[IPAddress],
+ typing.Type[OtherName],
+ typing.Type[RFC822Name],
+ typing.Type[RegisteredID],
+ typing.Type[UniformResourceIdentifier],
+ ],
+ ) -> typing.Union[
+ typing.List[_IPAddressTypes],
+ typing.List[str],
+ typing.List[OtherName],
+ typing.List[Name],
+ typing.List[ObjectIdentifier],
+ ]:
+ return self._general_names.get_values_for_type(type)
+
+ def __repr__(self) -> str:
+ return f""
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, SubjectAlternativeName):
+ return NotImplemented
+
+ return self._general_names == other._general_names
+
+ def __hash__(self) -> int:
+ return hash(self._general_names)
+
+ def public_bytes(self) -> bytes:
+ return rust_x509.encode_extension_value(self)
+
+
+class IssuerAlternativeName(ExtensionType):
+ oid = ExtensionOID.ISSUER_ALTERNATIVE_NAME
+
+ def __init__(self, general_names: typing.Iterable[GeneralName]) -> None:
+ self._general_names = GeneralNames(general_names)
+
+ __len__, __iter__, __getitem__ = _make_sequence_methods("_general_names")
+
+ @typing.overload
+ def get_values_for_type(
+ self,
+ type: typing.Union[
+ typing.Type[DNSName],
+ typing.Type[UniformResourceIdentifier],
+ typing.Type[RFC822Name],
+ ],
+ ) -> typing.List[str]:
+ ...
+
+ @typing.overload
+ def get_values_for_type(
+ self,
+ type: typing.Type[DirectoryName],
+ ) -> typing.List[Name]:
+ ...
+
+ @typing.overload
+ def get_values_for_type(
+ self,
+ type: typing.Type[RegisteredID],
+ ) -> typing.List[ObjectIdentifier]:
+ ...
+
+ @typing.overload
+ def get_values_for_type(
+ self, type: typing.Type[IPAddress]
+ ) -> typing.List[_IPAddressTypes]:
+ ...
+
+ @typing.overload
+ def get_values_for_type(
+ self, type: typing.Type[OtherName]
+ ) -> typing.List[OtherName]:
+ ...
+
+ def get_values_for_type(
+ self,
+ type: typing.Union[
+ typing.Type[DNSName],
+ typing.Type[DirectoryName],
+ typing.Type[IPAddress],
+ typing.Type[OtherName],
+ typing.Type[RFC822Name],
+ typing.Type[RegisteredID],
+ typing.Type[UniformResourceIdentifier],
+ ],
+ ) -> typing.Union[
+ typing.List[_IPAddressTypes],
+ typing.List[str],
+ typing.List[OtherName],
+ typing.List[Name],
+ typing.List[ObjectIdentifier],
+ ]:
+ return self._general_names.get_values_for_type(type)
+
+ def __repr__(self) -> str:
+ return f""
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, IssuerAlternativeName):
+ return NotImplemented
+
+ return self._general_names == other._general_names
+
+ def __hash__(self) -> int:
+ return hash(self._general_names)
+
+ def public_bytes(self) -> bytes:
+ return rust_x509.encode_extension_value(self)
+
+
+class CertificateIssuer(ExtensionType):
+ oid = CRLEntryExtensionOID.CERTIFICATE_ISSUER
+
+ def __init__(self, general_names: typing.Iterable[GeneralName]) -> None:
+ self._general_names = GeneralNames(general_names)
+
+ __len__, __iter__, __getitem__ = _make_sequence_methods("_general_names")
+
+ @typing.overload
+ def get_values_for_type(
+ self,
+ type: typing.Union[
+ typing.Type[DNSName],
+ typing.Type[UniformResourceIdentifier],
+ typing.Type[RFC822Name],
+ ],
+ ) -> typing.List[str]:
+ ...
+
+ @typing.overload
+ def get_values_for_type(
+ self,
+ type: typing.Type[DirectoryName],
+ ) -> typing.List[Name]:
+ ...
+
+ @typing.overload
+ def get_values_for_type(
+ self,
+ type: typing.Type[RegisteredID],
+ ) -> typing.List[ObjectIdentifier]:
+ ...
+
+ @typing.overload
+ def get_values_for_type(
+ self, type: typing.Type[IPAddress]
+ ) -> typing.List[_IPAddressTypes]:
+ ...
+
+ @typing.overload
+ def get_values_for_type(
+ self, type: typing.Type[OtherName]
+ ) -> typing.List[OtherName]:
+ ...
+
+ def get_values_for_type(
+ self,
+ type: typing.Union[
+ typing.Type[DNSName],
+ typing.Type[DirectoryName],
+ typing.Type[IPAddress],
+ typing.Type[OtherName],
+ typing.Type[RFC822Name],
+ typing.Type[RegisteredID],
+ typing.Type[UniformResourceIdentifier],
+ ],
+ ) -> typing.Union[
+ typing.List[_IPAddressTypes],
+ typing.List[str],
+ typing.List[OtherName],
+ typing.List[Name],
+ typing.List[ObjectIdentifier],
+ ]:
+ return self._general_names.get_values_for_type(type)
+
+ def __repr__(self) -> str:
+ return f""
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, CertificateIssuer):
+ return NotImplemented
+
+ return self._general_names == other._general_names
+
+ def __hash__(self) -> int:
+ return hash(self._general_names)
+
+ def public_bytes(self) -> bytes:
+ return rust_x509.encode_extension_value(self)
+
+
+class CRLReason(ExtensionType):
+ oid = CRLEntryExtensionOID.CRL_REASON
+
+ def __init__(self, reason: ReasonFlags) -> None:
+ if not isinstance(reason, ReasonFlags):
+ raise TypeError("reason must be an element from ReasonFlags")
+
+ self._reason = reason
+
+ def __repr__(self) -> str:
+ return f""
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, CRLReason):
+ return NotImplemented
+
+ return self.reason == other.reason
+
+ def __hash__(self) -> int:
+ return hash(self.reason)
+
+ @property
+ def reason(self) -> ReasonFlags:
+ return self._reason
+
+ def public_bytes(self) -> bytes:
+ return rust_x509.encode_extension_value(self)
+
+
+class InvalidityDate(ExtensionType):
+ oid = CRLEntryExtensionOID.INVALIDITY_DATE
+
+ def __init__(self, invalidity_date: datetime.datetime) -> None:
+ if not isinstance(invalidity_date, datetime.datetime):
+ raise TypeError("invalidity_date must be a datetime.datetime")
+
+ self._invalidity_date = invalidity_date
+
+ def __repr__(self) -> str:
+ return "".format(
+ self._invalidity_date
+ )
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, InvalidityDate):
+ return NotImplemented
+
+ return self.invalidity_date == other.invalidity_date
+
+ def __hash__(self) -> int:
+ return hash(self.invalidity_date)
+
+ @property
+ def invalidity_date(self) -> datetime.datetime:
+ return self._invalidity_date
+
+ def public_bytes(self) -> bytes:
+ return rust_x509.encode_extension_value(self)
+
+
+class PrecertificateSignedCertificateTimestamps(ExtensionType):
+ oid = ExtensionOID.PRECERT_SIGNED_CERTIFICATE_TIMESTAMPS
+
+ def __init__(
+ self,
+ signed_certificate_timestamps: typing.Iterable[
+ SignedCertificateTimestamp
+ ],
+ ) -> None:
+ signed_certificate_timestamps = list(signed_certificate_timestamps)
+ if not all(
+ isinstance(sct, SignedCertificateTimestamp)
+ for sct in signed_certificate_timestamps
+ ):
+ raise TypeError(
+ "Every item in the signed_certificate_timestamps list must be "
+ "a SignedCertificateTimestamp"
+ )
+ self._signed_certificate_timestamps = signed_certificate_timestamps
+
+ __len__, __iter__, __getitem__ = _make_sequence_methods(
+ "_signed_certificate_timestamps"
+ )
+
+ def __repr__(self) -> str:
+ return "".format(
+ list(self)
+ )
+
+ def __hash__(self) -> int:
+ return hash(tuple(self._signed_certificate_timestamps))
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, PrecertificateSignedCertificateTimestamps):
+ return NotImplemented
+
+ return (
+ self._signed_certificate_timestamps
+ == other._signed_certificate_timestamps
+ )
+
+ def public_bytes(self) -> bytes:
+ return rust_x509.encode_extension_value(self)
+
+
+class SignedCertificateTimestamps(ExtensionType):
+ oid = ExtensionOID.SIGNED_CERTIFICATE_TIMESTAMPS
+
+ def __init__(
+ self,
+ signed_certificate_timestamps: typing.Iterable[
+ SignedCertificateTimestamp
+ ],
+ ) -> None:
+ signed_certificate_timestamps = list(signed_certificate_timestamps)
+ if not all(
+ isinstance(sct, SignedCertificateTimestamp)
+ for sct in signed_certificate_timestamps
+ ):
+ raise TypeError(
+ "Every item in the signed_certificate_timestamps list must be "
+ "a SignedCertificateTimestamp"
+ )
+ self._signed_certificate_timestamps = signed_certificate_timestamps
+
+ __len__, __iter__, __getitem__ = _make_sequence_methods(
+ "_signed_certificate_timestamps"
+ )
+
+ def __repr__(self) -> str:
+ return f""
+
+ def __hash__(self) -> int:
+ return hash(tuple(self._signed_certificate_timestamps))
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, SignedCertificateTimestamps):
+ return NotImplemented
+
+ return (
+ self._signed_certificate_timestamps
+ == other._signed_certificate_timestamps
+ )
+
+ def public_bytes(self) -> bytes:
+ return rust_x509.encode_extension_value(self)
+
+
+class OCSPNonce(ExtensionType):
+ oid = OCSPExtensionOID.NONCE
+
+ def __init__(self, nonce: bytes) -> None:
+ if not isinstance(nonce, bytes):
+ raise TypeError("nonce must be bytes")
+
+ self._nonce = nonce
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, OCSPNonce):
+ return NotImplemented
+
+ return self.nonce == other.nonce
+
+ def __hash__(self) -> int:
+ return hash(self.nonce)
+
+ def __repr__(self) -> str:
+ return f""
+
+ @property
+ def nonce(self) -> bytes:
+ return self._nonce
+
+ def public_bytes(self) -> bytes:
+ return rust_x509.encode_extension_value(self)
+
+
+class OCSPAcceptableResponses(ExtensionType):
+ oid = OCSPExtensionOID.ACCEPTABLE_RESPONSES
+
+ def __init__(self, responses: typing.Iterable[ObjectIdentifier]) -> None:
+ responses = list(responses)
+ if any(not isinstance(r, ObjectIdentifier) for r in responses):
+ raise TypeError("All responses must be ObjectIdentifiers")
+
+ self._responses = responses
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, OCSPAcceptableResponses):
+ return NotImplemented
+
+ return self._responses == other._responses
+
+ def __hash__(self) -> int:
+ return hash(tuple(self._responses))
+
+ def __repr__(self) -> str:
+ return f""
+
+ def __iter__(self) -> typing.Iterator[ObjectIdentifier]:
+ return iter(self._responses)
+
+ def public_bytes(self) -> bytes:
+ return rust_x509.encode_extension_value(self)
+
+
+class IssuingDistributionPoint(ExtensionType):
+ oid = ExtensionOID.ISSUING_DISTRIBUTION_POINT
+
+ def __init__(
+ self,
+ full_name: typing.Optional[typing.Iterable[GeneralName]],
+ relative_name: typing.Optional[RelativeDistinguishedName],
+ only_contains_user_certs: bool,
+ only_contains_ca_certs: bool,
+ only_some_reasons: typing.Optional[typing.FrozenSet[ReasonFlags]],
+ indirect_crl: bool,
+ only_contains_attribute_certs: bool,
+ ) -> None:
+ if full_name is not None:
+ full_name = list(full_name)
+
+ if only_some_reasons and (
+ not isinstance(only_some_reasons, frozenset)
+ or not all(isinstance(x, ReasonFlags) for x in only_some_reasons)
+ ):
+ raise TypeError(
+ "only_some_reasons must be None or frozenset of ReasonFlags"
+ )
+
+ if only_some_reasons and (
+ ReasonFlags.unspecified in only_some_reasons
+ or ReasonFlags.remove_from_crl in only_some_reasons
+ ):
+ raise ValueError(
+ "unspecified and remove_from_crl are not valid reasons in an "
+ "IssuingDistributionPoint"
+ )
+
+ if not (
+ isinstance(only_contains_user_certs, bool)
+ and isinstance(only_contains_ca_certs, bool)
+ and isinstance(indirect_crl, bool)
+ and isinstance(only_contains_attribute_certs, bool)
+ ):
+ raise TypeError(
+ "only_contains_user_certs, only_contains_ca_certs, "
+ "indirect_crl and only_contains_attribute_certs "
+ "must all be boolean."
+ )
+
+ crl_constraints = [
+ only_contains_user_certs,
+ only_contains_ca_certs,
+ indirect_crl,
+ only_contains_attribute_certs,
+ ]
+
+ if len([x for x in crl_constraints if x]) > 1:
+ raise ValueError(
+ "Only one of the following can be set to True: "
+ "only_contains_user_certs, only_contains_ca_certs, "
+ "indirect_crl, only_contains_attribute_certs"
+ )
+
+ if not any(
+ [
+ only_contains_user_certs,
+ only_contains_ca_certs,
+ indirect_crl,
+ only_contains_attribute_certs,
+ full_name,
+ relative_name,
+ only_some_reasons,
+ ]
+ ):
+ raise ValueError(
+ "Cannot create empty extension: "
+ "if only_contains_user_certs, only_contains_ca_certs, "
+ "indirect_crl, and only_contains_attribute_certs are all False"
+ ", then either full_name, relative_name, or only_some_reasons "
+ "must have a value."
+ )
+
+ self._only_contains_user_certs = only_contains_user_certs
+ self._only_contains_ca_certs = only_contains_ca_certs
+ self._indirect_crl = indirect_crl
+ self._only_contains_attribute_certs = only_contains_attribute_certs
+ self._only_some_reasons = only_some_reasons
+ self._full_name = full_name
+ self._relative_name = relative_name
+
+ def __repr__(self) -> str:
+ return (
+ "".format(self)
+ )
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, IssuingDistributionPoint):
+ return NotImplemented
+
+ return (
+ self.full_name == other.full_name
+ and self.relative_name == other.relative_name
+ and self.only_contains_user_certs == other.only_contains_user_certs
+ and self.only_contains_ca_certs == other.only_contains_ca_certs
+ and self.only_some_reasons == other.only_some_reasons
+ and self.indirect_crl == other.indirect_crl
+ and self.only_contains_attribute_certs
+ == other.only_contains_attribute_certs
+ )
+
+ def __hash__(self) -> int:
+ return hash(
+ (
+ self.full_name,
+ self.relative_name,
+ self.only_contains_user_certs,
+ self.only_contains_ca_certs,
+ self.only_some_reasons,
+ self.indirect_crl,
+ self.only_contains_attribute_certs,
+ )
+ )
+
+ @property
+ def full_name(self) -> typing.Optional[typing.List[GeneralName]]:
+ return self._full_name
+
+ @property
+ def relative_name(self) -> typing.Optional[RelativeDistinguishedName]:
+ return self._relative_name
+
+ @property
+ def only_contains_user_certs(self) -> bool:
+ return self._only_contains_user_certs
+
+ @property
+ def only_contains_ca_certs(self) -> bool:
+ return self._only_contains_ca_certs
+
+ @property
+ def only_some_reasons(
+ self,
+ ) -> typing.Optional[typing.FrozenSet[ReasonFlags]]:
+ return self._only_some_reasons
+
+ @property
+ def indirect_crl(self) -> bool:
+ return self._indirect_crl
+
+ @property
+ def only_contains_attribute_certs(self) -> bool:
+ return self._only_contains_attribute_certs
+
+ def public_bytes(self) -> bytes:
+ return rust_x509.encode_extension_value(self)
+
+
+class MSCertificateTemplate(ExtensionType):
+ oid = ExtensionOID.MS_CERTIFICATE_TEMPLATE
+
+ def __init__(
+ self,
+ template_id: ObjectIdentifier,
+ major_version: typing.Optional[int],
+ minor_version: typing.Optional[int],
+ ) -> None:
+ if not isinstance(template_id, ObjectIdentifier):
+ raise TypeError("oid must be an ObjectIdentifier")
+ self._template_id = template_id
+ if (
+ major_version is not None and not isinstance(major_version, int)
+ ) or (
+ minor_version is not None and not isinstance(minor_version, int)
+ ):
+ raise TypeError(
+ "major_version and minor_version must be integers or None"
+ )
+ self._major_version = major_version
+ self._minor_version = minor_version
+
+ @property
+ def template_id(self) -> ObjectIdentifier:
+ return self._template_id
+
+ @property
+ def major_version(self) -> typing.Optional[int]:
+ return self._major_version
+
+ @property
+ def minor_version(self) -> typing.Optional[int]:
+ return self._minor_version
+
+ def __repr__(self) -> str:
+ return (
+ f""
+ )
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, MSCertificateTemplate):
+ return NotImplemented
+
+ return (
+ self.template_id == other.template_id
+ and self.major_version == other.major_version
+ and self.minor_version == other.minor_version
+ )
+
+ def __hash__(self) -> int:
+ return hash((self.template_id, self.major_version, self.minor_version))
+
+ def public_bytes(self) -> bytes:
+ return rust_x509.encode_extension_value(self)
+
+
+class UnrecognizedExtension(ExtensionType):
+ def __init__(self, oid: ObjectIdentifier, value: bytes) -> None:
+ if not isinstance(oid, ObjectIdentifier):
+ raise TypeError("oid must be an ObjectIdentifier")
+ self._oid = oid
+ self._value = value
+
+ @property
+ def oid(self) -> ObjectIdentifier: # type: ignore[override]
+ return self._oid
+
+ @property
+ def value(self) -> bytes:
+ return self._value
+
+ def __repr__(self) -> str:
+ return (
+ "".format(self)
+ )
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, UnrecognizedExtension):
+ return NotImplemented
+
+ return self.oid == other.oid and self.value == other.value
+
+ def __hash__(self) -> int:
+ return hash((self.oid, self.value))
+
+ def public_bytes(self) -> bytes:
+ return self.value
diff --git a/Backend/venv/lib/python3.12/site-packages/cryptography/x509/general_name.py b/Backend/venv/lib/python3.12/site-packages/cryptography/x509/general_name.py
new file mode 100644
index 00000000..79271afb
--- /dev/null
+++ b/Backend/venv/lib/python3.12/site-packages/cryptography/x509/general_name.py
@@ -0,0 +1,283 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import abc
+import ipaddress
+import typing
+from email.utils import parseaddr
+
+from cryptography.x509.name import Name
+from cryptography.x509.oid import ObjectIdentifier
+
+_IPAddressTypes = typing.Union[
+ ipaddress.IPv4Address,
+ ipaddress.IPv6Address,
+ ipaddress.IPv4Network,
+ ipaddress.IPv6Network,
+]
+
+
+class UnsupportedGeneralNameType(Exception):
+ pass
+
+
+class GeneralName(metaclass=abc.ABCMeta):
+ @property
+ @abc.abstractmethod
+ def value(self) -> typing.Any:
+ """
+ Return the value of the object
+ """
+
+
+class RFC822Name(GeneralName):
+ def __init__(self, value: str) -> None:
+ if isinstance(value, str):
+ try:
+ value.encode("ascii")
+ except UnicodeEncodeError:
+ raise ValueError(
+ "RFC822Name values should be passed as an A-label string. "
+ "This means unicode characters should be encoded via "
+ "a library like idna."
+ )
+ else:
+ raise TypeError("value must be string")
+
+ name, address = parseaddr(value)
+ if name or not address:
+ # parseaddr has found a name (e.g. Name <email>) or the entire
+ # value is an empty string.
+ raise ValueError("Invalid rfc822name value")
+
+ self._value = value
+
+ @property
+ def value(self) -> str:
+ return self._value
+
+ @classmethod
+ def _init_without_validation(cls, value: str) -> RFC822Name:
+ instance = cls.__new__(cls)
+ instance._value = value
+ return instance
+
+ def __repr__(self) -> str:
+ return f""
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, RFC822Name):
+ return NotImplemented
+
+ return self.value == other.value
+
+ def __hash__(self) -> int:
+ return hash(self.value)
+
+
+class DNSName(GeneralName):
+ def __init__(self, value: str) -> None:
+ if isinstance(value, str):
+ try:
+ value.encode("ascii")
+ except UnicodeEncodeError:
+ raise ValueError(
+ "DNSName values should be passed as an A-label string. "
+ "This means unicode characters should be encoded via "
+ "a library like idna."
+ )
+ else:
+ raise TypeError("value must be string")
+
+ self._value = value
+
+ @property
+ def value(self) -> str:
+ return self._value
+
+ @classmethod
+ def _init_without_validation(cls, value: str) -> DNSName:
+ instance = cls.__new__(cls)
+ instance._value = value
+ return instance
+
+ def __repr__(self) -> str:
+ return f""
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, DNSName):
+ return NotImplemented
+
+ return self.value == other.value
+
+ def __hash__(self) -> int:
+ return hash(self.value)
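+
+ # Illustrative sketch (assumes the third-party `idna` package): unicode
+ # hostnames are converted to A-label strings before construction, e.g.
+ #
+ #     DNSName(idna.encode("bücher.example").decode("ascii"))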
+
+
+class UniformResourceIdentifier(GeneralName):
+ def __init__(self, value: str) -> None:
+ if isinstance(value, str):
+ try:
+ value.encode("ascii")
+ except UnicodeEncodeError:
+ raise ValueError(
+ "URI values should be passed as an A-label string. "
+ "This means unicode characters should be encoded via "
+ "a library like idna."
+ )
+ else:
+ raise TypeError("value must be string")
+
+ self._value = value
+
+ @property
+ def value(self) -> str:
+ return self._value
+
+ @classmethod
+ def _init_without_validation(cls, value: str) -> UniformResourceIdentifier:
+ instance = cls.__new__(cls)
+ instance._value = value
+ return instance
+
+ def __repr__(self) -> str:
+ return f""
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, UniformResourceIdentifier):
+ return NotImplemented
+
+ return self.value == other.value
+
+ def __hash__(self) -> int:
+ return hash(self.value)
+
+
+class DirectoryName(GeneralName):
+ def __init__(self, value: Name) -> None:
+ if not isinstance(value, Name):
+ raise TypeError("value must be a Name")
+
+ self._value = value
+
+ @property
+ def value(self) -> Name:
+ return self._value
+
+ def __repr__(self) -> str:
+ return f""
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, DirectoryName):
+ return NotImplemented
+
+ return self.value == other.value
+
+ def __hash__(self) -> int:
+ return hash(self.value)
+
+
+class RegisteredID(GeneralName):
+ def __init__(self, value: ObjectIdentifier) -> None:
+ if not isinstance(value, ObjectIdentifier):
+ raise TypeError("value must be an ObjectIdentifier")
+
+ self._value = value
+
+ @property
+ def value(self) -> ObjectIdentifier:
+ return self._value
+
+ def __repr__(self) -> str:
+ return f""
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, RegisteredID):
+ return NotImplemented
+
+ return self.value == other.value
+
+ def __hash__(self) -> int:
+ return hash(self.value)
+
+
+class IPAddress(GeneralName):
+ def __init__(self, value: _IPAddressTypes) -> None:
+ if not isinstance(
+ value,
+ (
+ ipaddress.IPv4Address,
+ ipaddress.IPv6Address,
+ ipaddress.IPv4Network,
+ ipaddress.IPv6Network,
+ ),
+ ):
+ raise TypeError(
+ "value must be an instance of ipaddress.IPv4Address, "
+ "ipaddress.IPv6Address, ipaddress.IPv4Network, or "
+ "ipaddress.IPv6Network"
+ )
+
+ self._value = value
+
+ @property
+ def value(self) -> _IPAddressTypes:
+ return self._value
+
+ def _packed(self) -> bytes:
+ if isinstance(
+ self.value, (ipaddress.IPv4Address, ipaddress.IPv6Address)
+ ):
+ return self.value.packed
+ else:
+ return (
+ self.value.network_address.packed + self.value.netmask.packed
+ )
+
+ def __repr__(self) -> str:
+ return f"