This commit is contained in:
Iliyan Angelov
2025-11-19 12:27:01 +02:00
parent 2043ac897c
commit 34b4c969d4
469 changed files with 26870 additions and 8329 deletions

266
Backend/venv/bin/prichunkpng Executable file
View File

@@ -0,0 +1,266 @@
#!/home/gnx/Desktop/Hotel-Booking/Backend/venv/bin/python3
# prichunkpng
# Chunk editing tool.
"""
Make a new PNG by adding, deleting, or replacing particular chunks.
"""
import argparse
import collections
# https://docs.python.org/2.7/library/io.html
import io
import re
import string
import struct
import sys
import zlib
# Local module.
import png
# A chunk as handled by this tool: a 4-byte type code and a
# file-like object holding the chunk's content.
Chunk = collections.namedtuple("Chunk", "type content")


class ArgumentError(Exception):
    """A user problem with the command arguments."""
def process(out, args):
    """Process the PNG file args.input to the output, chunk by chunk.

    Chunks can be inserted, removed, replaced, or sometimes edited.
    Chunks are specified by their 4 byte Chunk Type;
    see https://www.w3.org/TR/2003/REC-PNG-20031110/#5Chunk-layout .
    The chunks in args.delete will be removed from the stream.
    The chunks in args.chunk will be inserted into the stream
    with their contents taken from the named files.

    Other options on the args object will create particular
    ancillary chunks.

    .gamma -> gAMA chunk
    .sigbit -> sBIT chunk

    Chunk types need not be official PNG chunks at all.
    Non-standard chunks can be created.
    """
    # Convert options to chunks in the args.chunk list
    if args.gamma:
        # gAMA stores gamma * 100000 as a 4-byte big-endian integer.
        v = int(round(1e5 * args.gamma))
        bs = io.BytesIO(struct.pack(">I", v))
        args.chunk.insert(0, Chunk(b"gAMA", bs))
    if args.sigbit:
        # One byte per channel.
        v = struct.pack("%dB" % len(args.sigbit), *args.sigbit)
        bs = io.BytesIO(v)
        args.chunk.insert(0, Chunk(b"sBIT", bs))
    if args.iccprofile:
        # http://www.w3.org/TR/PNG/#11iCCP
        # Fixed profile name, null separator, compression method 0,
        # then the zlib-compressed profile data.
        v = b"a color profile\x00\x00" + zlib.compress(args.iccprofile.read())
        bs = io.BytesIO(v)
        args.chunk.insert(0, Chunk(b"iCCP", bs))
    if args.transparent:
        # https://www.w3.org/TR/2003/REC-PNG-20031110/#11tRNS
        # One 2-byte sample per channel, big-endian.
        v = struct.pack(">%dH" % len(args.transparent), *args.transparent)
        bs = io.BytesIO(v)
        args.chunk.insert(0, Chunk(b"tRNS", bs))
    if args.background:
        # https://www.w3.org/TR/2003/REC-PNG-20031110/#11bKGD
        v = struct.pack(">%dH" % len(args.background), *args.background)
        bs = io.BytesIO(v)
        args.chunk.insert(0, Chunk(b"bKGD", bs))
    if args.physical:
        # https://www.w3.org/TR/PNG/#11pHYs
        numbers = re.findall(r"(\d+\.?\d*)", args.physical)
        if len(numbers) not in {1, 2}:
            raise ArgumentError("One or two numbers are required for --physical")
        xppu = float(numbers[0])
        if len(numbers) == 1:
            # A single number means square pixels.
            yppu = xppu
        else:
            yppu = float(numbers[1])
        # unit_spec 0 = unknown unit, 1 = metre.
        unit_spec = 0
        if args.physical.endswith("dpi"):
            # Convert from DPI to Pixels Per Metre
            # 1 inch is 0.0254 metres
            l = 0.0254
            xppu /= l
            yppu /= l
            unit_spec = 1
        elif args.physical.endswith("ppm"):
            unit_spec = 1
        v = struct.pack("!LLB", round(xppu), round(yppu), unit_spec)
        bs = io.BytesIO(v)
        args.chunk.insert(0, Chunk(b"pHYs", bs))
    # Create:
    # - a set of chunks to delete
    # - a dict of chunks to replace
    # - a list of chunk to add
    delete = set(args.delete)
    # The set of chunks to replace are those where the specification says
    # that there should be at most one of them.
    replacing = set([b"gAMA", b"pHYs", b"sBIT", b"PLTE", b"tRNS", b"sPLT", b"IHDR"])
    replace = dict()
    add = []
    for chunk in args.chunk:
        if chunk.type in replacing:
            replace[chunk.type] = chunk
        else:
            add.append(chunk)
    input = png.Reader(file=args.input)
    return png.write_chunks(out, edit_chunks(input.chunks(), delete, replace, add))
def edit_chunks(chunks, delete, replace, add):
    """
    Iterate over chunks, yielding edited chunks.

    `delete` is a set of chunk types to drop;
    `replace` maps chunk type to a replacement Chunk;
    `add` is a list of Chunks inserted before the image data.

    Subtle: the new chunks have to have their contents .read().
    """
    for tag, body in chunks:
        if tag in delete:
            continue
        replacement = replace.pop(tag, None)
        if replacement is not None:
            yield tag, replacement.content.read()
            continue
        if tag == b"IDAT":
            # Any replacement chunks not yet emitted by the time the
            # image data starts are inserted here instead.
            for pending in replace.values():
                yield pending.type, pending.content.read()
            replace = dict()
            # Likewise, all added chunks go just before the image data.
            for pending in add:
                yield pending.type, pending.content.read()
            add = []
        yield tag, body
def chunk_name(s):
    """
    Type check a chunk name option value.
    """
    # A chunk type code is exactly 4 ASCII letters; see
    # https://www.w3.org/TR/2003/REC-PNG-20031110/#table51
    if len(s) != 4 or not all(c in string.ascii_letters for c in s):
        raise ValueError("Chunk name must be 4 ASCII letters")
    return s.encode("ascii")
def comma_list(s):
    """
    Convert s, a comma separated list of whole numbers,
    into a sequence of int.
    """
    fields = s.split(",")
    return tuple(map(int, fields))
def hex_color(s):
    """
    Type check and convert a hex color.
    """
    digits = s[1:] if s.startswith("#") else s
    ok_lengths = (1, 2, 3, 4, 6, 12)
    if len(digits) not in ok_lengths or not set(digits) <= set(string.hexdigits):
        raise ValueError("colour must be 1,2,3,4,6, or 12 hex-digits")
    if len(digits) == 3:
        # #RGB shorthand: double each digit to get #RRGGBB.
        digits = "".join(2 * c for c in digits)
    if len(digits) in (1, 2, 4):
        # A single grey sample.
        return (int(digits, 16),)
    # Remaining lengths are 6 or 12: three samples of equal width.
    w = len(digits) // 3
    return tuple(int(digits[i : i + w], 16) for i in range(0, len(digits), w))
def main(argv=None):
    """Command line entry point: parse options and run `process`."""
    if argv is None:
        argv = sys.argv
    # Drop the program name.
    argv = argv[1:]
    parser = argparse.ArgumentParser()
    parser.add_argument("--gamma", type=float, help="Gamma value for gAMA chunk")
    parser.add_argument(
        "--physical",
        type=str,
        metavar="x[,y][dpi|ppm]",
        help="specify intended pixel size or aspect ratio",
    )
    parser.add_argument(
        "--sigbit",
        type=comma_list,
        metavar="D[,D[,D[,D]]]",
        help="Number of significant bits in each channel",
    )
    parser.add_argument(
        "--iccprofile",
        metavar="file.iccp",
        type=argparse.FileType("rb"),
        help="add an ICC Profile from a file",
    )
    parser.add_argument(
        "--transparent",
        type=hex_color,
        metavar="#RRGGBB",
        help="Specify the colour that is transparent (tRNS chunk)",
    )
    parser.add_argument(
        "--background",
        type=hex_color,
        metavar="#RRGGBB",
        help="background colour for bKGD chunk",
    )
    parser.add_argument(
        "--delete",
        action="append",
        default=[],
        type=chunk_name,
        help="delete the chunk",
    )
    # Each --chunk takes two values: a chunk type and a file name.
    parser.add_argument(
        "--chunk",
        action="append",
        nargs=2,
        default=[],
        type=str,
        help="insert chunk, taking contents from file",
    )
    parser.add_argument(
        "input", nargs="?", default="-", type=png.cli_open, metavar="PNG"
    )
    args = parser.parse_args(argv)
    # Reprocess the chunk arguments, converting each pair into a Chunk.
    args.chunk = [
        Chunk(chunk_name(type), open(path, "rb")) for type, path in args.chunk
    ]
    return process(png.binary_stdout(), args)


if __name__ == "__main__":
    main()

81
Backend/venv/bin/pricolpng Executable file
View File

@@ -0,0 +1,81 @@
#!/home/gnx/Desktop/Hotel-Booking/Backend/venv/bin/python3
# http://www.python.org/doc/2.4.4/lib/module-itertools.html
import itertools
import sys
import png
# Used as the argparse description for the command line interface.
Description = """Join PNG images in a column top-to-bottom."""


class FormatError(Exception):
    """
    Some problem with the image format.
    """
def join_col(out, l):
    """
    Join the list of images.
    All input images must be same width and
    have the same number of channels.
    They are joined top-to-bottom.

    `out` is the (open file) destination for the output image.
    `l` should be a list of open files (the input image files).
    """
    # Counters used only for error messages.
    image = 0
    stream = 0
    # When the first image is read, this will be the reference width,
    # which must be the same for all images.
    width = None
    # Total height (accumulated as images are read).
    height = 0
    # Accumulated rows.
    rows = []
    for f in l:
        stream += 1
        # A single stream may contain several PNG images back to back;
        # keep reading until preamble() hits EOF.
        while True:
            im = png.Reader(file=f)
            try:
                im.preamble()
            except EOFError:
                break
            image += 1
            if not width:
                width = im.width
            elif width != im.width:
                raise FormatError('Image %d in stream %d has width %d; does not match %d.' %
                    (image, stream, im.width, width))
            height += im.height
            # Various bugs here because different numbers of channels and depths go wrong.
            w, h, p, info = im.asDirect()
            rows.extend(p)
    # Alarmingly re-use the last info object.
    # NOTE(review): if no image was read at all, `info` and `width` are
    # unbound here and this raises NameError — confirm callers always
    # supply at least one image.
    tinfo = dict(info)
    del tinfo['size']
    w = png.Writer(width, height, **tinfo)
    w.write(out, rows)
def main(argv):
    """Command line entry point: join the given PNG images in a column.

    `argv` is the full argument vector (including the program name).
    """
    import argparse

    parser = argparse.ArgumentParser(description=Description)
    parser.add_argument(
        "input", nargs="*", default="-", type=png.cli_open, metavar="PNG"
    )
    # BUG FIX: previously parse_args() was called with no arguments,
    # re-reading sys.argv and silently ignoring the `argv` parameter.
    args = parser.parse_args(argv[1:])
    return join_col(png.binary_stdout(), args.input)


if __name__ == '__main__':
    main(sys.argv)

254
Backend/venv/bin/priditherpng Executable file
View File

@@ -0,0 +1,254 @@
#!/home/gnx/Desktop/Hotel-Booking/Backend/venv/bin/python3
# pipdither
# Error Diffusing image dithering.
# Now with serpentine scanning.
# See http://www.efg2.com/Lab/Library/ImageProcessing/DHALF.TXT
# http://www.python.org/doc/2.4.4/lib/module-bisect.html
from bisect import bisect_left
import png
def dither(
    out,
    input,
    bitdepth=1,
    linear=False,
    defaultgamma=1.0,
    targetgamma=None,
    cutoff=0.5,  # see :cutoff:default
):
    """Dither the input PNG `input` into an image with a smaller bit depth
    and write the result image onto `out`. `bitdepth` specifies the bit
    depth of the new image.

    Normally the source image gamma is honoured (the image is
    converted into a linear light space before being dithered), but
    if the `linear` argument is true then the image is treated as
    being linear already: no gamma conversion is done (this is
    quicker, and if you don't care much about accuracy, it won't
    matter much).

    Images with no gamma indication (no ``gAMA`` chunk) are normally
    treated as linear (gamma = 1.0), but often it can be better
    to assume a different gamma value: For example continuous tone
    photographs intended for presentation on the web often carry
    an implicit assumption of being encoded with a gamma of about
    0.45 (because that's what you get if you just "blat the pixels"
    onto a PC framebuffer), so ``defaultgamma=0.45`` might be a
    good idea. `defaultgamma` does not override a gamma value
    specified in the file itself: It is only used when the file
    does not specify a gamma.

    If you (pointlessly) specify both `linear` and `defaultgamma`,
    `linear` wins.

    The gamma of the output image is, by default, the same as the input
    image. The `targetgamma` argument can be used to specify a
    different gamma for the output image. This effectively recodes the
    image to a different gamma, dithering as we go. The gamma specified
    is the exponent used to encode the output file (and appears in the
    output PNG's ``gAMA`` chunk); it is usually less than 1.
    """
    # Encoding is what happened when the PNG was made (and also what
    # happens when we output the PNG). Decoding is what we do to the
    # source PNG in order to process it.

    # The dithering algorithm is not completely general; it
    # can only do bit depth reduction, not arbitrary palette changes.
    maxval = 2 ** bitdepth - 1
    r = png.Reader(file=input)
    _, _, pixels, info = r.asDirect()
    planes = info["planes"]
    # :todo: make an Exception
    assert planes == 1
    width = info["size"][0]
    sourcemaxval = 2 ** info["bitdepth"] - 1
    if linear:
        gamma = 1
    else:
        gamma = info.get("gamma") or defaultgamma
    # Calculate an effective gamma for input and output;
    # then build tables using those.

    # `gamma` (whether it was obtained from the input file or an
    # assumed value) is the encoding gamma.
    # We need the decoding gamma, which is the reciprocal.
    decode = 1.0 / gamma
    # `targetdecode` is the assumed gamma that is going to be used
    # to decoding the target PNG.
    # Note that even though we will _encode_ the target PNG we
    # still need the decoding gamma, because
    # the table we use maps from PNG pixel value to linear light level.
    if targetgamma is None:
        targetdecode = decode
    else:
        targetdecode = 1.0 / targetgamma
    incode = build_decode_table(sourcemaxval, decode)
    # For encoding, we still build a decode table, because we
    # use it inverted (searching with bisect).
    outcode = build_decode_table(maxval, targetdecode)
    # The table used for choosing output codes. These values represent
    # the cutoff points between two adjacent output codes.
    # The cutoff parameter can be varied between 0 and 1 to
    # preferentially choose lighter (when cutoff > 0.5) or
    # darker (when cutoff < 0.5) values.
    # :cutoff:default: The default for this used to be 0.75, but
    # testing by drj on 2021-07-30 showed that this produces
    # banding when dithering left-to-right gradients;
    # test with:
    # priforgepng grl | priditherpng | kitty icat
    choosecode = list(zip(outcode[1:], outcode))
    p = cutoff
    choosecode = [x[0] * p + x[1] * (1.0 - p) for x in choosecode]
    # Repeat the first row to warm up the error diffusion state, then
    # strip the warm-up rows from the output.
    rows = repeat_header(pixels)
    dithered_rows = run_dither(incode, choosecode, outcode, width, rows)
    dithered_rows = remove_header(dithered_rows)
    info["bitdepth"] = bitdepth
    info["gamma"] = 1.0 / targetdecode
    w = png.Writer(**info)
    w.write(out, dithered_rows)
def build_decode_table(maxval, gamma):
    """
    Build a lookup table for decoding;
    table converts from pixel values to linear space.
    """
    assert maxval == int(maxval)
    assert maxval > 0
    scale = 1.0 / maxval
    if gamma != 1.0:
        return [(scale * v) ** gamma for v in range(maxval + 1)]
    return [scale * v for v in range(maxval + 1)]
def run_dither(incode, choosecode, outcode, width, rows):
    """
    Run a serpentine dither.
    Using the incode and choosecode tables.

    Yields one row of output indexes per input row.
    """
    # Errors diffused downwards (into next row)
    ed = [0.0] * width
    flipped = False
    for row in rows:
        # Convert to linear...
        row = [incode[v] for v in row]
        # Add errors...
        row = [e + v for e, v in zip(ed, row)]
        if flipped:
            # Serpentine scan: every other row is processed
            # right-to-left.
            row = row[::-1]
        targetrow = [0] * width
        for i, v in enumerate(row):
            # `it` will be the index of the chosen target colour;
            it = bisect_left(choosecode, v)
            targetrow[i] = it
            t = outcode[it]
            # err is the error that needs distributing.
            err = v - t
            # Sierra "Filter Lite" distributes * 2
            # as per this diagram. 1 1
            ef = err * 0.5
            # :todo: consider making rows one wider at each end and
            # removing "if"s
            if i + 1 < width:
                row[i + 1] += ef
            ef *= 0.5
            ed[i] = ef
            if i:
                ed[i - 1] += ef
        if flipped:
            # Undo the reversal so errors and output align with the image.
            ed = ed[::-1]
            targetrow = targetrow[::-1]
        yield targetrow
        flipped = not flipped
# Number of extra copies of the first row used to prime the error state.
WARMUP_ROWS = 32


def repeat_header(rows):
    """Repeat the first row, to "warm up" the error register."""
    # NOTE: `rows` is expected to be an iterator (as produced by
    # png.Reader): the trailing `yield from` continues after the
    # first row rather than restarting from the beginning.
    for first in rows:
        yield first
        for _ in range(WARMUP_ROWS):
            yield first
        break
    yield from rows


def remove_header(rows):
    """Remove the same number of rows that repeat_header added."""
    skipped = 0
    while skipped < WARMUP_ROWS:
        next(rows)
        skipped += 1
    yield from rows
def main(argv=None):
    """Command line entry point for the dither tool."""
    import sys

    # https://docs.python.org/3.5/library/argparse.html
    import argparse

    parser = argparse.ArgumentParser()
    if argv is None:
        argv = sys.argv
    # Split off the program name; the rest are options.
    progname, *args = argv
    parser.add_argument("--bitdepth", type=int, default=1, help="bitdepth of output")
    parser.add_argument(
        "--cutoff",
        type=float,
        default=0.5,
        help="cutoff to select adjacent output values",
    )
    parser.add_argument(
        "--defaultgamma",
        type=float,
        default=1.0,
        help="gamma value to use when no gamma in input",
    )
    parser.add_argument("--linear", action="store_true", help="force linear input")
    parser.add_argument(
        "--targetgamma",
        type=float,
        help="gamma to use in output (target), defaults to input gamma",
    )
    parser.add_argument(
        "input", nargs="?", default="-", type=png.cli_open, metavar="PNG"
    )
    ns = parser.parse_args(args)
    # Option names deliberately match dither()'s keyword parameters.
    return dither(png.binary_stdout(), **vars(ns))


if __name__ == "__main__":
    main()

275
Backend/venv/bin/priforgepng Executable file
View File

@@ -0,0 +1,275 @@
#!/home/gnx/Desktop/Hotel-Booking/Backend/venv/bin/python3
# priforgepng
"""Forge PNG image from raw computation."""
from array import array
from fractions import Fraction
import argparse
import re
import sys
import png
# Pattern generator functions.
# Each takes pixel-centre coordinates (x, or x and y) and returns an
# intensity in [0, 1].  The parameter names and arities matter:
# yield_fun_rows() calls one-argument generators as pfun(x=fx) and uses
# n_args() to distinguish the two forms.  The docstrings are printed
# verbatim by the --list option; keep both stable.
def gen_glr(x):
    """Gradient Left to Right"""
    return x


def gen_grl(x):
    """Gradient Right to Left"""
    return 1 - x


def gen_gtb(x, y):
    """Gradient Top to Bottom"""
    return y


def gen_gbt(x, y):
    """Gradient Bottom to Top"""
    return 1.0 - y


def gen_rtl(x, y):
    """Radial gradient, centred at Top-Left"""
    # Intensity falls off with Euclidean distance from (0, 0),
    # clamped at 0.
    return max(1 - (float(x) ** 2 + float(y) ** 2) ** 0.5, 0.0)


def gen_rctr(x, y):
    """Radial gradient, centred at Centre"""
    return gen_rtl(float(x) - 0.5, float(y) - 0.5)


def gen_rtr(x, y):
    """Radial gradient, centred at Top-Right"""
    return gen_rtl(1.0 - float(x), y)


def gen_rbl(x, y):
    """Radial gradient, centred at Bottom-Left"""
    return gen_rtl(x, 1.0 - float(y))


def gen_rbr(x, y):
    """Radial gradient, centred at Bottom-Right"""
    return gen_rtl(1.0 - float(x), 1.0 - float(y))
def stripe(x, n):
    # Alternating 0/1 bands: the integer part of x*n selects the band.
    return int(x * n) & 1


def gen_vs2(x):
    """2 Vertical Stripes"""
    return stripe(x, 2)


def gen_vs4(x):
    """4 Vertical Stripes"""
    return stripe(x, 4)


def gen_vs10(x):
    """10 Vertical Stripes"""
    return stripe(x, 10)


def gen_hs2(x, y):
    """2 Horizontal Stripes"""
    return stripe(float(y), 2)


def gen_hs4(x, y):
    """4 Horizontal Stripes"""
    return stripe(float(y), 4)


def gen_hs10(x, y):
    """10 Horizontal Stripes"""
    return stripe(float(y), 10)


def gen_slr(x, y):
    """10 diagonal stripes, rising from Left to Right"""
    return stripe(x + y, 10)


def gen_srl(x, y):
    """10 diagonal stripes, rising from Right to Left"""
    # The 1 + offset keeps the argument non-negative.
    return stripe(1 + x - y, 10)


def checker(x, y, n):
    # XOR of the two stripe patterns gives a checkerboard.
    return stripe(x, n) ^ stripe(y, n)


def gen_ck8(x, y):
    """8 by 8 checkerboard"""
    return checker(x, y, 8)


def gen_ck15(x, y):
    """15 by 15 checkerboard"""
    return checker(x, y, 15)


def gen_zero(x):
    """All zero (black)"""
    return 0


def gen_one(x):
    """All one (white)"""
    return 1
def yield_fun_rows(size, bitdepth, pattern):
    """
    Create a single channel (monochrome) test pattern.
    Yield each row in turn.
    """
    width, height = size
    maxval = 2 ** bitdepth - 1
    # 16-bit samples need unsigned shorts; 8-bit (and less) fit in bytes.
    if maxval > 255:
        typecode = "H"
    else:
        typecode = "B"
    pfun = pattern_function(pattern)
    # The coordinates are an integer + 0.5,
    # effectively sampling each pixel at its centre.
    # This is morally better, and produces all 256 sample values
    # in a 256-pixel wide gradient.

    # We make a list of x coordinates here and re-use it,
    # because Fraction instances are slow to allocate.
    xs = [Fraction(x, 2 * width) for x in range(1, 2 * width, 2)]
    # The general case is a function in x and y,
    # but if the function only takes an x argument,
    # it's handled in a special case that is a lot faster.
    if n_args(pfun) == 2:
        for y in range(height):
            a = array(typecode)
            # y + 0.5 is exact in binary floating point,
            # so Fraction(y + 0.5) is exact too.
            fy = Fraction(Fraction(y + 0.5), height)
            for fx in xs:
                a.append(int(round(maxval * pfun(fx, fy))))
            yield a
        return
    # For functions in x only, it's a _lot_ faster
    # to generate a single row and repeatedly yield it.
    # Note the keyword call: generators must name their parameter `x`.
    a = array(typecode)
    for fx in xs:
        a.append(int(round(maxval * pfun(x=fx))))
    for y in range(height):
        yield a
    return
def generate(args):
    """
    Create a PNG test image and write the file to stdout.

    `args` should be an argparse Namespace instance or similar.
    """
    size = args.size
    bitdepth = args.depth
    out = png.binary_stdout()
    # One greyscale image per requested pattern,
    # written back to back on stdout.
    for pattern in args.pattern:
        rows = yield_fun_rows(size, bitdepth, pattern)
        writer = png.Writer(
            size[0], size[1], bitdepth=bitdepth, greyscale=True, alpha=False
        )
        writer.write(out, rows)
def n_args(fun):
    """Number of arguments in fun's argument list."""
    code = fun.__code__
    return code.co_argcount
def pattern_function(pattern):
    """From `pattern`, a string,
    return the function for that pattern.

    Raises ValueError when no generator matches `pattern`.
    """
    lpat = pattern.lower()
    # Generators are found by naming convention: module-level
    # functions called gen_<pattern>.
    for name, fun in globals().items():
        parts = name.split("_")
        if parts[0] != "gen":
            continue
        if parts[1] == lpat:
            return fun
    # BUG FIX: previously an unknown pattern fell through and returned
    # None, producing an obscure AttributeError later in n_args();
    # fail early with a clear message instead.
    raise ValueError("unknown pattern %r" % pattern)
def patterns():
    """
    List the patterns.
    """
    # Scan module globals for generator functions (gen_<name>),
    # yielding (pattern name, docstring) pairs.
    for symbol, fun in globals().items():
        pieces = symbol.split("_")
        if pieces[0] != "gen":
            continue
        yield pieces[1], fun.__doc__
def dimensions(s):
    """
    Typecheck the --size option, which should be
    one or two comma separated numbers.
    Example: "64,40".
    """
    numbers = re.findall(r"\d+", s)
    if len(numbers) == 1:
        # A single number means a square image.
        numbers = numbers * 2
    elif len(numbers) != 2:
        raise ValueError("%r should be width or width,height" % s)
    return [int(n) for n in numbers]
def main(argv=None):
    """Command line entry point: list patterns or generate images."""
    if argv is None:
        argv = sys.argv
    parser = argparse.ArgumentParser(description="Forge greyscale PNG patterns")
    parser.add_argument(
        "-l", "--list", action="store_true", help="print list of patterns and exit"
    )
    parser.add_argument(
        "-d", "--depth", default=8, type=int, metavar="N", help="N bits per pixel"
    )
    parser.add_argument(
        "-s",
        "--size",
        default=[256, 256],
        type=dimensions,
        metavar="w[,h]",
        help="width and height of the image in pixels",
    )
    parser.add_argument("pattern", nargs="*", help="name of pattern")
    args = parser.parse_args(argv[1:])
    if args.list:
        # --list prints each pattern name with its docstring and exits.
        for name, doc in sorted(patterns()):
            print(name, doc, sep="\t")
        return
    if not args.pattern:
        parser.error("--list or pattern is required")
    return generate(args)


if __name__ == "__main__":
    main()

72
Backend/venv/bin/prigreypng Executable file
View File

@@ -0,0 +1,72 @@
#!/home/gnx/Desktop/Hotel-Booking/Backend/venv/bin/python3
# prigreypng
# Convert image to grey (L, or LA), but only if that involves no colour change.
import argparse
import array
import png
def as_grey(out, inp):
    """
    Convert image to greyscale, but only when no colour change.

    This works by using the input G channel (green) as
    the output L channel (luminance) and
    checking that every pixel is grey as we go.
    A non-grey pixel will raise an error.
    """
    r = png.Reader(file=inp)
    _, _, rows, info = r.asDirect()
    if info["greyscale"]:
        # Already grey: pass through unchanged.
        w = png.Writer(**info)
        return w.write(out, rows)
    planes = info["planes"]
    # Output drops two colour channels: RGB -> L, RGBA -> LA.
    targetplanes = planes - 2
    alpha = info["alpha"]
    width, height = info["size"]
    typecode = "BH"[info["bitdepth"] > 8]
    # Values per target row
    vpr = width * targetplanes

    def iterasgrey():
        for i, row in enumerate(rows):
            row = array.array(typecode, row)
            targetrow = array.array(typecode, [0] * vpr)
            # Copy G (and possibly A) channel.
            # BUG FIX: G is channel 1, not 0.  Previously the R channel
            # was copied, and the check below compared R against itself,
            # so the G channel was never verified at all.
            green = row[1::planes]
            if alpha:
                targetrow[0::2] = green
                targetrow[1::2] = row[3::4]
            else:
                targetrow = green
            # Check R and B channels match G.
            if green != row[0::planes] or green != row[2::planes]:
                raise ValueError("Row %i contains non-grey pixel." % i)
            yield targetrow

    info["greyscale"] = True
    # png.Writer derives the channel count itself.
    del info["planes"]
    w = png.Writer(**info)
    return w.write(out, iterasgrey())
def main(argv=None):
    """Command line entry point: convert the input PNG to greyscale.

    `argv`, if given, is the full argument vector including the
    program name.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "input", nargs="?", default="-", type=png.cli_open, metavar="PNG"
    )
    # BUG FIX: previously parse_args() was called with no arguments,
    # silently ignoring the `argv` parameter.  Passing None keeps the
    # old sys.argv behaviour for no-argument calls.
    args = parser.parse_args(argv[1:] if argv is not None else None)
    return as_grey(png.binary_stdout(), args.input)


if __name__ == "__main__":
    import sys

    sys.exit(main())

111
Backend/venv/bin/pripalpng Executable file
View File

@@ -0,0 +1,111 @@
#!/home/gnx/Desktop/Hotel-Booking/Backend/venv/bin/python3
# pripalpng
"""Convert to Palette PNG (without changing colours)"""
import argparse
import collections
# https://docs.python.org/2.7/library/io.html
import io
import string
import zlib
# Local module.
import png
def make_inverse_palette(rows, channels):
    """
    The inverse palette maps from tuple to palette index.

    Pixels are numbered in order of first appearance.
    """
    palette = {}
    for row in rows:
        for pixel in png.group(row, channels):
            # Assign the next free index to pixels not seen before;
            # setdefault leaves existing entries untouched.
            palette.setdefault(pixel, len(palette))
    return palette
def palette_convert(out, inp, palette_file):
    """
    Convert PNG image in `inp` to use a palette, colour type 3,
    and write converted image to `out`.

    `palette_file` is a file descriptor for the palette to use.
    If `palette_file` is None, then `inp` is used as the palette.
    """
    if palette_file is None:
        # Use the input image itself as the palette source.
        # After this swap `inp` is None and `palette_file` holds
        # the image file.
        inp, palette_file = palette_file, inp
    reader = png.Reader(file=palette_file)
    w, h, rows, info = asRGBorA8(reader)
    channels = info["planes"]
    if not inp:
        # The same row stream will be iterated again by convert_rows
        # below, so it must be materialised.
        rows = list(rows)
    palette_map = make_inverse_palette(rows, channels)
    if inp:
        # A separate palette file was given: now read the real input.
        reader = png.Reader(file=inp)
        w, h, rows, info = asRGBorA8(reader)
        channels = info["planes"]
    # Default for colours not in palette is to use last entry.
    last = len(palette_map) - 1

    def map_pixel(p):
        return palette_map.get(p, last)

    def convert_rows():
        for row in rows:
            yield [map_pixel(p) for p in png.group(row, channels)]

    # Make a palette by sorting the pixels according to their index.
    palette = sorted(palette_map.keys(), key=palette_map.get)
    pal_info = dict(size=info["size"], palette=palette)
    w = png.Writer(**pal_info)
    w.write(out, convert_rows())
def asRGBorA8(reader):
    """
    Return (width, height, rows, info) converting to RGB,
    or RGBA if original has an alpha channel.
    """
    # read() populates the info dict so we can inspect the alpha flag.
    _, _, _, info = reader.read()
    convert = reader.asRGBA8 if info["alpha"] else reader.asRGB8
    return convert()
def main(argv=None):
    """Command line entry point: convert the input to a palette PNG."""
    import sys

    if argv is None:
        argv = sys.argv
    argv = argv[1:]
    # (Removed an unused `import re` that previously sat here.)
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument("--palette", type=png.cli_open)
    parser.add_argument(
        "input", nargs="?", default="-", type=png.cli_open, metavar="PNG"
    )
    args = parser.parse_args(argv)
    palette_convert(png.binary_stdout(), args.input, args.palette)


if __name__ == "__main__":
    main()

355
Backend/venv/bin/pripamtopng Executable file
View File

@@ -0,0 +1,355 @@
#!/home/gnx/Desktop/Hotel-Booking/Backend/venv/bin/python3
# pripamtopng
#
# Python Raster Image PAM to PNG
import array
import struct
import sys
import png
Description = """Convert NetPBM PAM/PNM format files to PNG."""


def read_pam_header(infile):
    """
    Read (the rest of a) PAM header.

    `infile` should be positioned immediately after the initial 'P7' line
    (at the beginning of the second line).

    Returns are as for `read_pnm_header`.
    """
    # Unlike PBM, PGM, and PPM, we can read the header a line at a time.
    header = dict()
    while True:
        line = infile.readline().strip()
        if line == b"ENDHDR":
            break
        if not line:
            raise EOFError("PAM ended prematurely")
        # BUG FIX: indexing bytes yields an int, so the previous test
        # `line[0] == b"#"` never matched; comment lines then fell into
        # the key/value parse and could crash it.
        if line.startswith(b"#"):
            continue
        line = line.split(None, 1)
        key = line[0]
        # A repeated key has its values joined with a space.
        if key not in header:
            header[key] = line[1]
        else:
            header[key] += b" " + line[1]
    required = [b"WIDTH", b"HEIGHT", b"DEPTH", b"MAXVAL"]
    required_str = b", ".join(required).decode("ascii")
    result = []
    for token in required:
        if token not in header:
            raise png.Error("PAM file must specify " + required_str)
        try:
            x = int(header[token])
        except ValueError:
            raise png.Error(required_str + " must all be valid integers")
        if x <= 0:
            raise png.Error(required_str + " must all be positive integers")
        result.append(x)
    return (b"P7",) + tuple(result)
def read_pnm_header(infile):
    """
    Read a PNM header, returning (format,width,height,depth,maxval).
    Also reads a PAM header (by using a helper function).
    `width` and `height` are in pixels.
    `depth` is the number of channels in the image;
    for PBM and PGM it is synthesized as 1, for PPM as 3;
    for PAM images it is read from the header.
    `maxval` is synthesized (as 1) for PBM images.
    """
    # Generally, see http://netpbm.sourceforge.net/doc/ppm.html
    # and http://netpbm.sourceforge.net/doc/pam.html

    # Technically 'P7' must be followed by a newline,
    # so by using rstrip() we are being liberal in what we accept.
    # I think this is acceptable.
    magic = infile.read(3).rstrip()
    if magic == b"P7":
        # PAM header parsing is completely different.
        return read_pam_header(infile)
    # Expected number of tokens in header (3 for P4, 4 for P6)
    expected = 4
    pbm = (b"P1", b"P4")
    if magic in pbm:
        expected = 3
    header = [magic]
    # We must read the rest of the header byte by byte because
    # the final whitespace character may not be a newline.
    # Of course all PNM files in the wild use a newline at this point,
    # but we are strong and so we avoid
    # the temptation to use readline.

    # Bytes accumulated for the current token.
    bs = bytearray()
    # One-byte pushback buffer (front is the next byte to deliver).
    backs = bytearray()

    def next():
        # NOTE: deliberately shadows the builtin `next` inside
        # this function only.
        if backs:
            c = bytes(backs[0:1])
            del backs[0]
        else:
            c = infile.read(1)
        if not c:
            raise png.Error("premature EOF reading PNM header")
        bs.extend(c)
        return c

    def backup():
        """Push last byte of token onto front of backs."""
        backs.insert(0, bs[-1])
        del bs[-1]

    def ignore():
        # Discard all bytes accumulated so far.
        del bs[:]

    def tokens():
        # Drive the two-state lexer; each state function returns
        # (token-or-None, next-state).
        ls = lexInit
        while True:
            token, ls = ls()
            if token:
                yield token

    def lexInit():
        c = next()
        # Skip comments
        # (the chained comparison is just c == b"#").
        if b"#" <= c <= b"#":
            while c not in b"\n\r":
                c = next()
            ignore()
            return None, lexInit
        # Skip whitespace (that precedes a token)
        if c.isspace():
            ignore()
            return None, lexInit
        if not c.isdigit():
            raise png.Error("unexpected byte %r found in header" % c)
        return None, lexNumber

    def lexNumber():
        # According to the specification it is legal to have comments
        # that appear in the middle of a token.
        # I've never seen it; and,
        # it's a bit awkward to code good lexers in Python (no goto).
        # So we break on such cases.
        c = next()
        while c.isdigit():
            c = next()
        backup()
        token = bs[:]
        ignore()
        return token, lexInit

    for token in tokens():
        # All "tokens" are decimal integers, so convert them here.
        header.append(int(token))
        if len(header) == expected:
            break
    # The header must end with exactly one whitespace byte.
    final = next()
    if not final.isspace():
        raise png.Error("expected header to end with whitespace, not %r" % final)
    if magic in pbm:
        # synthesize a MAXVAL
        header.append(1)
    depth = (1, 3)[magic == b"P6"]
    return header[0], header[1], header[2], depth, header[3]
def convert_pnm_plain(w, infile, outfile):
    """
    Convert a plain PNM file containing raw pixel data into
    a PNG file with the parameters set in the writer object.

    Works for plain PGM formats.
    """
    # See convert_pnm_binary for the corresponding function for
    # binary PNM formats.
    pixel_rows = scan_rows_from_file_plain(infile, w.width, w.height, w.planes)
    w.write(outfile, pixel_rows)
def scan_rows_from_file_plain(infile, width, height, planes):
    """
    Generate a sequence of rows from the input file `infile`.

    The input file should be in a "Netpbm-like" plain format,
    positioned at the beginning of the first value
    (that is, immediately after the header).

    The number of pixels to read is taken from
    the image dimensions (`width`, `height`, `planes`).

    Each row is yielded as a single sequence of values.
    """
    # Values per pixel row.
    vpr = width * planes
    # Text lines need not align with pixel rows: one line may hold a
    # fraction of a row or many rows.  So parsed values are buffered
    # and peeled off `vpr` at a time whenever enough are available.
    pending = []
    emitted = 0
    for text_line in infile:
        pending.extend(int(token) for token in text_line.split())
        while len(pending) >= vpr:
            yield pending[:vpr]
            del pending[:vpr]
            emitted += 1
            if emitted >= height:
                # Diagnostic here if there are spare values?
                return
    # Diagnostic here for early EOF?
def convert_pnm_binary(w, infile, outfile):
    """
    Convert a PNM file containing raw pixel data into
    a PNG file with the parameters set in the writer object.

    Works for (binary) PGM, PPM, and PAM formats.
    """
    pixel_rows = scan_rows_from_file(infile, w.width, w.height, w.planes, w.bitdepth)
    w.write(outfile, pixel_rows)
def scan_rows_from_file(infile, width, height, planes, bitdepth):
    """
    Generate a sequence of rows from the input file `infile`.

    The input file should be in a "Netpbm-like" binary format,
    positioned at the beginning of the first pixel.
    The number of pixels to read is taken from
    the image dimensions (`width`, `height`, `planes`);
    the number of bytes per value is implied by `bitdepth`.

    Each row is yielded as a single sequence of values.
    """
    # Values per row.
    vpr = width * planes
    if bitdepth > 8:
        # Binary PNM only has 8- and 16-bit samples.
        assert bitdepth == 16
        # Two bytes per value, big-endian.
        bpr = 2 * vpr
        fmt = ">%dH" % vpr

        def read_row():
            return array.array("H", struct.unpack(fmt, infile.read(bpr)))

    else:
        # One byte per value.
        bpr = vpr

        def read_row():
            return array.array("B", infile.read(bpr))

    for _ in range(height):
        yield read_row()
def parse_args(args):
    """
    Create a parser and parse the command line arguments.

    `args` is the argument list without the program name.
    """
    from argparse import ArgumentParser

    parser = ArgumentParser(description=Description)
    version = "%(prog)s " + png.__version__
    parser.add_argument("--version", action="version", version=version)
    parser.add_argument(
        "-c",
        "--compression",
        type=int,
        metavar="level",
        help="zlib compression level (0-9)",
    )
    parser.add_argument(
        "input",
        nargs="?",
        default="-",
        type=png.cli_open,
        metavar="PAM/PNM",
        help="input PAM/PNM file to convert",
    )
    args = parser.parse_args(args)
    return args
def main(argv=None):
    """Command line entry point: convert a PNM/PAM file to PNG,
    writing the PNG to binary stdout.
    """
    if argv is None:
        argv = sys.argv
    args = parse_args(argv[1:])

    # Prepare input and output files
    infile = args.input

    # Call after parsing, so that --version and --help work.
    outfile = png.binary_stdout()

    # Encode PNM to PNG
    format, width, height, depth, maxval = read_pnm_header(infile)

    ok_formats = (b"P2", b"P5", b"P6", b"P7")
    if format not in ok_formats:
        raise NotImplementedError("file format %s not supported" % format)

    # The NetPBM depth (number of channels) completely
    # determines the PNG format.
    # Observe:
    # - L, LA, RGB, RGBA are the 4 modes supported by PNG;
    # - they correspond to 1, 2, 3, 4 channels respectively.
    # We use the number of channels in the source image to
    # determine which one we have.
    # We ignore the NetPBM image type and the PAM TUPLTYPE.
    greyscale = depth <= 2
    pamalpha = depth in (2, 4)
    # Supported maxvals are exactly 2**b - 1 for b in 1..16;
    # the list index recovers the PNG bit depth below.
    supported = [2 ** x - 1 for x in range(1, 17)]
    try:
        mi = supported.index(maxval)
    except ValueError:
        raise NotImplementedError(
            "input maxval (%s) not in supported list %s" % (maxval, str(supported))
        )
    bitdepth = mi + 1
    writer = png.Writer(
        width,
        height,
        greyscale=greyscale,
        bitdepth=bitdepth,
        alpha=pamalpha,
        compression=args.compression,
    )

    # P1/P2/P3 are the plain (ASCII) Netpbm variants.
    plain = format in (b"P1", b"P2", b"P3")
    if plain:
        convert_pnm_plain(writer, infile, outfile)
    else:
        convert_pnm_binary(writer, infile, outfile)
# Script entry point: a PNG format error exits with status 99.
if __name__ == "__main__":
    try:
        sys.exit(main())
    except png.Error as e:
        print(e, file=sys.stderr)
        sys.exit(99)

540
Backend/venv/bin/priplan9topng Executable file
View File

@@ -0,0 +1,540 @@
#!/home/gnx/Desktop/Hotel-Booking/Backend/venv/bin/python3
# Imported from //depot/prj/plan9topam/master/code/plan9topam.py#4 on
# 2009-06-15.
"""Command line tool to convert from Plan 9 image format to PNG format.
Plan 9 image format description:
https://plan9.io/magic/man2html/6/image
Where possible this tool will use unbuffered read() calls,
so that when finished the file offset is exactly at the end of
the image data.
This is useful for Plan9 subfont files which place font metric
data immediately after the image.
"""
# Test materials
# asset/left.bit is a Plan 9 image file, a leftwards facing Glenda.
# Other materials have to be scrounged from the internet.
# https://plan9.io/sources/plan9/sys/games/lib/sokoban/images/cargo.bit
import array
import collections
import io
# http://www.python.org/doc/2.3.5/lib/module-itertools.html
import itertools
import os
# http://www.python.org/doc/2.3.5/lib/module-re.html
import re
import struct
# http://www.python.org/doc/2.3.5/lib/module-sys.html
import sys
# https://docs.python.org/3/library/tarfile.html
import tarfile
# https://pypi.org/project/pypng/
import png
# internal
import prix
class Error(Exception):
    """Some sort of Plan 9 image error.

    Raised for malformed or inconsistent Plan 9 image data.
    """
def block(s, n):
    """Group the elements of `s` into `n`-tuples, discarding any
    trailing elements that do not fill a complete tuple."""
    it = iter(s)
    return zip(*(it,) * n)
def plan9_as_image(inp):
    """Represent a Plan 9 image file as a png.Image instance, so
    that it can be written as a PNG file.
    Works with compressed input files and may work with uncompressed files.

    Returns the png.Image built from `inp`.
    """

    # Use inp.raw if available.
    # This avoids buffering and means that when the image is processed,
    # the resulting input stream is cued up exactly at the end
    # of the image.
    inp = getattr(inp, "raw", inp)

    info, blocks = plan9_open_image(inp)
    rows, infodict = plan9_image_rows(blocks, info)
    return png.Image(rows, infodict)
def plan9_open_image(inp):
    """Open a Plan9 image file (`inp` should be an already open
    file object), and return (`info`, `blocks`) pair.
    `info` should be a Plan9 5-tuple;
    `blocks` is the input, and it should yield (`row`, `data`)
    pairs (see :meth:`pixmeta`).
    """
    magic = inp.read(11)
    if magic == b"compressed\n":
        return decompress(inp)
    # Uncompressed path.
    # Since Python 3, there is a good chance that this path
    # doesn't work.
    return glue(inp, magic)
def glue(f, r):
    """Return (info, stream) pair, given `r` the initial portion of
    the metadata that has already been read from the stream `f`.
    """
    remaining = 60 - len(r)
    header = r + f.read(remaining)
    return meta(header), f
def meta(r):
    """Convert 60 byte bytestring `r`, the metadata from an image file.

    Returns a 5-element list [*chan*, *minx*, *miny*, *limx*, *limy*]:
    the channel descriptor bytestring followed by four integers.
    As per https://plan9.io/magic/man2html/6/image the metadata
    comprises 5 words separated by blanks.
    As it happens each word starts at an index that is a multiple of 12,
    but this routine does not care about that.
    """
    words = r.split()
    # :todo: raise FormatError
    if len(words) != 5:
        raise Error("Expected 5 space-separated words in metadata")
    chan, *coords = words
    return [chan] + [int(c) for c in coords]
def bitdepthof(chan):
    """Return the bitdepth for a Plan9 pixel format string.

    The depth is the largest depth among the channels, ignoring
    padding ("x") channels.
    """
    maxd = 0
    for c in re.findall(rb"[a-z]\d*", chan):
        # `chan` is a bytestring, so `c[0]` would be an int and the
        # original comparison `c[0] != "x"` was always true on
        # Python 3; slice to compare bytes against bytes so that
        # padding channels really are skipped.
        if c[:1] != b"x":
            maxd = max(maxd, int(c[1:]))
    return maxd
def maxvalof(chan):
    """Return the netpbm MAXVAL for a Plan9 pixel format string."""
    return (1 << bitdepthof(chan)) - 1
def plan9_image_rows(blocks, metadata):
    """
    Convert (uncompressed) Plan 9 image file to pair of (*rows*, *info*).
    This is intended to be used by PyPNG format.
    *info* is the image info (metadata) returned in a dictionary,
    *rows* is an iterator that yields each row in
    boxed row flat pixel format.

    `blocks`, should be an iterator of (`row`, `data`) pairs.
    """

    chan, minx, miny, limx, limy = metadata
    rows = limy - miny
    width = limx - minx
    # Every channel letter except "x" (padding) counts as a plane.
    nchans = len(re.findall(b"[a-wyz]", chan))
    alpha = b"a" in chan
    # Iverson's convention for the win!
    # (the bool `alpha` is used as 0/1 in the subtraction)
    ncolour = nchans - alpha
    greyscale = ncolour == 1
    bitdepth = bitdepthof(chan)
    maxval = maxvalof(chan)
    # PNG style info dict.
    meta = dict(
        size=(width, rows),
        bitdepth=bitdepth,
        greyscale=greyscale,
        alpha=alpha,
        planes=nchans,
    )

    # "B" array for 8-bit samples, "H" for 16-bit.
    arraycode = "BH"[bitdepth > 8]
    return (
        map(
            lambda x: array.array(arraycode, itertools.chain(*x)),
            block(unpack(blocks, rows, width, chan, maxval), width),
        ),
        meta,
    )
def unpack(f, rows, width, chan, maxval):
    """Unpack `f` into pixels.

    `chan` describes the pixel format using
    the Plan9 syntax ("k8", "r8g8b8", and so on).
    Assumes the pixel format has a total channel bit depth
    that is either a multiple or a divisor of 8
    (the Plan9 image specification requires this).
    `f` should be an iterator that returns blocks of input such that
    each block contains a whole number of pixels.
    The return value is an iterator that yields each pixel as an n-tuple.
    """

    def mask(w):
        """An integer, to be used as a mask, with bottom `w` bits set to 1."""
        return (1 << w) - 1

    def deblock(f, depth, width):
        """A "packer" used to convert multiple bytes into single pixels.
        `depth` is the pixel depth in bits (>= 8), `width` is the row width in
        pixels.
        """
        w = depth // 8
        for block in f:
            for i in range(len(block) // w):
                p = block[w * i : w * (i + 1)]
                # Convert little-endian p to integer x
                x = 0
                s = 1  # scale
                for j in p:
                    x += s * j
                    s <<= 8
                yield x

    def bitfunge(f, depth, width):
        """A "packer" used to convert single bytes into multiple pixels.
        Depth is the pixel depth (< 8), width is the row width in pixels.
        """
        assert 8 / depth == 8 // depth
        for block in f:
            col = 0
            for x in block:
                for j in range(8 // depth):
                    yield x >> (8 - depth)
                    col += 1
                    if col == width:
                        # A row-end forces a new byte even if
                        # we haven't consumed all of the current byte.
                        # Effectively rows are bit-padded to make
                        # a whole number of bytes.
                        col = 0
                        break
                    x <<= depth

    # number of bits in each channel
    bits = [int(d) for d in re.findall(rb"\d+", chan)]
    # colr of each channel
    # (r, g, b, k for actual colours, and
    # a, m, x for alpha, map-index, and unused)
    colr = re.findall(b"[a-z]", chan)
    depth = sum(bits)
    # Select a "packer" that either:
    # - gathers multiple bytes into a single pixel (for depth >= 8); or,
    # - splits bytes into several pixels (for depth < 8).
    if depth >= 8:
        assert depth % 8 == 0
        packer = deblock
    else:
        assert 8 % depth == 0
        packer = bitfunge
    for x in packer(f, depth, width):
        # x is the pixel as an unsigned integer
        o = []
        # This is a bit yucky.
        # Extract each channel from the _most_ significant part of x.
        for b, col in zip(bits, colr):
            v = (x >> (depth - b)) & mask(b)
            x <<= b
            # `colr` entries are bytes (from a bytes regexp), so the
            # comparison must be against b"x": padding channels are
            # passed through unscaled.  (The previous `col != "x"`
            # was always true on Python 3, wrongly scaling padding.)
            if col != b"x":
                # scale to maxval
                v = v * float(maxval) / mask(b)
                v = int(v + 0.5)
            o.append(v)
        yield o
def decompress(f):
    """Decompress a Plan 9 image file.

    The input `f` should be a binary file object that
    is already cued past the initial 'compressed\n' string.
    The return result is (`info`, `blocks`);
    `info` is a 5-tuple of the Plan 9 image metadata;
    `blocks` is an iterator that yields a (row, data) pair
    for each block of data.
    """

    r = meta(f.read(60))
    # r[4] is limy; decompression proceeds until that row is reached.
    return r, decomprest(f, r[4])
def decomprest(f, rows):
    """Iterator that decompresses the rest of a file once the metadata
    have been consumed.

    NOTE(review): this yields bare data blocks, not (row, data) pairs
    as some surrounding docstrings suggest — confirm against callers.
    """

    # Each deblock call reports the running row count; stop once
    # all `rows` rows have been produced.
    row = 0
    while row < rows:
        row, o = deblock(f)
        yield o
def deblock(f):
    """Decompress a single block from a compressed Plan 9 image file.

    Each block starts with 2 decimal strings of 12 bytes each.
    Returns a (row, data) pair where
    `row` is the total number of rows processed
    (according to the file format) and
    `data` is the decompressed data for this block.
    """
    row = int(f.read(12))
    size = int(f.read(12))
    if not (0 <= size <= 6000):
        raise Error("block has invalid size; not a Plan 9 image file?")

    # Since each block is at most 6000 bytes we may as well read it all in
    # one go.
    compressed = f.read(size)
    pos = 0
    out = []
    while pos < size:
        control = compressed[pos]
        pos += 1
        if control & 0x80:
            # Literal run: low 7 bits + 1 bytes copied verbatim.
            count = (control & 0x7F) + 1
            out.extend(compressed[pos : pos + count])
            pos += count
        else:
            # control's high-order bit is 0: a back-reference.
            count = (control >> 2) + 3
            # Offset is made from bottom 2 bits of control and 8 bits
            # of the next byte.
            #   MSByte                LSByte
            #   +---------------------+-------------------------+
            #   | - - - - - - | x1 x0 | d7 d6 d5 d4 d3 d2 d1 d0 |
            #   +-----------------------------------------------+
            # Had to discover by inspection which way round the bits
            # go, because https://plan9.io/magic/man2html/6/image
            # doesn't say that the control byte's 2 bits are most
            # significant.
            offset = ((control & 3) << 8) | compressed[pos]
            pos += 1
            # The complement operator neatly maps (0 to 1023) to
            # (-1 to -1024); adding len(out) gives a (non-negative)
            # index into out from which to start copying.
            start = ~offset + len(out)
            if start < 0:
                raise Error(
                    "byte offset indexes off the begininning of "
                    "the output buffer; not a Plan 9 image file?"
                )
            # Copy byte-at-a-time; source and destination may overlap.
            for k in range(count):
                out.append(out[start + k])
    return row, bytes(out)
# Glyph record of a Plan 9 subfont: `x` is the glyph's left edge in the
# font image; `top`/`bottom` bound its rows; `left` is the target
# x-offset; `width` is the advance width (0 marks an undefined glyph).
FontChar = collections.namedtuple("FontChar", "x top bottom left width")
def font_copy(inp, image, out, control):
    """
    Convert a Plan 9 font (`inp`, `image`) to a series of PNG images,
    and write them out as a tar file to the file object `out`.
    Write a text control file out to the file object `control`.

    Each valid glyph in the font becomes a single PNG image;
    the output is a tar file of all the images.

    A Plan 9 font consists of a Plan 9 image immediately
    followed by font data.
    The image for the font should be the `image` argument,
    the file containing the rest of the font data should be the
    file object `inp` which should be cued up to the start of
    the font data that immediately follows the image.

    https://plan9.io/magic/man2html/6/font
    """

    # The format is a little unusual, and isn't completely
    # clearly documented.
    # Each 6-byte structure (see FontChar above) defines
    # a rectangular region of the image that is used for each
    # glyph.
    # The source image region that is used may be strictly
    # smaller than the rectangle for the target glyph.
    # This seems like a micro-optimisation.
    # For each glyph,
    # rows above `top` and below `bottom` will not be copied
    # from the source (they can be assumed to be blank).
    # No space is saved in the source image, since the rows must
    # be present.
    # `x` is always non-decreasing, so the glyphs appear strictly
    # left-to-image in the source image.
    # The x of the next glyph is used to
    # infer the width of the source rectangle.
    # `top` and `bottom` give the y-coordinate of the top- and
    # bottom- sides of the rectangle in both source and targets.
    # `left` is the x-coordinate of the left-side of the
    # rectangle in the target glyph. (equivalently, the amount
    # of padding that should be added on the left).
    # `width` is the advance-width of the glyph; by convention
    # it is 0 for an undefined glyph.

    name = getattr(inp, "name", "*subfont*name*not*supplied*")

    # 36-byte text header: three blank-separated integers.
    header = inp.read(36)
    n, height, ascent = [int(x) for x in header.split()]
    print("baseline", name, ascent, file=control, sep=",")

    chs = []
    # n+1 records are read: the extra record supplies the x of the
    # "next" glyph, from which the last source width is inferred.
    for i in range(n + 1):
        bs = inp.read(6)
        # Little-endian: unsigned short x, then 4 unsigned bytes.
        ch = FontChar(*struct.unpack("<HBBBB", bs))
        chs.append(ch)

    tar = tarfile.open(mode="w|", fileobj=out)

    # Start at 0, increment for every image output
    # (recall that not every input glyph has an output image)
    output_index = 0

    for i in range(n):
        ch = chs[i]
        if ch.width == 0:
            # Advance width 0 marks an undefined glyph: no output.
            continue
        print("png", "index", output_index, "glyph", name, i, file=control, sep=",")
        info = dict(image.info, size=(ch.width, height))
        target = new_image(info)
        source_width = chs[i + 1].x - ch.x
        rect = ((ch.left, ch.top), (ch.left + source_width, ch.bottom))
        image_draw(target, rect, image, (ch.x, ch.top))
        # :todo: add source, glyph, and baseline data here (as a
        # private tag?)
        o = io.BytesIO()
        target.write(o)
        binary_size = o.tell()
        o.seek(0)
        # gettarinfo copies file metadata from `inp`; the size must be
        # overridden with the in-memory PNG's size.
        tarinfo = tar.gettarinfo(arcname="%s/glyph%d.png" % (name, i), fileobj=inp)
        tarinfo.size = binary_size
        tar.addfile(tarinfo, fileobj=o)
        output_index += 1
    tar.close()
def new_image(info):
    """Return a fresh png.Image instance, zero-filled, with the size
    and planes given by `info`."""
    width, height = info["size"]
    values_per_row = width * info["planes"]
    rows = [[0] * values_per_row for _ in range(height)]
    return png.Image(rows, info)
def image_draw(target, rect, source, point):
    """The point `point` in the source image is aligned with the
    top-left of rect in the target image, and then the rectangle
    in target is replaced with the pixels from `source`.

    This routine assumes that both source and target can have
    their rows objects indexed (not streamed).
    """

    # :todo: there is no attempt to do clipping or channel or
    # colour conversion. But maybe later?
    if target.info["planes"] != source.info["planes"]:
        raise NotImplementedError(
            "source and target must have the same number of planes"
        )
    if target.info["bitdepth"] != source.info["bitdepth"]:
        raise NotImplementedError("source and target must have the same bitdepth")

    tl, br = rect
    left, top = tl
    right, bottom = br
    height = bottom - top

    planes = source.info["planes"]
    # Rows are flat sequences holding `planes` values per pixel, so
    # pixel x-coordinates are scaled by `planes` to get value indexes.
    vpr = (right - left) * planes
    source_left, source_top = point
    source_l = source_left * planes
    source_r = source_l + vpr
    target_l = left * planes
    target_r = target_l + vpr

    for y in range(height):
        row = source.rows[y + source_top]
        row = row[source_l:source_r]
        # Splice the copied span into the target row in place.
        target.rows[top + y][target_l:target_r] = row
def main(argv=None):
    """Command line entry point: convert a Plan 9 image (or, with
    --font, a subfont) on the input to PNG (or a tar of PNGs) on
    binary stdout.

    `argv`, when given, is the full argument vector including the
    program name; it defaults to ``sys.argv``.
    """
    import argparse

    # Honour the `argv` parameter (previously ignored: parse_args()
    # always read sys.argv), so the function can also be driven
    # programmatically.
    if argv is None:
        argv = sys.argv
    parser = argparse.ArgumentParser(description="Convert Plan9 image to PNG")
    parser.add_argument(
        "input",
        nargs="?",
        default="-",
        type=png.cli_open,
        metavar="image",
        help="image file in Plan 9 format",
    )
    parser.add_argument(
        "--control",
        default=os.path.devnull,
        type=argparse.FileType("w"),
        metavar="ControlCSV",
        help="(when using --font) write a control CSV file to named file",
    )
    parser.add_argument(
        "--font",
        action="store_true",
        help="process as Plan 9 subfont: output a tar file of PNGs",
    )
    args = parser.parse_args(argv[1:])

    image = plan9_as_image(args.input)
    image.stream()
    if not args.font:
        image.write(png.binary_stdout())
    else:
        font_copy(args.input, image, png.binary_stdout(), args.control)


if __name__ == "__main__":
    sys.exit(main())

33
Backend/venv/bin/pripnglsch Executable file
View File

@@ -0,0 +1,33 @@
#!/home/gnx/Desktop/Hotel-Booking/Backend/venv/bin/python3
# pripnglsch
# PNG List Chunks
import png
def list_chunks(out, inp):
    """Print a one-line summary of each chunk of the PNG on `inp` to
    `out`: chunk type, byte length, and the content as hex (chunks of
    28 bytes or fewer in full; longer ones truncated to 26 bytes with
    a trailing "...").
    """
    r = png.Reader(file=inp)
    for t, v in r.chunks():
        add = ""
        if len(v) <= 28:
            add = " " + v.hex()
        else:
            add = " " + v[:26].hex() + "..."
        t = t.decode("ascii")
        print("%s %10d%s" % (t, len(v), add), file=out)
def main(argv=None):
    """Command line entry point: list the chunks of a PNG file.

    `argv`, when given, is the full argument vector including the
    program name; it defaults to ``sys.argv``.
    """
    import argparse
    import sys

    # Honour the `argv` parameter (previously ignored: parse_args()
    # always read sys.argv), so the tool can be invoked
    # programmatically as well.
    if argv is None:
        argv = sys.argv
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "input", nargs="?", default="-", type=png.cli_open, metavar="PNG"
    )
    args = parser.parse_args(argv[1:])
    return list_chunks(sys.stdout, args.input)


if __name__ == "__main__":
    main()

101
Backend/venv/bin/pripngtopam Executable file
View File

@@ -0,0 +1,101 @@
#!/home/gnx/Desktop/Hotel-Booking/Backend/venv/bin/python3
import struct
import png
def write_pnm(file, plain, rows, meta):
    """
    Write a Netpbm PNM (or PAM) file.

    *file* output file object;
    *plain* (a bool) true if writing plain format (not possible for PAM);
    *rows* an iterator for the rows;
    *meta* the info dictionary.
    """
    meta = dict(meta)
    meta["maxval"] = 2 ** meta["bitdepth"] - 1
    meta["width"], meta["height"] = meta["size"]

    # The number of planes picks the output format:
    #   1 -> PGM, 3 -> PPM, 2 or 4 -> PAM.
    planes = meta["planes"]
    # Assume inputs are from a PNG file.
    assert planes in (1, 2, 3, 4)
    if planes in (1, 3):
        if planes == 1:
            # PGM.
            # Even if maxval is 1 we use PGM instead of PBM,
            # to avoid converting data.
            magic = "P2" if plain else "P5"
        else:
            # PPM
            magic = "P3" if plain else "P6"
        header = "{magic} {width:d} {height:d} {maxval:d}\n".format(
            magic=magic, **meta
        )
    else:
        # PAM
        # See http://netpbm.sourceforge.net/doc/pam.html
        if plain:
            raise Exception("PAM (%d-plane) does not support plain format" % planes)
        tupltype = "GRAYSCALE_ALPHA" if planes == 2 else "RGB_ALPHA"
        header = (
            "P7\nWIDTH {width:d}\nHEIGHT {height:d}\n"
            "DEPTH {planes:d}\nMAXVAL {maxval:d}\n"
            "TUPLTYPE {tupltype}\nENDHDR\n".format(tupltype=tupltype, **meta)
        )
    file.write(header.encode("ascii"))

    if plain:
        for row in rows:
            file.write(b" ".join(b"%d" % value for value in row))
            file.write(b"\n")
    else:
        # One struct format packs a whole row: big-endian, one value
        # per sample, 2 bytes per sample when maxval needs 16 bits.
        vpr = planes * meta["width"]
        code = "H" if meta["maxval"] > 0xFF else "B"
        packer = struct.Struct(">%d%s" % (vpr, code))
        for row in rows:
            file.write(packer.pack(*row))
    file.flush()
def main(argv=None):
    """Command line entry point: convert the PNG on input to PNM/PAM
    on binary stdout.

    `argv`, when given, is the full argument vector including the
    program name; it defaults to ``sys.argv``.
    """
    import argparse
    import sys

    # Honour the `argv` parameter (previously ignored: parse_args()
    # always read sys.argv), so the tool can be driven
    # programmatically as well.
    if argv is None:
        argv = sys.argv
    parser = argparse.ArgumentParser(description="Convert PNG to PAM")
    parser.add_argument("--plain", action="store_true")
    parser.add_argument(
        "input", nargs="?", default="-", type=png.cli_open, metavar="PNG"
    )
    args = parser.parse_args(argv[1:])

    # Encode PNG to PNM (or PAM)
    image = png.Reader(file=args.input)
    _, _, rows, info = image.asDirect()
    write_pnm(png.binary_stdout(), args.plain, rows, info)


if __name__ == "__main__":
    import sys

    sys.exit(main())

71
Backend/venv/bin/prirowpng Executable file
View File

@@ -0,0 +1,71 @@
#!/home/gnx/Desktop/Hotel-Booking/Backend/venv/bin/python3
# http://www.python.org/doc/2.4.4/lib/module-itertools.html
import itertools
import sys
import png
Description = """Join PNG images in a row left-to-right."""
class FormatError(Exception):
    """
    Some problem with the image format.

    Raised when the input images' heights do not match.
    """
def join_row(out, l):
    """
    Concatenate the list of images.
    All input images must be same height and
    have the same number of channels.
    They are concatenated left-to-right.
    `out` is the (open file) destination for the output image.
    `l` should be a list of open files (the input image files).
    """

    l = [png.Reader(file=f) for f in l]

    # Ewgh, side effects.
    # preamble() reads each header so width/height become available.
    for r in l:
        r.preamble()

    # The reference height; from the first image.
    height = l[0].height
    # The total target width
    width = 0
    for i,r in enumerate(l):
        if r.height != height:
            raise FormatError('Image %d, height %d, does not match %d.' %
              (i, r.height, height))
        width += r.width

    # Various bugs here because different numbers of channels and depths go wrong.
    pixel, info = zip(*[r.asDirect()[2:4] for r in l])
    tinfo = dict(info[0])
    del tinfo['size']
    w = png.Writer(width, height, **tinfo)

    def iter_all_rows():
        """Yield each output row: the input rows chained left-to-right."""
        for row in zip(*pixel):
            # `row` is a sequence that has one row from each input image.
            # list() is required here to hasten the lazy row building;
            # not sure if that's a bug in PyPNG or not.
            yield list(itertools.chain(*row))

    w.write(out, iter_all_rows())
def main(argv):
    """Command line entry point: join the input PNGs left-to-right and
    write the combined image to binary stdout.

    `argv` is the full argument vector including the program name
    (i.e. ``sys.argv``).
    """
    import argparse

    parser = argparse.ArgumentParser(description=Description)
    parser.add_argument(
        "input", nargs="*", default="-", type=png.cli_open, metavar="PNG"
    )
    # Honour `argv` (previously ignored: parse_args() always read
    # sys.argv); skip the program name.
    # NOTE(review): with nargs="*" a *string* default is not wrapped
    # in a list by argparse — confirm the zero-argument case behaves
    # as intended in join_row.
    args = parser.parse_args(argv[1:])
    return join_row(png.binary_stdout(), args.input)


if __name__ == '__main__':
    main(sys.argv)

215
Backend/venv/bin/priweavepng Executable file
View File

@@ -0,0 +1,215 @@
#!/home/gnx/Desktop/Hotel-Booking/Backend/venv/bin/python3
# priweavepng
# Weave selected channels from input PNG files into
# a multi-channel output PNG.
import collections
import re
from array import array
import png
"""
priweavepng file1.png [file2.png ...]
The `priweavepng` tool combines channels from the input images and
weaves a selection of those channels into an output image.
Conceptually an intermediate image is formed consisting of
all channels of all input images in the order given on the command line
and in the order of each channel in its image.
Then from 1 to 4 channels are selected and
an image is output with those channels.
The limit on the number of selected channels is
imposed by the PNG image format.
The `-c n` option selects channel `n`.
Further channels can be selected either by repeating the `-c` option,
or using a comma separated list.
For example `-c 3,2,1` will select channels 3, 2, and 1 in that order;
if the input is an RGB PNG, this will swop the Red and Blue channels.
The order is significant, the order in which the options are given is
the order of the output channels.
It is permissible, and sometimes useful
(for example, grey to colour expansion, see below),
to repeat the same channel.
If no `-c` option is used the default is
to select all of the input channels, up to the first 4.
`priweavepng` does not care about the meaning of the channels
and treats them as a matrix of values.
The number of output channels determines the colour mode of the PNG file:
L (1-channel, Grey), LA (2-channel, Grey+Alpha),
RGB (3-channel, Red+Green+Blue), RGBA (4-channel, Red+Green+Blue+Alpha).
The `priweavepng` tool can be used for a variety of
channel building, swopping, and extraction effects:
Combine 3 grayscale images into RGB colour:
priweavepng grey1.png grey2.png grey3.png
Swop Red and Blue channels in colour image:
priweavepng -c 3 -c 2 -c 1 rgb.png
Extract Green channel as a greyscale image:
priweavepng -c 2 rgb.png
Convert a greyscale image to a colour image (all grey):
priweavepng -c 1 -c 1 -c 1 grey.png
Add alpha mask from a separate (greyscale) image:
priweavepng rgb.png grey.png
Extract alpha mask into a separate (greyscale) image:
priweavepng -c 4 rgba.png
Steal alpha mask from second file and add to first.
Note that the intermediate image in this example has 7 channels:
priweavepng -c 1 -c 2 -c 3 -c 7 rgb.png rgba.png
Take Green channel from 3 successive colour images to make a new RGB image:
priweavepng -c 2 -c 5 -c 8 rgb1.png rgb2.png rgb3.png
"""
# A loaded input image: `rows` is the list of rows, `info` the PNG
# info dictionary.
Image = collections.namedtuple("Image", "rows info")


# For each channel in the intermediate raster,
# model:
# - image: the input image (0-based);
# - i: the channel index within that image (0-based);
# - bitdepth: the bitdepth of this channel.
Channel = collections.namedtuple("Channel", "image i bitdepth")


class Error(Exception):
    """Error in command usage or in the input images."""
    pass
def weave(out, args):
    """Stack the input PNG files and extract channels
    into a single output PNG.

    `args.input` lists the input paths; `args.channel` the 1-based
    channel selections (defaulting below to the first 4 channels of
    the stacked inputs).  The woven PNG is written to the file
    object `out`.
    """

    paths = args.input
    if len(paths) < 1:
        raise Error("Required input is missing.")

    # List of Image instances
    images = []
    # Channel map. Maps from channel number (starting from 1)
    # to an (image_index, channel_index) pair.
    channel_map = dict()
    channel = 1
    for image_index, path in enumerate(paths):
        inp = png.cli_open(path)
        rows, info = png.Reader(file=inp).asDirect()[2:]
        rows = list(rows)
        image = Image(rows, info)
        images.append(image)
        # A later version of PyPNG may intelligently support
        # PNG files with heterogenous bitdepths.
        # For now, assumes bitdepth of all channels in image
        # is the same.
        channel_bitdepth = (image.info["bitdepth"],) * image.info["planes"]
        for i in range(image.info["planes"]):
            channel_map[channel + i] = Channel(image_index, i, channel_bitdepth[i])
        channel += image.info["planes"]

    assert channel - 1 == sum(image.info["planes"] for image in images)

    # If no channels, select up to first 4 as default.
    if not args.channel:
        args.channel = range(1, channel)[:4]

    out_channels = len(args.channel)
    if not (0 < out_channels <= 4):
        raise Error("Too many channels selected (must be 1 to 4)")
    alpha = out_channels in (2, 4)
    greyscale = out_channels in (1, 2)

    # NOTE(review): `bitdepth` stays a tuple (one entry per input
    # image) and is passed to png.Writer below — confirm the installed
    # PyPNG accepts a tuple bitdepth.
    bitdepth = tuple(image.info["bitdepth"] for image in images)
    # "B" array for 8-bit samples, "H" for deeper ones.
    arraytype = "BH"[max(bitdepth) > 8]

    size = [image.info["size"] for image in images]
    # Currently, fail unless all images same size.
    if len(set(size)) > 1:
        raise NotImplementedError("Cannot cope when sizes differ - sorry!")
    size = size[0]

    # Values per row, of output image
    vpr = out_channels * size[0]

    def weave_row_iter():
        """
        Yield each woven row in turn.
        """
        # The zip call creates an iterator that yields
        # a tuple with each element containing the next row
        # for each of the input images.
        for row_tuple in zip(*(image.rows for image in images)):
            # output row
            row = array(arraytype, [0] * vpr)
            # for each output channel select correct input channel
            for out_channel_i, selection in enumerate(args.channel):
                channel = channel_map[selection]
                # incoming row (make it an array)
                irow = array(arraytype, row_tuple[channel.image])
                n = images[channel.image].info["planes"]
                # Strided assignment drops each selected sample into
                # the out_channel_i-th interleaved slot of the row.
                row[out_channel_i::out_channels] = irow[channel.i :: n]
            yield row

    w = png.Writer(
        size[0],
        size[1],
        greyscale=greyscale,
        alpha=alpha,
        bitdepth=bitdepth,
        interlace=args.interlace,
    )
    w.write(out, weave_row_iter())
def comma_list(s):
    """Parse the runs of digits in `s` (e.g. "3,2,1") and return them
    as a list of integers (argparse `type` helper)."""
    return [int(digits) for digits in re.findall(r"\d+", s)]
def main(argv=None):
    """Command line entry point: weave the selected channels of the
    input PNGs into one PNG on binary stdout."""
    import argparse
    import itertools
    import sys

    if argv is None:
        argv = sys.argv
    argv = argv[1:]

    parser = argparse.ArgumentParser()
    parser.add_argument(
        "-c",
        "--channel",
        action="append",
        type=comma_list,
        help="list of channels to extract",
    )
    parser.add_argument("--interlace", action="store_true", help="write interlaced PNG")
    parser.add_argument("input", nargs="+")
    args = parser.parse_args(argv)
    if args.channel:
        # Each -c option was parsed into a list; flatten them into
        # one ordered channel-selection list.
        args.channel = list(itertools.chain(*args.channel))
    return weave(png.binary_stdout(), args)


if __name__ == "__main__":
    main()

7
Backend/venv/bin/qr Executable file
View File

@@ -0,0 +1,7 @@
#!/home/gnx/Desktop/Hotel-Booking/Backend/venv/bin/python3
import sys
from qrcode.console_scripts import main
if __name__ == '__main__':
    # Strip a Windows ".exe" suffix from the program name —
    # presumably so usage/help output shows "qr"; confirm upstream.
    if sys.argv[0].endswith('.exe'):
        sys.argv[0] = sys.argv[0][:-4]
    sys.exit(main())

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,31 @@
# Copyright (C) AB Strakt
# See LICENSE for details.
"""
pyOpenSSL - A simple wrapper around the OpenSSL library
"""
from OpenSSL import SSL, crypto
from OpenSSL.version import (
__author__,
__copyright__,
__email__,
__license__,
__summary__,
__title__,
__uri__,
__version__,
)
# Public names re-exported by the OpenSSL package (kept sorted).
__all__ = [
    "SSL",
    "__author__",
    "__copyright__",
    "__email__",
    "__license__",
    "__summary__",
    "__title__",
    "__uri__",
    "__version__",
    "crypto",
]

View File

@@ -0,0 +1,129 @@
from __future__ import annotations
import os
import sys
import warnings
from typing import Any, Callable, NoReturn, Union
from cryptography.hazmat.bindings.openssl.binding import Binding
# os.PathLike is subscriptable only from Python 3.9 onwards.
if sys.version_info >= (3, 9):
    StrOrBytesPath = Union[str, bytes, os.PathLike[str], os.PathLike[bytes]]
else:
    StrOrBytesPath = Union[str, bytes, os.PathLike]

# Shared CFFI binding to the OpenSSL library used throughout pyOpenSSL.
binding = Binding()
ffi = binding.ffi
lib: Any = binding.lib

# This is a special CFFI allocator that does not bother to zero its memory
# after allocation. This has vastly better performance on large allocations and
# so should be used whenever we don't need the memory zeroed out.
no_zero_allocator = ffi.new_allocator(should_clear_after_alloc=False)
def text(charp: Any) -> str:
    """
    Get a native string type representing of the given CFFI ``char*`` object.

    :param charp: A C-style string represented using CFFI.

    :return: :class:`str`
    """
    # A NULL (falsy) C string decodes to the empty string.
    return ffi.string(charp).decode("utf-8") if charp else ""
def exception_from_error_queue(exception_type: type[Exception]) -> NoReturn:
    """
    Convert an OpenSSL library failure into a Python exception.

    When a call to the native OpenSSL library fails, this is usually signalled
    by the return value, and an error code is stored in an error queue
    associated with the current thread. The err library provides functions to
    obtain these error codes and textual error messages.

    Always raises `exception_type`, constructed with the list of
    (library, function, reason) string triples drained from the queue.
    """
    errors = []

    # Drain the thread's entire error queue; ERR_get_error returns 0
    # when the queue is empty.
    while True:
        error = lib.ERR_get_error()
        if error == 0:
            break
        errors.append(
            (
                text(lib.ERR_lib_error_string(error)),
                text(lib.ERR_func_error_string(error)),
                text(lib.ERR_reason_error_string(error)),
            )
        )

    raise exception_type(errors)
def make_assert(error: type[Exception]) -> Callable[[bool], Any]:
    """
    Create an assert function that uses :func:`exception_from_error_queue` to
    raise an exception wrapped by *error*.
    """

    def openssl_assert(ok: bool) -> None:
        """
        If *ok* is not True, retrieve the error from OpenSSL and raise it.
        """
        if ok is True:
            return
        exception_from_error_queue(error)

    return openssl_assert
def path_bytes(s: StrOrBytesPath) -> bytes:
    """
    Convert a Python path to a :py:class:`bytes` for the path which can be
    passed into an OpenSSL API accepting a filename.

    :param s: A path (valid for os.fspath).

    :return: An instance of :py:class:`bytes`.
    """
    fspath = os.fspath(s)
    if isinstance(fspath, bytes):
        return fspath
    # str paths are encoded with the filesystem encoding.
    return fspath.encode(sys.getfilesystemencoding())
def byte_string(s: str) -> bytes:
    """Encode the text *s* to bytes using the "charmap" codec."""
    encoded = s.encode("charmap")
    return encoded
# A marker object to observe whether some optional arguments are passed any
# value or not.
UNSPECIFIED = object()

# Message template for the DeprecationWarning emitted by
# text_to_bytes_and_warn below.
_TEXT_WARNING = "str for {0} is no longer accepted, use bytes"
def text_to_bytes_and_warn(label: str, obj: Any) -> Any:
    """
    If ``obj`` is text, emit a warning that it should be bytes instead and try
    to convert it to bytes automatically.

    :param str label: The name of the parameter from which ``obj`` was taken
        (so a developer can easily find the source of the problem and correct
        it).

    :return: If ``obj`` is the text string type, a ``bytes`` object giving the
        UTF-8 encoding of that text is returned.  Otherwise, ``obj`` itself is
        returned.
    """
    if not isinstance(obj, str):
        # Non-text values pass through untouched.
        return obj
    warnings.warn(
        _TEXT_WARNING.format(label),
        category=DeprecationWarning,
        stacklevel=3,
    )
    return obj.encode("utf-8")

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,40 @@
import ssl
import sys
import cffi
import cryptography
import OpenSSL.SSL
from . import version
# Human-readable summary of the OpenSSL-related environment
# (versions, interpreter, platform); printed when this module is run
# directly — handy for bug reports.
_env_info = """\
pyOpenSSL: {pyopenssl}
cryptography: {cryptography}
cffi: {cffi}
cryptography's compiled against OpenSSL: {crypto_openssl_compile}
cryptography's linked OpenSSL: {crypto_openssl_link}
Python's OpenSSL: {python_openssl}
Python executable: {python}
Python version: {python_version}
Platform: {platform}
sys.path: {sys_path}""".format(
    pyopenssl=version.__version__,
    crypto_openssl_compile=OpenSSL._util.ffi.string(
        OpenSSL._util.lib.OPENSSL_VERSION_TEXT,
    ).decode("ascii"),
    crypto_openssl_link=OpenSSL.SSL.SSLeay_version(
        OpenSSL.SSL.SSLEAY_VERSION
    ).decode("ascii"),
    python_openssl=getattr(ssl, "OPENSSL_VERSION", "n/a"),
    cryptography=cryptography.__version__,
    cffi=cffi.__version__,
    python=sys.executable,
    python_version=sys.version,
    platform=sys.platform,
    sys_path=sys.path,
)

if __name__ == "__main__":
    print(_env_info)

View File

@@ -0,0 +1,50 @@
"""
PRNG management routines, thin wrappers.
"""
from __future__ import annotations
import warnings
from OpenSSL._util import lib as _lib
# Importing this module is itself deprecated: warn at import time.
warnings.warn(
    "OpenSSL.rand is deprecated - you should use os.urandom instead",
    DeprecationWarning,
    stacklevel=3,
)
def add(buffer: bytes, entropy: int) -> None:
    """
    Mix bytes from *buffer* into the PRNG state.

    The *entropy* argument is (the lower bound of) an estimate of how much
    randomness is contained in *buffer*, measured in bytes.

    For more information, see e.g. :rfc:`1750`.

    This function is only relevant if you are forking Python processes and
    need to reseed the CSPRNG after fork.

    :param buffer: Buffer with random data.
    :param entropy: The entropy (in bytes) measurement of the buffer.

    :return: :obj:`None`

    :raises TypeError: if *buffer* is not bytes or *entropy* not an int.
    """
    if not isinstance(buffer, bytes):
        raise TypeError("buffer must be a byte string")

    if not isinstance(entropy, int):
        raise TypeError("entropy must be an integer")

    _lib.RAND_add(buffer, len(buffer), entropy)
def status() -> int:
"""
Check whether the PRNG has been seeded with enough data.
:return: 1 if the PRNG is seeded enough, 0 otherwise.
"""
return _lib.RAND_status()

View File

@@ -0,0 +1,28 @@
# Copyright (C) AB Strakt
# Copyright (C) Jean-Paul Calderone
# See LICENSE for details.
"""
pyOpenSSL - A simple wrapper around the OpenSSL library
"""
__all__ = [
"__author__",
"__copyright__",
"__email__",
"__license__",
"__summary__",
"__title__",
"__uri__",
"__version__",
]
__version__ = "25.3.0"
__title__ = "pyOpenSSL"
__uri__ = "https://pyopenssl.org/"
__summary__ = "Python wrapper module around the OpenSSL library"
__author__ = "The pyOpenSSL developers"
__email__ = "cryptography-dev@python.org"
__license__ = "Apache License, Version 2.0"
__copyright__ = f"Copyright 2001-2025 {__author__}"

View File

@@ -1,173 +0,0 @@
cryptography-41.0.7.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
cryptography-41.0.7.dist-info/LICENSE,sha256=Pgx8CRqUi4JTO6mP18u0BDLW8amsv4X1ki0vmak65rs,197
cryptography-41.0.7.dist-info/LICENSE.APACHE,sha256=qsc7MUj20dcRHbyjIJn2jSbGRMaBOuHk8F9leaomY_4,11360
cryptography-41.0.7.dist-info/LICENSE.BSD,sha256=YCxMdILeZHndLpeTzaJ15eY9dz2s0eymiSMqtwCPtPs,1532
cryptography-41.0.7.dist-info/METADATA,sha256=h4C2cL9sbR7ObF6jD7hUT7xOfSvzZBli6AmX-vngctA,5159
cryptography-41.0.7.dist-info/RECORD,,
cryptography-41.0.7.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
cryptography-41.0.7.dist-info/WHEEL,sha256=Bnup3_Y_tMShHsCuO2E9NdrjRJkTtSD1dYVt3WSGhpU,112
cryptography-41.0.7.dist-info/top_level.txt,sha256=KNaT-Sn2K4uxNaEbe6mYdDn3qWDMlp4y-MtWfB73nJc,13
cryptography/__about__.py,sha256=uPXMbbcptt7EzZ_jllGRx0pVdMn-NBsAM4L74hOv-b0,445
cryptography/__init__.py,sha256=iVPlBlXWTJyiFeRedxcbMPhyHB34viOM10d72vGnWuE,364
cryptography/__pycache__/__about__.cpython-312.pyc,,
cryptography/__pycache__/__init__.cpython-312.pyc,,
cryptography/__pycache__/exceptions.cpython-312.pyc,,
cryptography/__pycache__/fernet.cpython-312.pyc,,
cryptography/__pycache__/utils.cpython-312.pyc,,
cryptography/exceptions.py,sha256=EHe7XM2_OtdOM1bZE0ci-4GUhtOlEQ6fQXhK2Igf0qA,1118
cryptography/fernet.py,sha256=TVZy4Dtkpl7kWIpvuKcNldE95IEjTQ0MfHgRsLdnDSM,6886
cryptography/hazmat/__init__.py,sha256=5IwrLWrVp0AjEr_4FdWG_V057NSJGY_W4egNNsuct0g,455
cryptography/hazmat/__pycache__/__init__.cpython-312.pyc,,
cryptography/hazmat/__pycache__/_oid.cpython-312.pyc,,
cryptography/hazmat/_oid.py,sha256=gxhMHKpu9Xsi6uHCGZ_-soYMXj_izOIFaxjUKWbCPeE,14441
cryptography/hazmat/backends/__init__.py,sha256=O5jvKFQdZnXhKeqJ-HtulaEL9Ni7mr1mDzZY5kHlYhI,361
cryptography/hazmat/backends/__pycache__/__init__.cpython-312.pyc,,
cryptography/hazmat/backends/openssl/__init__.py,sha256=p3jmJfnCag9iE5sdMrN6VvVEu55u46xaS_IjoI0SrmA,305
cryptography/hazmat/backends/openssl/__pycache__/__init__.cpython-312.pyc,,
cryptography/hazmat/backends/openssl/__pycache__/aead.cpython-312.pyc,,
cryptography/hazmat/backends/openssl/__pycache__/backend.cpython-312.pyc,,
cryptography/hazmat/backends/openssl/__pycache__/ciphers.cpython-312.pyc,,
cryptography/hazmat/backends/openssl/__pycache__/cmac.cpython-312.pyc,,
cryptography/hazmat/backends/openssl/__pycache__/decode_asn1.cpython-312.pyc,,
cryptography/hazmat/backends/openssl/__pycache__/ec.cpython-312.pyc,,
cryptography/hazmat/backends/openssl/__pycache__/rsa.cpython-312.pyc,,
cryptography/hazmat/backends/openssl/__pycache__/utils.cpython-312.pyc,,
cryptography/hazmat/backends/openssl/aead.py,sha256=s3zXcVQf0COIOuOzI8usebWpznGnyZ7GhnmlJYu7QXA,15967
cryptography/hazmat/backends/openssl/backend.py,sha256=491FCrjeOG7S9bXskUosirXFP84ntwAQ-U0BxcibtqM,73321
cryptography/hazmat/backends/openssl/ciphers.py,sha256=lxWrvnufudsDI2bpwNs2c8XLILbAE2j2rMSD1nhnPVg,10358
cryptography/hazmat/backends/openssl/cmac.py,sha256=pHgQOIRfR4cIDa5ltcKFtgjqPTXbOLyRQmmqv9JlbUk,3035
cryptography/hazmat/backends/openssl/decode_asn1.py,sha256=kz6gys8wuJhrx4QyU6enYx7UatNHr0LB3TI1jH3oQ54,1148
cryptography/hazmat/backends/openssl/ec.py,sha256=GKzh3mZKvgsM1jqM88-4XikHHalpV-Efyskclt8yxYg,11474
cryptography/hazmat/backends/openssl/rsa.py,sha256=P_ak-2zvA6VBt_P0ldzTSCUkcjo2GhYt_HLn8CVvWtE,21825
cryptography/hazmat/backends/openssl/utils.py,sha256=UoguO26QzwN4lsMAltsIrgAlbi3SOeSrexZs1-QPNu8,2190
cryptography/hazmat/bindings/__init__.py,sha256=s9oKCQ2ycFdXoERdS1imafueSkBsL9kvbyfghaauZ9Y,180
cryptography/hazmat/bindings/__pycache__/__init__.cpython-312.pyc,,
cryptography/hazmat/bindings/_rust.abi3.so,sha256=qkbrd72TN7vk0ivAz_VE-ZefNyDxCLwLSiAzhgMF8-Q,13787648
cryptography/hazmat/bindings/_rust/__init__.pyi,sha256=IumK7zP9Ko3HjLLb5hwZiY2rbfmfsuyTZLLcHOMvSdk,981
cryptography/hazmat/bindings/_rust/_openssl.pyi,sha256=mpNJLuYLbCVrd5i33FBTmWwL_55Dw7JPkSLlSX9Q7oI,230
cryptography/hazmat/bindings/_rust/asn1.pyi,sha256=9CyI-grOsLQB_hfnhJPoG9dNOdJ7Zg6B0iUpzCowh44,592
cryptography/hazmat/bindings/_rust/exceptions.pyi,sha256=exXr2xw_0pB1kk93cYbM3MohbzoUkjOms1ZMUi0uQZE,640
cryptography/hazmat/bindings/_rust/ocsp.pyi,sha256=RzVaLkY0y9L8W8opAL_uVD8bySKxP23pSQtEbLOStXI,905
cryptography/hazmat/bindings/_rust/openssl/__init__.pyi,sha256=j764U4RRBZbDuOfjQxRqU7rCf74kgM-3AnTIjLdRy3E,970
cryptography/hazmat/bindings/_rust/openssl/dh.pyi,sha256=0FVY1t5qM9HV_ZKDIcdJI2a72i1fHKyTvYIJb5UnH4M,896
cryptography/hazmat/bindings/_rust/openssl/dsa.pyi,sha256=43in4PCsm2kz_H7RQFLBKqhDsUmb4yWop6dpYeVDg-4,764
cryptography/hazmat/bindings/_rust/openssl/ed25519.pyi,sha256=E2GXAgibfRGqKxskH8MfZI8gHFoMJJOTjG7Elg2gOww,629
cryptography/hazmat/bindings/_rust/openssl/ed448.pyi,sha256=pk_kx5Biq8O53d2joOT-cXuwCrbFPicV7iaqYdeiIAI,603
cryptography/hazmat/bindings/_rust/openssl/hashes.pyi,sha256=J8HoN0GdtPcjRAfNHr5Elva_nkmQfq63L75_z9dd8Uc,573
cryptography/hazmat/bindings/_rust/openssl/hmac.pyi,sha256=ZmLJ73pmxcZFC1XosWEiXMRYtvJJor3ZLdCQOJu85Cw,662
cryptography/hazmat/bindings/_rust/openssl/kdf.pyi,sha256=wPS5c7NLspM2632II0I4iH1RSxZvSRtBOVqmpyQATfk,544
cryptography/hazmat/bindings/_rust/openssl/poly1305.pyi,sha256=9iogF7Q4i81IkOS-IMXp6HvxFF_3cNy_ucrAjVQnn14,540
cryptography/hazmat/bindings/_rust/openssl/x25519.pyi,sha256=-1F5QDZfrdhmDLKTeSERuuDUHBTV-EhxIYk9mjpwcG4,616
cryptography/hazmat/bindings/_rust/openssl/x448.pyi,sha256=SdL4blscYBEvuWY4SuNAY1s5zFaGj38eQ-bulVBZvFg,590
cryptography/hazmat/bindings/_rust/pkcs7.pyi,sha256=VkTC78wjJgb_qrboOYIFPuFZ3W46zsr6zsxnlrOMwao,460
cryptography/hazmat/bindings/_rust/x509.pyi,sha256=j6AbXBZSXeJHLSrXnaapbiPfle-znfk9uJUa_zqxgy4,1878
cryptography/hazmat/bindings/openssl/__init__.py,sha256=s9oKCQ2ycFdXoERdS1imafueSkBsL9kvbyfghaauZ9Y,180
cryptography/hazmat/bindings/openssl/__pycache__/__init__.cpython-312.pyc,,
cryptography/hazmat/bindings/openssl/__pycache__/_conditional.cpython-312.pyc,,
cryptography/hazmat/bindings/openssl/__pycache__/binding.cpython-312.pyc,,
cryptography/hazmat/bindings/openssl/_conditional.py,sha256=DeECq7AKguhs390ZmxgItdqPLzyrKGJk-3KlHJMkXoY,9098
cryptography/hazmat/bindings/openssl/binding.py,sha256=0x3kzvq2grHu4gbbgEIzEVrX6unp71EEs1hx0o-uuOM,6696
cryptography/hazmat/primitives/__init__.py,sha256=s9oKCQ2ycFdXoERdS1imafueSkBsL9kvbyfghaauZ9Y,180
cryptography/hazmat/primitives/__pycache__/__init__.cpython-312.pyc,,
cryptography/hazmat/primitives/__pycache__/_asymmetric.cpython-312.pyc,,
cryptography/hazmat/primitives/__pycache__/_cipheralgorithm.cpython-312.pyc,,
cryptography/hazmat/primitives/__pycache__/_serialization.cpython-312.pyc,,
cryptography/hazmat/primitives/__pycache__/cmac.cpython-312.pyc,,
cryptography/hazmat/primitives/__pycache__/constant_time.cpython-312.pyc,,
cryptography/hazmat/primitives/__pycache__/hashes.cpython-312.pyc,,
cryptography/hazmat/primitives/__pycache__/hmac.cpython-312.pyc,,
cryptography/hazmat/primitives/__pycache__/keywrap.cpython-312.pyc,,
cryptography/hazmat/primitives/__pycache__/padding.cpython-312.pyc,,
cryptography/hazmat/primitives/__pycache__/poly1305.cpython-312.pyc,,
cryptography/hazmat/primitives/_asymmetric.py,sha256=RhgcouUB6HTiFDBrR1LxqkMjpUxIiNvQ1r_zJjRG6qQ,532
cryptography/hazmat/primitives/_cipheralgorithm.py,sha256=7LPkpw-DrgyvmBMUjvXeBvojVZPtXhFgfelUftnxPGw,1093
cryptography/hazmat/primitives/_serialization.py,sha256=U0DU0ZzOLJppCQsh9EJH6vGYoHotBolfNyRyx3wr1l0,5216
cryptography/hazmat/primitives/asymmetric/__init__.py,sha256=s9oKCQ2ycFdXoERdS1imafueSkBsL9kvbyfghaauZ9Y,180
cryptography/hazmat/primitives/asymmetric/__pycache__/__init__.cpython-312.pyc,,
cryptography/hazmat/primitives/asymmetric/__pycache__/dh.cpython-312.pyc,,
cryptography/hazmat/primitives/asymmetric/__pycache__/dsa.cpython-312.pyc,,
cryptography/hazmat/primitives/asymmetric/__pycache__/ec.cpython-312.pyc,,
cryptography/hazmat/primitives/asymmetric/__pycache__/ed25519.cpython-312.pyc,,
cryptography/hazmat/primitives/asymmetric/__pycache__/ed448.cpython-312.pyc,,
cryptography/hazmat/primitives/asymmetric/__pycache__/padding.cpython-312.pyc,,
cryptography/hazmat/primitives/asymmetric/__pycache__/rsa.cpython-312.pyc,,
cryptography/hazmat/primitives/asymmetric/__pycache__/types.cpython-312.pyc,,
cryptography/hazmat/primitives/asymmetric/__pycache__/utils.cpython-312.pyc,,
cryptography/hazmat/primitives/asymmetric/__pycache__/x25519.cpython-312.pyc,,
cryptography/hazmat/primitives/asymmetric/__pycache__/x448.cpython-312.pyc,,
cryptography/hazmat/primitives/asymmetric/dh.py,sha256=XsthqjvExWWOyePs0PxT4MestU9QeGuL-Hx7fWzTguQ,7013
cryptography/hazmat/primitives/asymmetric/dsa.py,sha256=aaTY7EMLTzaWs-jhOMpMAfa2GnfhoqsCKZPKAs35L40,8263
cryptography/hazmat/primitives/asymmetric/ec.py,sha256=L1WoWPYevJ6Pk2T1etbnHbvr6AeXFccckPNNiyUVoNM,12867
cryptography/hazmat/primitives/asymmetric/ed25519.py,sha256=wl2NCCP4bZdUCqZGMkOOd6eaxjU1vXPAIwzUuFPE__w,3489
cryptography/hazmat/primitives/asymmetric/ed448.py,sha256=2MCJ87qcyCCsjj0OvrfWFxPX8CgaC3d0mr78bt_vDIY,3440
cryptography/hazmat/primitives/asymmetric/padding.py,sha256=6p8Ojiax_2tcm1aTnNOAkinriCJ67nSTxugg34f-hzk,2717
cryptography/hazmat/primitives/asymmetric/rsa.py,sha256=vxvOryF00WL8mZQv9bs_-LlgobYLiPYfX246_j_ICtA,11623
cryptography/hazmat/primitives/asymmetric/types.py,sha256=LnsOJym-wmPUJ7Knu_7bCNU3kIiELCd6krOaW_JU08I,2996
cryptography/hazmat/primitives/asymmetric/utils.py,sha256=DPTs6T4F-UhwzFQTh-1fSEpQzazH2jf2xpIro3ItF4o,790
cryptography/hazmat/primitives/asymmetric/x25519.py,sha256=8YJAIaU7w09jTnPU_cLwd98fMHIECgfA3R7P3Ktv-CA,3437
cryptography/hazmat/primitives/asymmetric/x448.py,sha256=y-Yj-rgciiuH1g6FJLZftvAqgOnzT1on9gCisru7vBc,3358
cryptography/hazmat/primitives/ciphers/__init__.py,sha256=kAyb9NSczqTrCWj0HEoVp3Cxo7AHW8ibPFQz-ZHsOtA,680
cryptography/hazmat/primitives/ciphers/__pycache__/__init__.cpython-312.pyc,,
cryptography/hazmat/primitives/ciphers/__pycache__/aead.cpython-312.pyc,,
cryptography/hazmat/primitives/ciphers/__pycache__/algorithms.cpython-312.pyc,,
cryptography/hazmat/primitives/ciphers/__pycache__/base.cpython-312.pyc,,
cryptography/hazmat/primitives/ciphers/__pycache__/modes.cpython-312.pyc,,
cryptography/hazmat/primitives/ciphers/aead.py,sha256=DY7qKmbt0bgB1GB7i-fQrbjEfwFG8wfUfVHvc7DA2YY,12067
cryptography/hazmat/primitives/ciphers/algorithms.py,sha256=SCDskXc9xyzsz0NjND6tAX8t17jYTbUB2sww1ub9GuY,5000
cryptography/hazmat/primitives/ciphers/base.py,sha256=PqNDltHdDxBhLhgtfO707H07sSOLA6ZVwjZlalOJTAo,8286
cryptography/hazmat/primitives/ciphers/modes.py,sha256=YJQXi4PJGIIZ1rgchbMH47Ed-YiUcUSjLPEOuV8rgGE,8361
cryptography/hazmat/primitives/cmac.py,sha256=YaeWksCYaqVoqf9zHRThAJ95ZvPUioAOfXwZUWiPzD8,2065
cryptography/hazmat/primitives/constant_time.py,sha256=xdunWT0nf8OvKdcqUhhlFKayGp4_PgVJRU2W1wLSr_A,422
cryptography/hazmat/primitives/hashes.py,sha256=VJpnbK2sQN2bEqwRTOoCB4nuxYx5CnqFiScMJNyhsrI,5115
cryptography/hazmat/primitives/hmac.py,sha256=RpB3z9z5skirCQrm7zQbtnp9pLMnAjrlTUvKqF5aDDc,423
cryptography/hazmat/primitives/kdf/__init__.py,sha256=4XibZnrYq4hh5xBjWiIXzaYW6FKx8hPbVaa_cB9zS64,750
cryptography/hazmat/primitives/kdf/__pycache__/__init__.cpython-312.pyc,,
cryptography/hazmat/primitives/kdf/__pycache__/concatkdf.cpython-312.pyc,,
cryptography/hazmat/primitives/kdf/__pycache__/hkdf.cpython-312.pyc,,
cryptography/hazmat/primitives/kdf/__pycache__/kbkdf.cpython-312.pyc,,
cryptography/hazmat/primitives/kdf/__pycache__/pbkdf2.cpython-312.pyc,,
cryptography/hazmat/primitives/kdf/__pycache__/scrypt.cpython-312.pyc,,
cryptography/hazmat/primitives/kdf/__pycache__/x963kdf.cpython-312.pyc,,
cryptography/hazmat/primitives/kdf/concatkdf.py,sha256=wGYWgILmxQWnCPkbAH1RpsCHrdKgmYrCEVrCvXVGCo8,3726
cryptography/hazmat/primitives/kdf/hkdf.py,sha256=bBYr1yUIbOlJIEd6ZoLYcXm_yd-H54An9kNcFIJ3kbo,3045
cryptography/hazmat/primitives/kdf/kbkdf.py,sha256=qPL6TmDUmkus6CW3ylTJfG8N8egZhjQOyXrSyLLpnak,9232
cryptography/hazmat/primitives/kdf/pbkdf2.py,sha256=1CCH9Q5gXUpnZd3c8d8bCXgpJ3s2hZZGBnuG7FH1waM,2012
cryptography/hazmat/primitives/kdf/scrypt.py,sha256=4QONhjxA_ZtuQtQ7QV3FnbB8ftrFnM52B4HPfV7hFys,2354
cryptography/hazmat/primitives/kdf/x963kdf.py,sha256=S3B4Enk2Yxj9txpairotaXkavuZqQ6t6MB5a28U02ek,2002
cryptography/hazmat/primitives/keywrap.py,sha256=Qb_N2V_E1Dti5VtDXnrtTYtJDZ8aMpur8BY5yxrXclg,5678
cryptography/hazmat/primitives/padding.py,sha256=8pCeLaqwQPSGf51j06U5C_INvgYWVWPv3m9mxUERGmU,6242
cryptography/hazmat/primitives/poly1305.py,sha256=P5EPQV-RB_FJPahpg01u0Ts4S_PnAmsroxIGXbGeRRo,355
cryptography/hazmat/primitives/serialization/__init__.py,sha256=6ZlL3EicEzoGdMOat86w8y_XICCnlHdCjFI97rMxRDg,1653
cryptography/hazmat/primitives/serialization/__pycache__/__init__.cpython-312.pyc,,
cryptography/hazmat/primitives/serialization/__pycache__/base.cpython-312.pyc,,
cryptography/hazmat/primitives/serialization/__pycache__/pkcs12.cpython-312.pyc,,
cryptography/hazmat/primitives/serialization/__pycache__/pkcs7.cpython-312.pyc,,
cryptography/hazmat/primitives/serialization/__pycache__/ssh.cpython-312.pyc,,
cryptography/hazmat/primitives/serialization/base.py,sha256=VZjIIqnbb-x38qpg2Wf_IxZvqjsgcEzNQtQoeJiQfpw,1986
cryptography/hazmat/primitives/serialization/pkcs12.py,sha256=NOzFxArlZhdjfgfugs8nERho1eyaxujXKGUKINchek4,6767
cryptography/hazmat/primitives/serialization/pkcs7.py,sha256=BCvlPubXQOunb76emISK89PX9qXcBQI2CRPNe85VTZk,7392
cryptography/hazmat/primitives/serialization/ssh.py,sha256=aLCYLPY3W1kerfCwadn5aYNzwcwIQl9c7RcsB8CKfuc,51027
cryptography/hazmat/primitives/twofactor/__init__.py,sha256=tmMZGB-g4IU1r7lIFqASU019zr0uPp_wEBYcwdDCKCA,258
cryptography/hazmat/primitives/twofactor/__pycache__/__init__.cpython-312.pyc,,
cryptography/hazmat/primitives/twofactor/__pycache__/hotp.cpython-312.pyc,,
cryptography/hazmat/primitives/twofactor/__pycache__/totp.cpython-312.pyc,,
cryptography/hazmat/primitives/twofactor/hotp.py,sha256=uZ0PSKYDZOL0aAobiw1Zd2HD0W2Ei1niUNC2v7Tnpc8,3010
cryptography/hazmat/primitives/twofactor/totp.py,sha256=cMbWlAapOM1SfezEx9MoMHpCW9ingNXCg6OsGv4T8jc,1473
cryptography/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
cryptography/utils.py,sha256=DfdXc9M4kmAboE2a0pPiISt5LVnW-jhhXURy8nDHae0,4018
cryptography/x509/__init__.py,sha256=DzZE8bR-3iiVi3Wrcq7-g5Pm64fCr5aqsTNyi_rjJu0,7870
cryptography/x509/__pycache__/__init__.cpython-312.pyc,,
cryptography/x509/__pycache__/base.cpython-312.pyc,,
cryptography/x509/__pycache__/certificate_transparency.cpython-312.pyc,,
cryptography/x509/__pycache__/extensions.cpython-312.pyc,,
cryptography/x509/__pycache__/general_name.cpython-312.pyc,,
cryptography/x509/__pycache__/name.cpython-312.pyc,,
cryptography/x509/__pycache__/ocsp.cpython-312.pyc,,
cryptography/x509/__pycache__/oid.cpython-312.pyc,,
cryptography/x509/base.py,sha256=FbS6EFE3uJ3O-zbFPRjsO6DckrNSN5TJNZMJcnzUWFQ,35677
cryptography/x509/certificate_transparency.py,sha256=6HvzAD0dlSQVxy6tnDhGj0-pisp1MaJ9bxQNRr92inI,2261
cryptography/x509/extensions.py,sha256=rFEcfZiFvcONs1ot03d68dAMK2U75w0s3g9mhyWBRcI,68365
cryptography/x509/general_name.py,sha256=zm8GxNgVJuLD6rN488c5zdHhxp5gUxeRzw8enZMWDQ0,7868
cryptography/x509/name.py,sha256=aZ2dpsinhkza3eTxT1vNmWuFMQ7fmcA0hs4npgnkf9Q,14855
cryptography/x509/ocsp.py,sha256=48iW7xbZ9mZLELSEl7Wwjb4vYhOQ3KcNtqgKsAb_UD0,18534
cryptography/x509/oid.py,sha256=fFosjGsnIB_w_0YrzZv1ggkSVwZl7xmY0zofKZNZkDA,829

View File

@@ -1,5 +0,0 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.42.0)
Root-Is-Purelib: false
Tag: cp37-abi3-manylinux_2_28_x86_64

View File

@@ -1,18 +1,8 @@
Metadata-Version: 2.1
Metadata-Version: 2.4
Name: cryptography
Version: 41.0.7
Summary: cryptography is a package which provides cryptographic recipes and primitives to Python developers.
Author-email: The Python Cryptographic Authority and individual contributors <cryptography-dev@python.org>
License: Apache-2.0 OR BSD-3-Clause
Project-URL: homepage, https://github.com/pyca/cryptography
Project-URL: documentation, https://cryptography.io/
Project-URL: source, https://github.com/pyca/cryptography/
Project-URL: issues, https://github.com/pyca/cryptography/issues
Project-URL: changelog, https://cryptography.io/en/latest/changelog/
Version: 46.0.3
Classifier: Development Status :: 5 - Production/Stable
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: Apache Software License
Classifier: License :: OSI Approved :: BSD License
Classifier: Natural Language :: English
Classifier: Operating System :: MacOS :: MacOS X
Classifier: Operating System :: POSIX
@@ -22,46 +12,62 @@ Classifier: Operating System :: Microsoft :: Windows
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3 :: Only
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Classifier: Programming Language :: Python :: 3.12
Classifier: Programming Language :: Python :: 3.13
Classifier: Programming Language :: Python :: 3.14
Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Programming Language :: Python :: Implementation :: PyPy
Classifier: Programming Language :: Python :: Free Threading :: 3 - Stable
Classifier: Topic :: Security :: Cryptography
Requires-Python: >=3.7
Description-Content-Type: text/x-rst
Requires-Dist: cffi>=1.14 ; python_full_version == '3.8.*' and platform_python_implementation != 'PyPy'
Requires-Dist: cffi>=2.0.0 ; python_full_version >= '3.9' and platform_python_implementation != 'PyPy'
Requires-Dist: typing-extensions>=4.13.2 ; python_full_version < '3.11'
Requires-Dist: bcrypt>=3.1.5 ; extra == 'ssh'
Requires-Dist: nox[uv]>=2024.4.15 ; extra == 'nox'
Requires-Dist: cryptography-vectors==46.0.3 ; extra == 'test'
Requires-Dist: pytest>=7.4.0 ; extra == 'test'
Requires-Dist: pytest-benchmark>=4.0 ; extra == 'test'
Requires-Dist: pytest-cov>=2.10.1 ; extra == 'test'
Requires-Dist: pytest-xdist>=3.5.0 ; extra == 'test'
Requires-Dist: pretend>=0.7 ; extra == 'test'
Requires-Dist: certifi>=2024 ; extra == 'test'
Requires-Dist: pytest-randomly ; extra == 'test-randomorder'
Requires-Dist: sphinx>=5.3.0 ; extra == 'docs'
Requires-Dist: sphinx-rtd-theme>=3.0.0 ; extra == 'docs'
Requires-Dist: sphinx-inline-tabs ; extra == 'docs'
Requires-Dist: pyenchant>=3 ; extra == 'docstest'
Requires-Dist: readme-renderer>=30.0 ; extra == 'docstest'
Requires-Dist: sphinxcontrib-spelling>=7.3.1 ; extra == 'docstest'
Requires-Dist: build>=1.0.0 ; extra == 'sdist'
Requires-Dist: ruff>=0.11.11 ; extra == 'pep8test'
Requires-Dist: mypy>=1.14 ; extra == 'pep8test'
Requires-Dist: check-sdist ; extra == 'pep8test'
Requires-Dist: click>=8.0.1 ; extra == 'pep8test'
Provides-Extra: ssh
Provides-Extra: nox
Provides-Extra: test
Provides-Extra: test-randomorder
Provides-Extra: docs
Provides-Extra: docstest
Provides-Extra: sdist
Provides-Extra: pep8test
License-File: LICENSE
License-File: LICENSE.APACHE
License-File: LICENSE.BSD
Requires-Dist: cffi >=1.12
Provides-Extra: docs
Requires-Dist: sphinx >=5.3.0 ; extra == 'docs'
Requires-Dist: sphinx-rtd-theme >=1.1.1 ; extra == 'docs'
Provides-Extra: docstest
Requires-Dist: pyenchant >=1.6.11 ; extra == 'docstest'
Requires-Dist: twine >=1.12.0 ; extra == 'docstest'
Requires-Dist: sphinxcontrib-spelling >=4.0.1 ; extra == 'docstest'
Provides-Extra: nox
Requires-Dist: nox ; extra == 'nox'
Provides-Extra: pep8test
Requires-Dist: black ; extra == 'pep8test'
Requires-Dist: ruff ; extra == 'pep8test'
Requires-Dist: mypy ; extra == 'pep8test'
Requires-Dist: check-sdist ; extra == 'pep8test'
Provides-Extra: sdist
Requires-Dist: build ; extra == 'sdist'
Provides-Extra: ssh
Requires-Dist: bcrypt >=3.1.5 ; extra == 'ssh'
Provides-Extra: test
Requires-Dist: pytest >=6.2.0 ; extra == 'test'
Requires-Dist: pytest-benchmark ; extra == 'test'
Requires-Dist: pytest-cov ; extra == 'test'
Requires-Dist: pytest-xdist ; extra == 'test'
Requires-Dist: pretend ; extra == 'test'
Provides-Extra: test-randomorder
Requires-Dist: pytest-randomly ; extra == 'test-randomorder'
Summary: cryptography is a package which provides cryptographic recipes and primitives to Python developers.
Author-email: The Python Cryptographic Authority and individual contributors <cryptography-dev@python.org>
License-Expression: Apache-2.0 OR BSD-3-Clause
Requires-Python: >=3.8, !=3.9.0, !=3.9.1
Description-Content-Type: text/x-rst; charset=UTF-8
Project-URL: homepage, https://github.com/pyca/cryptography
Project-URL: documentation, https://cryptography.io/
Project-URL: source, https://github.com/pyca/cryptography/
Project-URL: issues, https://github.com/pyca/cryptography/issues
Project-URL: changelog, https://cryptography.io/en/latest/changelog/
pyca/cryptography
=================
@@ -74,13 +80,12 @@ pyca/cryptography
:target: https://cryptography.io
:alt: Latest Docs
.. image:: https://github.com/pyca/cryptography/workflows/CI/badge.svg?branch=main
:target: https://github.com/pyca/cryptography/actions?query=workflow%3ACI+branch%3Amain
.. image:: https://github.com/pyca/cryptography/actions/workflows/ci.yml/badge.svg
:target: https://github.com/pyca/cryptography/actions/workflows/ci.yml?query=branch%3Amain
``cryptography`` is a package which provides cryptographic recipes and
primitives to Python developers. Our goal is for it to be your "cryptographic
standard library". It supports Python 3.7+ and PyPy3 7.3.10+.
standard library". It supports Python 3.8+ and PyPy3 7.3.11+.
``cryptography`` includes both high level recipes and low level interfaces to
common cryptographic algorithms such as symmetric ciphers, message digests, and
@@ -131,3 +136,4 @@ documentation.
.. _`issue tracker`: https://github.com/pyca/cryptography/issues
.. _`cryptography-dev`: https://mail.python.org/mailman/listinfo/cryptography-dev
.. _`security reporting`: https://cryptography.io/en/latest/security/

View File

@@ -0,0 +1,180 @@
cryptography-46.0.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
cryptography-46.0.3.dist-info/METADATA,sha256=bx2LyCEmOVUC8FH5hsGEZewWPiZoIIYTq0hM9mu9r4s,5748
cryptography-46.0.3.dist-info/RECORD,,
cryptography-46.0.3.dist-info/WHEEL,sha256=jkxrJemT4jZpYSr-u9xPalWqoow8benNmiXfjKXLlJw,108
cryptography-46.0.3.dist-info/licenses/LICENSE,sha256=Pgx8CRqUi4JTO6mP18u0BDLW8amsv4X1ki0vmak65rs,197
cryptography-46.0.3.dist-info/licenses/LICENSE.APACHE,sha256=qsc7MUj20dcRHbyjIJn2jSbGRMaBOuHk8F9leaomY_4,11360
cryptography-46.0.3.dist-info/licenses/LICENSE.BSD,sha256=YCxMdILeZHndLpeTzaJ15eY9dz2s0eymiSMqtwCPtPs,1532
cryptography/__about__.py,sha256=QCLxNH_Abbygdc9RQGpUmrK14Wp3Cl_SEiB2byLwyxo,445
cryptography/__init__.py,sha256=mthuUrTd4FROCpUYrTIqhjz6s6T9djAZrV7nZ1oMm2o,364
cryptography/__pycache__/__about__.cpython-312.pyc,,
cryptography/__pycache__/__init__.cpython-312.pyc,,
cryptography/__pycache__/exceptions.cpython-312.pyc,,
cryptography/__pycache__/fernet.cpython-312.pyc,,
cryptography/__pycache__/utils.cpython-312.pyc,,
cryptography/exceptions.py,sha256=835EWILc2fwxw-gyFMriciC2SqhViETB10LBSytnDIc,1087
cryptography/fernet.py,sha256=3Cvxkh0KJSbX8HbnCHu4wfCW7U0GgfUA3v_qQ8a8iWc,6963
cryptography/hazmat/__init__.py,sha256=5IwrLWrVp0AjEr_4FdWG_V057NSJGY_W4egNNsuct0g,455
cryptography/hazmat/__pycache__/__init__.cpython-312.pyc,,
cryptography/hazmat/__pycache__/_oid.cpython-312.pyc,,
cryptography/hazmat/_oid.py,sha256=p8ThjwJB56Ci_rAIrjyJ1f8VjgD6e39es2dh8JIUBOw,17240
cryptography/hazmat/asn1/__init__.py,sha256=hS_EWx3wVvZzfbCcNV8hzcDnyMM8H-BhIoS1TipUosk,293
cryptography/hazmat/asn1/__pycache__/__init__.cpython-312.pyc,,
cryptography/hazmat/asn1/__pycache__/asn1.cpython-312.pyc,,
cryptography/hazmat/asn1/asn1.py,sha256=eMEThEXa19LQjcyVofgHsW6tsZnjp3ddH7bWkkcxfLM,3860
cryptography/hazmat/backends/__init__.py,sha256=O5jvKFQdZnXhKeqJ-HtulaEL9Ni7mr1mDzZY5kHlYhI,361
cryptography/hazmat/backends/__pycache__/__init__.cpython-312.pyc,,
cryptography/hazmat/backends/openssl/__init__.py,sha256=p3jmJfnCag9iE5sdMrN6VvVEu55u46xaS_IjoI0SrmA,305
cryptography/hazmat/backends/openssl/__pycache__/__init__.cpython-312.pyc,,
cryptography/hazmat/backends/openssl/__pycache__/backend.cpython-312.pyc,,
cryptography/hazmat/backends/openssl/backend.py,sha256=tV5AxBoFJ2GfA0DMWSY-0TxQJrpQoexzI9R4Kybb--4,10215
cryptography/hazmat/bindings/__init__.py,sha256=s9oKCQ2ycFdXoERdS1imafueSkBsL9kvbyfghaauZ9Y,180
cryptography/hazmat/bindings/__pycache__/__init__.cpython-312.pyc,,
cryptography/hazmat/bindings/_rust.abi3.so,sha256=4bUN0J2p_ZQMdgmAc9eL0VMj_lgbTsHUmX4doekVIJ4,12955672
cryptography/hazmat/bindings/_rust/__init__.pyi,sha256=KhqLhXFPArPzzJ7DYO9Fl8FoXB_BagAd_r4Dm_Ze9Xo,1257
cryptography/hazmat/bindings/_rust/_openssl.pyi,sha256=mpNJLuYLbCVrd5i33FBTmWwL_55Dw7JPkSLlSX9Q7oI,230
cryptography/hazmat/bindings/_rust/asn1.pyi,sha256=BrGjC8J6nwuS-r3EVcdXJB8ndotfY9mbQYOfpbPG0HA,354
cryptography/hazmat/bindings/_rust/declarative_asn1.pyi,sha256=2ECFmYue1EPkHEE2Bm7aLwkjB0mSUTpr23v9MN4pri4,892
cryptography/hazmat/bindings/_rust/exceptions.pyi,sha256=exXr2xw_0pB1kk93cYbM3MohbzoUkjOms1ZMUi0uQZE,640
cryptography/hazmat/bindings/_rust/ocsp.pyi,sha256=VPVWuKHI9EMs09ZLRYAGvR0Iz0mCMmEzXAkgJHovpoM,4020
cryptography/hazmat/bindings/_rust/openssl/__init__.pyi,sha256=iOAMDyHoNwwCSZfZzuXDr64g4GpGUeDgEN-LjXqdrBM,1522
cryptography/hazmat/bindings/_rust/openssl/aead.pyi,sha256=4Nddw6-ynzIB3w2W86WvkGKTLlTDk_6F5l54RHCuy3E,2688
cryptography/hazmat/bindings/_rust/openssl/ciphers.pyi,sha256=LhPzHWSXJq4grAJXn6zSvSSdV-aYIIscHDwIPlJGGPs,1315
cryptography/hazmat/bindings/_rust/openssl/cmac.pyi,sha256=nPH0X57RYpsAkRowVpjQiHE566ThUTx7YXrsadmrmHk,564
cryptography/hazmat/bindings/_rust/openssl/dh.pyi,sha256=Z3TC-G04-THtSdAOPLM1h2G7ml5bda1ElZUcn5wpuhk,1564
cryptography/hazmat/bindings/_rust/openssl/dsa.pyi,sha256=qBtkgj2albt2qFcnZ9UDrhzoNhCVO7HTby5VSf1EXMI,1299
cryptography/hazmat/bindings/_rust/openssl/ec.pyi,sha256=zJy0pRa5n-_p2dm45PxECB_-B6SVZyNKfjxFDpPqT38,1691
cryptography/hazmat/bindings/_rust/openssl/ed25519.pyi,sha256=VXfXd5G6hUivg399R1DYdmW3eTb0EebzDTqjRC2gaRw,532
cryptography/hazmat/bindings/_rust/openssl/ed448.pyi,sha256=Yx49lqdnjsD7bxiDV1kcaMrDktug5evi5a6zerMiy2s,514
cryptography/hazmat/bindings/_rust/openssl/hashes.pyi,sha256=OWZvBx7xfo_HJl41Nc--DugVyCVPIprZ3HlOPTSWH9g,984
cryptography/hazmat/bindings/_rust/openssl/hmac.pyi,sha256=BXZn7NDjL3JAbYW0SQ8pg1iyC5DbQXVhUAiwsi8DFR8,702
cryptography/hazmat/bindings/_rust/openssl/kdf.pyi,sha256=xXfFBb9QehHfDtEaxV_65Z0YK7NquOVIChpTLkgAs_k,2029
cryptography/hazmat/bindings/_rust/openssl/keys.pyi,sha256=teIt8M6ZEMJrn4s3W0UnW0DZ-30Jd68WnSsKKG124l0,912
cryptography/hazmat/bindings/_rust/openssl/poly1305.pyi,sha256=_SW9NtQ5FDlAbdclFtWpT4lGmxKIKHpN-4j8J2BzYfQ,585
cryptography/hazmat/bindings/_rust/openssl/rsa.pyi,sha256=2OQCNSXkxgc-3uw1xiCCloIQTV6p9_kK79Yu0rhZgPc,1364
cryptography/hazmat/bindings/_rust/openssl/x25519.pyi,sha256=ewn4GpQyb7zPwE-ni7GtyQgMC0A1mLuqYsSyqv6nI_s,523
cryptography/hazmat/bindings/_rust/openssl/x448.pyi,sha256=juTZTmli8jO_5Vcufg-vHvx_tCyezmSLIh_9PU3TczI,505
cryptography/hazmat/bindings/_rust/pkcs12.pyi,sha256=vEEd5wDiZvb8ZGFaziLCaWLzAwoG_tvPUxLQw5_uOl8,1605
cryptography/hazmat/bindings/_rust/pkcs7.pyi,sha256=txGBJijqZshEcqra6byPNbnisIdlxzOSIHP2hl9arPs,1601
cryptography/hazmat/bindings/_rust/test_support.pyi,sha256=PPhld-WkO743iXFPebeG0LtgK0aTzGdjcIsay1Gm5GE,757
cryptography/hazmat/bindings/_rust/x509.pyi,sha256=n9X0IQ6ICbdIi-ExdCFZoBgeY6njm3QOVAVZwDQdnbk,9784
cryptography/hazmat/bindings/openssl/__init__.py,sha256=s9oKCQ2ycFdXoERdS1imafueSkBsL9kvbyfghaauZ9Y,180
cryptography/hazmat/bindings/openssl/__pycache__/__init__.cpython-312.pyc,,
cryptography/hazmat/bindings/openssl/__pycache__/_conditional.cpython-312.pyc,,
cryptography/hazmat/bindings/openssl/__pycache__/binding.cpython-312.pyc,,
cryptography/hazmat/bindings/openssl/_conditional.py,sha256=DMOpA_XN4l70zTc5_J9DpwlbQeUBRTWpfIJ4yRIn1-U,5791
cryptography/hazmat/bindings/openssl/binding.py,sha256=x8eocEmukO4cm7cHqfVmOoYY7CCXdoF1v1WhZQt9neo,4610
cryptography/hazmat/decrepit/__init__.py,sha256=wHCbWfaefa-fk6THSw9th9fJUsStJo7245wfFBqmduA,216
cryptography/hazmat/decrepit/__pycache__/__init__.cpython-312.pyc,,
cryptography/hazmat/decrepit/ciphers/__init__.py,sha256=wHCbWfaefa-fk6THSw9th9fJUsStJo7245wfFBqmduA,216
cryptography/hazmat/decrepit/ciphers/__pycache__/__init__.cpython-312.pyc,,
cryptography/hazmat/decrepit/ciphers/__pycache__/algorithms.cpython-312.pyc,,
cryptography/hazmat/decrepit/ciphers/algorithms.py,sha256=YrKgHS4MfwWaMmPBYRymRRlC0phwWp9ycICFezeJPGk,2595
cryptography/hazmat/primitives/__init__.py,sha256=s9oKCQ2ycFdXoERdS1imafueSkBsL9kvbyfghaauZ9Y,180
cryptography/hazmat/primitives/__pycache__/__init__.cpython-312.pyc,,
cryptography/hazmat/primitives/__pycache__/_asymmetric.cpython-312.pyc,,
cryptography/hazmat/primitives/__pycache__/_cipheralgorithm.cpython-312.pyc,,
cryptography/hazmat/primitives/__pycache__/_serialization.cpython-312.pyc,,
cryptography/hazmat/primitives/__pycache__/cmac.cpython-312.pyc,,
cryptography/hazmat/primitives/__pycache__/constant_time.cpython-312.pyc,,
cryptography/hazmat/primitives/__pycache__/hashes.cpython-312.pyc,,
cryptography/hazmat/primitives/__pycache__/hmac.cpython-312.pyc,,
cryptography/hazmat/primitives/__pycache__/keywrap.cpython-312.pyc,,
cryptography/hazmat/primitives/__pycache__/padding.cpython-312.pyc,,
cryptography/hazmat/primitives/__pycache__/poly1305.cpython-312.pyc,,
cryptography/hazmat/primitives/_asymmetric.py,sha256=RhgcouUB6HTiFDBrR1LxqkMjpUxIiNvQ1r_zJjRG6qQ,532
cryptography/hazmat/primitives/_cipheralgorithm.py,sha256=Eh3i7lwedHfi0eLSsH93PZxQKzY9I6lkK67vL4V5tOc,1522
cryptography/hazmat/primitives/_serialization.py,sha256=chgPCSF2jxI2Cr5gB-qbWXOvOfupBh4CARS0KAhv9AM,5123
cryptography/hazmat/primitives/asymmetric/__init__.py,sha256=s9oKCQ2ycFdXoERdS1imafueSkBsL9kvbyfghaauZ9Y,180
cryptography/hazmat/primitives/asymmetric/__pycache__/__init__.cpython-312.pyc,,
cryptography/hazmat/primitives/asymmetric/__pycache__/dh.cpython-312.pyc,,
cryptography/hazmat/primitives/asymmetric/__pycache__/dsa.cpython-312.pyc,,
cryptography/hazmat/primitives/asymmetric/__pycache__/ec.cpython-312.pyc,,
cryptography/hazmat/primitives/asymmetric/__pycache__/ed25519.cpython-312.pyc,,
cryptography/hazmat/primitives/asymmetric/__pycache__/ed448.cpython-312.pyc,,
cryptography/hazmat/primitives/asymmetric/__pycache__/padding.cpython-312.pyc,,
cryptography/hazmat/primitives/asymmetric/__pycache__/rsa.cpython-312.pyc,,
cryptography/hazmat/primitives/asymmetric/__pycache__/types.cpython-312.pyc,,
cryptography/hazmat/primitives/asymmetric/__pycache__/utils.cpython-312.pyc,,
cryptography/hazmat/primitives/asymmetric/__pycache__/x25519.cpython-312.pyc,,
cryptography/hazmat/primitives/asymmetric/__pycache__/x448.cpython-312.pyc,,
cryptography/hazmat/primitives/asymmetric/dh.py,sha256=0v_vEFFz5pQ1QG-FkWDyvgv7IfuVZSH5Q6LyFI5A8rg,3645
cryptography/hazmat/primitives/asymmetric/dsa.py,sha256=Ld_bbbqQFz12dObHxIkzEQzX0SWWP41RLSWkYSaKhqE,4213
cryptography/hazmat/primitives/asymmetric/ec.py,sha256=Vf5ig2PcS3PVnsb5N49Kx1uIkFBJyhg4BWXThDz5cug,12999
cryptography/hazmat/primitives/asymmetric/ed25519.py,sha256=jZW5cs472wXXV3eB0sE1b8w64gdazwwU0_MT5UOTiXs,3700
cryptography/hazmat/primitives/asymmetric/ed448.py,sha256=yAetgn2f2JYf0BO8MapGzXeThsvSMG5LmUCrxVOidAA,3729
cryptography/hazmat/primitives/asymmetric/padding.py,sha256=vQ6l6gOg9HqcbOsvHrSiJRVLdEj9L4m4HkRGYziTyFA,2854
cryptography/hazmat/primitives/asymmetric/rsa.py,sha256=ZnKOo2f34MCCOupC03Y1uR-_jiSG5IrelHEmxaME3D4,8303
cryptography/hazmat/primitives/asymmetric/types.py,sha256=LnsOJym-wmPUJ7Knu_7bCNU3kIiELCd6krOaW_JU08I,2996
cryptography/hazmat/primitives/asymmetric/utils.py,sha256=DPTs6T4F-UhwzFQTh-1fSEpQzazH2jf2xpIro3ItF4o,790
cryptography/hazmat/primitives/asymmetric/x25519.py,sha256=_4nQeZ3yJ3Lg0RpXnaqA-1yt6vbx1F-wzLcaZHwSpeE,3613
cryptography/hazmat/primitives/asymmetric/x448.py,sha256=WKBLtuVfJqiBRro654fGaQAlvsKbqbNkK7c4A_ZCdV0,3642
cryptography/hazmat/primitives/ciphers/__init__.py,sha256=eyEXmjk6_CZXaOPYDr7vAYGXr29QvzgWL2-4CSolLFs,680
cryptography/hazmat/primitives/ciphers/__pycache__/__init__.cpython-312.pyc,,
cryptography/hazmat/primitives/ciphers/__pycache__/aead.cpython-312.pyc,,
cryptography/hazmat/primitives/ciphers/__pycache__/algorithms.cpython-312.pyc,,
cryptography/hazmat/primitives/ciphers/__pycache__/base.cpython-312.pyc,,
cryptography/hazmat/primitives/ciphers/__pycache__/modes.cpython-312.pyc,,
cryptography/hazmat/primitives/ciphers/aead.py,sha256=Fzlyx7w8KYQakzDp1zWgJnIr62zgZrgVh1u2h4exB54,634
cryptography/hazmat/primitives/ciphers/algorithms.py,sha256=Q7ZJwcsx83Mgxv5y7r6CyJKSdsOwC-my-5A67-ma2vw,3407
cryptography/hazmat/primitives/ciphers/base.py,sha256=aBC7HHBBoixebmparVr0UlODs3VD0A7B6oz_AaRjDv8,4253
cryptography/hazmat/primitives/ciphers/modes.py,sha256=20stpwhDtbAvpH0SMf9EDHIciwmTF-JMBUOZ9bU8WiQ,8318
cryptography/hazmat/primitives/cmac.py,sha256=sz_s6H_cYnOvx-VNWdIKhRhe3Ymp8z8J0D3CBqOX3gg,338
cryptography/hazmat/primitives/constant_time.py,sha256=xdunWT0nf8OvKdcqUhhlFKayGp4_PgVJRU2W1wLSr_A,422
cryptography/hazmat/primitives/hashes.py,sha256=M8BrlKB3U6DEtHvWTV5VRjpteHv1kS3Zxm_Bsk04cr8,5184
cryptography/hazmat/primitives/hmac.py,sha256=RpB3z9z5skirCQrm7zQbtnp9pLMnAjrlTUvKqF5aDDc,423
cryptography/hazmat/primitives/kdf/__init__.py,sha256=4XibZnrYq4hh5xBjWiIXzaYW6FKx8hPbVaa_cB9zS64,750
cryptography/hazmat/primitives/kdf/__pycache__/__init__.cpython-312.pyc,,
cryptography/hazmat/primitives/kdf/__pycache__/argon2.cpython-312.pyc,,
cryptography/hazmat/primitives/kdf/__pycache__/concatkdf.cpython-312.pyc,,
cryptography/hazmat/primitives/kdf/__pycache__/hkdf.cpython-312.pyc,,
cryptography/hazmat/primitives/kdf/__pycache__/kbkdf.cpython-312.pyc,,
cryptography/hazmat/primitives/kdf/__pycache__/pbkdf2.cpython-312.pyc,,
cryptography/hazmat/primitives/kdf/__pycache__/scrypt.cpython-312.pyc,,
cryptography/hazmat/primitives/kdf/__pycache__/x963kdf.cpython-312.pyc,,
cryptography/hazmat/primitives/kdf/argon2.py,sha256=UFDNXG0v-rw3DqAQTB1UQAsQC2M5Ejg0k_6OCyhLKus,460
cryptography/hazmat/primitives/kdf/concatkdf.py,sha256=Ua8KoLXXnzgsrAUmHpyKymaPt8aPRP0EHEaBz7QCQ9I,3737
cryptography/hazmat/primitives/kdf/hkdf.py,sha256=M0lAEfRoc4kpp4-nwDj9yB-vNZukIOYEQrUlWsBNn9o,543
cryptography/hazmat/primitives/kdf/kbkdf.py,sha256=oZepvo4evhKkkJQWRDwaPoIbyTaFmDc5NPimxg6lfKg,9165
cryptography/hazmat/primitives/kdf/pbkdf2.py,sha256=1WIwhELR0w8ztTpTu8BrFiYWmK3hUfJq08I79TxwieE,1957
cryptography/hazmat/primitives/kdf/scrypt.py,sha256=XyWUdUUmhuI9V6TqAPOvujCSMGv1XQdg0a21IWCmO-U,590
cryptography/hazmat/primitives/kdf/x963kdf.py,sha256=zLTcF665QFvXX2f8TS7fmBZTteXpFjKahzfjjQcCJyw,1999
cryptography/hazmat/primitives/keywrap.py,sha256=XV4Pj2fqSeD-RqZVvY2cA3j5_7RwJSFygYuLfk2ujCo,5650
cryptography/hazmat/primitives/padding.py,sha256=QT-U-NvV2eQGO1wVPbDiNGNSc9keRDS-ig5cQOrLz0E,1865
cryptography/hazmat/primitives/poly1305.py,sha256=P5EPQV-RB_FJPahpg01u0Ts4S_PnAmsroxIGXbGeRRo,355
cryptography/hazmat/primitives/serialization/__init__.py,sha256=Q7uTgDlt7n3WfsMT6jYwutC6DIg_7SEeoAm1GHZ5B5E,1705
cryptography/hazmat/primitives/serialization/__pycache__/__init__.cpython-312.pyc,,
cryptography/hazmat/primitives/serialization/__pycache__/base.cpython-312.pyc,,
cryptography/hazmat/primitives/serialization/__pycache__/pkcs12.cpython-312.pyc,,
cryptography/hazmat/primitives/serialization/__pycache__/pkcs7.cpython-312.pyc,,
cryptography/hazmat/primitives/serialization/__pycache__/ssh.cpython-312.pyc,,
cryptography/hazmat/primitives/serialization/base.py,sha256=ikq5MJIwp_oUnjiaBco_PmQwOTYuGi-XkYUYHKy8Vo0,615
cryptography/hazmat/primitives/serialization/pkcs12.py,sha256=mS9cFNG4afzvseoc5e1MWoY2VskfL8N8Y_OFjl67luY,5104
cryptography/hazmat/primitives/serialization/pkcs7.py,sha256=5OR_Tkysxaprn4FegvJIfbep9rJ9wok6FLWvWwQ5-Mg,13943
cryptography/hazmat/primitives/serialization/ssh.py,sha256=hPV5obFznz0QhFfXFPOeQ8y6MsurA0xVMQiLnLESEs8,53700
cryptography/hazmat/primitives/twofactor/__init__.py,sha256=tmMZGB-g4IU1r7lIFqASU019zr0uPp_wEBYcwdDCKCA,258
cryptography/hazmat/primitives/twofactor/__pycache__/__init__.cpython-312.pyc,,
cryptography/hazmat/primitives/twofactor/__pycache__/hotp.cpython-312.pyc,,
cryptography/hazmat/primitives/twofactor/__pycache__/totp.cpython-312.pyc,,
cryptography/hazmat/primitives/twofactor/hotp.py,sha256=ivZo5BrcCGWLsqql4nZV0XXCjyGPi_iHfDFltGlOJwk,3256
cryptography/hazmat/primitives/twofactor/totp.py,sha256=m5LPpRL00kp4zY8gTjr55Hfz9aMlPS53kHmVkSQCmdY,1652
cryptography/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
cryptography/utils.py,sha256=bZAjFC5KVpfmF29qS_18vvpW3mKxmdiRALcusHhTTkg,4301
cryptography/x509/__init__.py,sha256=xloN0swseNx-m2WFZmCA17gOoxQWqeU82UVjEdJBePQ,8257
cryptography/x509/__pycache__/__init__.cpython-312.pyc,,
cryptography/x509/__pycache__/base.cpython-312.pyc,,
cryptography/x509/__pycache__/certificate_transparency.cpython-312.pyc,,
cryptography/x509/__pycache__/extensions.cpython-312.pyc,,
cryptography/x509/__pycache__/general_name.cpython-312.pyc,,
cryptography/x509/__pycache__/name.cpython-312.pyc,,
cryptography/x509/__pycache__/ocsp.cpython-312.pyc,,
cryptography/x509/__pycache__/oid.cpython-312.pyc,,
cryptography/x509/__pycache__/verification.cpython-312.pyc,,
cryptography/x509/base.py,sha256=OrmTw3y8B6AE_nGXQPN8x9kq-d7rDWeH13gCq6T6D6U,27997
cryptography/x509/certificate_transparency.py,sha256=JqoOIDhlwInrYMFW6IFn77WJ0viF-PB_rlZV3vs9MYc,797
cryptography/x509/extensions.py,sha256=QxYrqR6SF1qzR9ZraP8wDiIczlEVlAFuwDRVcltB6Tk,77724
cryptography/x509/general_name.py,sha256=sP_rV11Qlpsk4x3XXGJY_Mv0Q_s9dtjeLckHsjpLQoQ,7836
cryptography/x509/name.py,sha256=ty0_xf0LnHwZAdEf-d8FLO1K4hGqx_7DsD3CHwoLJiY,15101
cryptography/x509/ocsp.py,sha256=Yey6NdFV1MPjop24Mj_VenjEpg3kUaMopSWOK0AbeBs,12699
cryptography/x509/oid.py,sha256=BUzgXXGVWilkBkdKPTm9R4qElE9gAGHgdYPMZAp7PJo,931
cryptography/x509/verification.py,sha256=gR2C2c-XZQtblZhT5T5vjSKOtCb74ef2alPVmEcwFlM,958

View File

@@ -0,0 +1,5 @@
Wheel-Version: 1.0
Generator: maturin (1.9.4)
Root-Is-Purelib: false
Tag: cp311-abi3-manylinux_2_34_x86_64

View File

@@ -5,13 +5,13 @@
from __future__ import annotations
__all__ = [
"__version__",
"__author__",
"__copyright__",
"__version__",
]
__version__ = "41.0.7"
__version__ = "46.0.3"
__author__ = "The Python Cryptographic Authority and individual contributors"
__copyright__ = f"Copyright 2013-2023 {__author__}"
__copyright__ = f"Copyright 2013-2025 {__author__}"

View File

@@ -7,7 +7,7 @@ from __future__ import annotations
from cryptography.__about__ import __author__, __copyright__, __version__
__all__ = [
"__version__",
"__author__",
"__copyright__",
"__version__",
]

View File

@@ -15,9 +15,7 @@ _Reasons = rust_exceptions._Reasons
class UnsupportedAlgorithm(Exception):
def __init__(
self, message: str, reason: typing.Optional[_Reasons] = None
) -> None:
def __init__(self, message: str, reason: _Reasons | None = None) -> None:
super().__init__(message)
self._reason = reason
@@ -44,7 +42,7 @@ class InvalidSignature(Exception):
class InternalError(Exception):
def __init__(
self, msg: str, err_code: typing.List[rust_openssl.OpenSSLError]
self, msg: str, err_code: list[rust_openssl.OpenSSLError]
) -> None:
super().__init__(msg)
self.err_code = err_code

View File

@@ -9,6 +9,7 @@ import binascii
import os
import time
import typing
from collections.abc import Iterable
from cryptography import utils
from cryptography.exceptions import InvalidSignature
@@ -27,7 +28,7 @@ _MAX_CLOCK_SKEW = 60
class Fernet:
def __init__(
self,
key: typing.Union[bytes, str],
key: bytes | str,
backend: typing.Any = None,
) -> None:
try:
@@ -80,9 +81,7 @@ class Fernet:
hmac = h.finalize()
return base64.urlsafe_b64encode(basic_parts + hmac)
def decrypt(
self, token: typing.Union[bytes, str], ttl: typing.Optional[int] = None
) -> bytes:
def decrypt(self, token: bytes | str, ttl: int | None = None) -> bytes:
timestamp, data = Fernet._get_unverified_token_data(token)
if ttl is None:
time_info = None
@@ -91,7 +90,7 @@ class Fernet:
return self._decrypt_data(data, timestamp, time_info)
def decrypt_at_time(
self, token: typing.Union[bytes, str], ttl: int, current_time: int
self, token: bytes | str, ttl: int, current_time: int
) -> bytes:
if ttl is None:
raise ValueError(
@@ -100,16 +99,14 @@ class Fernet:
timestamp, data = Fernet._get_unverified_token_data(token)
return self._decrypt_data(data, timestamp, (ttl, current_time))
def extract_timestamp(self, token: typing.Union[bytes, str]) -> int:
def extract_timestamp(self, token: bytes | str) -> int:
timestamp, data = Fernet._get_unverified_token_data(token)
# Verify the token was not tampered with.
self._verify_signature(data)
return timestamp
@staticmethod
def _get_unverified_token_data(
token: typing.Union[bytes, str]
) -> typing.Tuple[int, bytes]:
def _get_unverified_token_data(token: bytes | str) -> tuple[int, bytes]:
if not isinstance(token, (str, bytes)):
raise TypeError("token must be bytes or str")
@@ -139,7 +136,7 @@ class Fernet:
self,
data: bytes,
timestamp: int,
time_info: typing.Optional[typing.Tuple[int, int]],
time_info: tuple[int, int] | None,
) -> bytes:
if time_info is not None:
ttl, current_time = time_info
@@ -172,7 +169,7 @@ class Fernet:
class MultiFernet:
def __init__(self, fernets: typing.Iterable[Fernet]):
def __init__(self, fernets: Iterable[Fernet]):
fernets = list(fernets)
if not fernets:
raise ValueError(
@@ -186,7 +183,7 @@ class MultiFernet:
def encrypt_at_time(self, msg: bytes, current_time: int) -> bytes:
return self._fernets[0].encrypt_at_time(msg, current_time)
def rotate(self, msg: typing.Union[bytes, str]) -> bytes:
def rotate(self, msg: bytes | str) -> bytes:
timestamp, data = Fernet._get_unverified_token_data(msg)
for f in self._fernets:
try:
@@ -200,9 +197,7 @@ class MultiFernet:
iv = os.urandom(16)
return self._fernets[0]._encrypt_from_parts(p, timestamp, iv)
def decrypt(
self, msg: typing.Union[bytes, str], ttl: typing.Optional[int] = None
) -> bytes:
def decrypt(self, msg: bytes | str, ttl: int | None = None) -> bytes:
for f in self._fernets:
try:
return f.decrypt(msg, ttl)
@@ -211,7 +206,7 @@ class MultiFernet:
raise InvalidToken
def decrypt_at_time(
self, msg: typing.Union[bytes, str], ttl: int, current_time: int
self, msg: bytes | str, ttl: int, current_time: int
) -> bytes:
for f in self._fernets:
try:
@@ -219,3 +214,11 @@ class MultiFernet:
except InvalidToken:
pass
raise InvalidToken
def extract_timestamp(self, msg: bytes | str) -> int:
for f in self._fernets:
try:
return f.extract_timestamp(msg)
except InvalidToken:
pass
raise InvalidToken

View File

@@ -4,8 +4,6 @@
from __future__ import annotations
import typing
from cryptography.hazmat.bindings._rust import (
ObjectIdentifier as ObjectIdentifier,
)
@@ -16,6 +14,7 @@ class ExtensionOID:
SUBJECT_DIRECTORY_ATTRIBUTES = ObjectIdentifier("2.5.29.9")
SUBJECT_KEY_IDENTIFIER = ObjectIdentifier("2.5.29.14")
KEY_USAGE = ObjectIdentifier("2.5.29.15")
PRIVATE_KEY_USAGE_PERIOD = ObjectIdentifier("2.5.29.16")
SUBJECT_ALTERNATIVE_NAME = ObjectIdentifier("2.5.29.17")
ISSUER_ALTERNATIVE_NAME = ObjectIdentifier("2.5.29.18")
BASIC_CONSTRAINTS = ObjectIdentifier("2.5.29.19")
@@ -41,6 +40,7 @@ class ExtensionOID:
PRECERT_POISON = ObjectIdentifier("1.3.6.1.4.1.11129.2.4.3")
SIGNED_CERTIFICATE_TIMESTAMPS = ObjectIdentifier("1.3.6.1.4.1.11129.2.4.5")
MS_CERTIFICATE_TEMPLATE = ObjectIdentifier("1.3.6.1.4.1.311.21.7")
ADMISSIONS = ObjectIdentifier("1.3.36.8.3.3")
class OCSPExtensionOID:
@@ -60,6 +60,7 @@ class NameOID:
LOCALITY_NAME = ObjectIdentifier("2.5.4.7")
STATE_OR_PROVINCE_NAME = ObjectIdentifier("2.5.4.8")
STREET_ADDRESS = ObjectIdentifier("2.5.4.9")
ORGANIZATION_IDENTIFIER = ObjectIdentifier("2.5.4.97")
ORGANIZATION_NAME = ObjectIdentifier("2.5.4.10")
ORGANIZATIONAL_UNIT_NAME = ObjectIdentifier("2.5.4.11")
SERIAL_NUMBER = ObjectIdentifier("2.5.4.5")
@@ -123,9 +124,7 @@ class SignatureAlgorithmOID:
GOSTR3410_2012_WITH_3411_2012_512 = ObjectIdentifier("1.2.643.7.1.1.3.3")
_SIG_OIDS_TO_HASH: typing.Dict[
ObjectIdentifier, typing.Optional[hashes.HashAlgorithm]
] = {
_SIG_OIDS_TO_HASH: dict[ObjectIdentifier, hashes.HashAlgorithm | None] = {
SignatureAlgorithmOID.RSA_WITH_MD5: hashes.MD5(),
SignatureAlgorithmOID.RSA_WITH_SHA1: hashes.SHA1(),
SignatureAlgorithmOID._RSA_WITH_SHA1: hashes.SHA1(),
@@ -157,6 +156,33 @@ _SIG_OIDS_TO_HASH: typing.Dict[
}
class HashAlgorithmOID:
SHA1 = ObjectIdentifier("1.3.14.3.2.26")
SHA224 = ObjectIdentifier("2.16.840.1.101.3.4.2.4")
SHA256 = ObjectIdentifier("2.16.840.1.101.3.4.2.1")
SHA384 = ObjectIdentifier("2.16.840.1.101.3.4.2.2")
SHA512 = ObjectIdentifier("2.16.840.1.101.3.4.2.3")
SHA3_224 = ObjectIdentifier("1.3.6.1.4.1.37476.3.2.1.99.7.224")
SHA3_256 = ObjectIdentifier("1.3.6.1.4.1.37476.3.2.1.99.7.256")
SHA3_384 = ObjectIdentifier("1.3.6.1.4.1.37476.3.2.1.99.7.384")
SHA3_512 = ObjectIdentifier("1.3.6.1.4.1.37476.3.2.1.99.7.512")
SHA3_224_NIST = ObjectIdentifier("2.16.840.1.101.3.4.2.7")
SHA3_256_NIST = ObjectIdentifier("2.16.840.1.101.3.4.2.8")
SHA3_384_NIST = ObjectIdentifier("2.16.840.1.101.3.4.2.9")
SHA3_512_NIST = ObjectIdentifier("2.16.840.1.101.3.4.2.10")
class PublicKeyAlgorithmOID:
DSA = ObjectIdentifier("1.2.840.10040.4.1")
EC_PUBLIC_KEY = ObjectIdentifier("1.2.840.10045.2.1")
RSAES_PKCS1_v1_5 = ObjectIdentifier("1.2.840.113549.1.1.1")
RSASSA_PSS = ObjectIdentifier("1.2.840.113549.1.1.10")
X25519 = ObjectIdentifier("1.3.101.110")
X448 = ObjectIdentifier("1.3.101.111")
ED25519 = ObjectIdentifier("1.3.101.112")
ED448 = ObjectIdentifier("1.3.101.113")
class ExtendedKeyUsageOID:
SERVER_AUTH = ObjectIdentifier("1.3.6.1.5.5.7.3.1")
CLIENT_AUTH = ObjectIdentifier("1.3.6.1.5.5.7.3.2")
@@ -168,9 +194,20 @@ class ExtendedKeyUsageOID:
SMARTCARD_LOGON = ObjectIdentifier("1.3.6.1.4.1.311.20.2.2")
KERBEROS_PKINIT_KDC = ObjectIdentifier("1.3.6.1.5.2.3.5")
IPSEC_IKE = ObjectIdentifier("1.3.6.1.5.5.7.3.17")
BUNDLE_SECURITY = ObjectIdentifier("1.3.6.1.5.5.7.3.35")
CERTIFICATE_TRANSPARENCY = ObjectIdentifier("1.3.6.1.4.1.11129.2.4.4")
class OtherNameFormOID:
PERMANENT_IDENTIFIER = ObjectIdentifier("1.3.6.1.5.5.7.8.3")
HW_MODULE_NAME = ObjectIdentifier("1.3.6.1.5.5.7.8.4")
DNS_SRV = ObjectIdentifier("1.3.6.1.5.5.7.8.7")
NAI_REALM = ObjectIdentifier("1.3.6.1.5.5.7.8.8")
SMTP_UTF8_MAILBOX = ObjectIdentifier("1.3.6.1.5.5.7.8.9")
ACP_NODE_NAME = ObjectIdentifier("1.3.6.1.5.5.7.8.10")
BUNDLE_EID = ObjectIdentifier("1.3.6.1.5.5.7.8.11")
class AuthorityInformationAccessOID:
CA_ISSUERS = ObjectIdentifier("1.3.6.1.5.5.7.48.2")
OCSP = ObjectIdentifier("1.3.6.1.5.5.7.48.1")
@@ -228,7 +265,7 @@ _OID_NAMES = {
SignatureAlgorithmOID.RSA_WITH_SHA256: "sha256WithRSAEncryption",
SignatureAlgorithmOID.RSA_WITH_SHA384: "sha384WithRSAEncryption",
SignatureAlgorithmOID.RSA_WITH_SHA512: "sha512WithRSAEncryption",
SignatureAlgorithmOID.RSASSA_PSS: "RSASSA-PSS",
SignatureAlgorithmOID.RSASSA_PSS: "rsassaPss",
SignatureAlgorithmOID.ECDSA_WITH_SHA1: "ecdsa-with-SHA1",
SignatureAlgorithmOID.ECDSA_WITH_SHA224: "ecdsa-with-SHA224",
SignatureAlgorithmOID.ECDSA_WITH_SHA256: "ecdsa-with-SHA256",
@@ -248,6 +285,24 @@ _OID_NAMES = {
SignatureAlgorithmOID.GOSTR3410_2012_WITH_3411_2012_512: (
"GOST R 34.10-2012 with GOST R 34.11-2012 (512 bit)"
),
HashAlgorithmOID.SHA1: "sha1",
HashAlgorithmOID.SHA224: "sha224",
HashAlgorithmOID.SHA256: "sha256",
HashAlgorithmOID.SHA384: "sha384",
HashAlgorithmOID.SHA512: "sha512",
HashAlgorithmOID.SHA3_224: "sha3_224",
HashAlgorithmOID.SHA3_256: "sha3_256",
HashAlgorithmOID.SHA3_384: "sha3_384",
HashAlgorithmOID.SHA3_512: "sha3_512",
HashAlgorithmOID.SHA3_224_NIST: "sha3_224",
HashAlgorithmOID.SHA3_256_NIST: "sha3_256",
HashAlgorithmOID.SHA3_384_NIST: "sha3_384",
HashAlgorithmOID.SHA3_512_NIST: "sha3_512",
PublicKeyAlgorithmOID.DSA: "dsaEncryption",
PublicKeyAlgorithmOID.EC_PUBLIC_KEY: "id-ecPublicKey",
PublicKeyAlgorithmOID.RSAES_PKCS1_v1_5: "rsaEncryption",
PublicKeyAlgorithmOID.X25519: "X25519",
PublicKeyAlgorithmOID.X448: "X448",
ExtendedKeyUsageOID.SERVER_AUTH: "serverAuth",
ExtendedKeyUsageOID.CLIENT_AUTH: "clientAuth",
ExtendedKeyUsageOID.CODE_SIGNING: "codeSigning",
@@ -259,6 +314,7 @@ _OID_NAMES = {
ExtensionOID.SUBJECT_DIRECTORY_ATTRIBUTES: "subjectDirectoryAttributes",
ExtensionOID.SUBJECT_KEY_IDENTIFIER: "subjectKeyIdentifier",
ExtensionOID.KEY_USAGE: "keyUsage",
ExtensionOID.PRIVATE_KEY_USAGE_PERIOD: "privateKeyUsagePeriod",
ExtensionOID.SUBJECT_ALTERNATIVE_NAME: "subjectAltName",
ExtensionOID.ISSUER_ALTERNATIVE_NAME: "issuerAltName",
ExtensionOID.BASIC_CONSTRAINTS: "basicConstraints",
@@ -270,6 +326,7 @@ _OID_NAMES = {
),
ExtensionOID.PRECERT_POISON: "ctPoison",
ExtensionOID.MS_CERTIFICATE_TEMPLATE: "msCertificateTemplate",
ExtensionOID.ADMISSIONS: "Admissions",
CRLEntryExtensionOID.CRL_REASON: "cRLReason",
CRLEntryExtensionOID.INVALIDITY_DATE: "invalidityDate",
CRLEntryExtensionOID.CERTIFICATE_ISSUER: "certificateIssuer",
@@ -282,7 +339,7 @@ _OID_NAMES = {
ExtensionOID.EXTENDED_KEY_USAGE: "extendedKeyUsage",
ExtensionOID.FRESHEST_CRL: "freshestCRL",
ExtensionOID.INHIBIT_ANY_POLICY: "inhibitAnyPolicy",
ExtensionOID.ISSUING_DISTRIBUTION_POINT: ("issuingDistributionPoint"),
ExtensionOID.ISSUING_DISTRIBUTION_POINT: "issuingDistributionPoint",
ExtensionOID.AUTHORITY_INFORMATION_ACCESS: "authorityInfoAccess",
ExtensionOID.SUBJECT_INFORMATION_ACCESS: "subjectInfoAccess",
ExtensionOID.OCSP_NO_CHECK: "OCSPNoCheck",

View File

@@ -0,0 +1,10 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from cryptography.hazmat.asn1.asn1 import encode_der, sequence
__all__ = [
"encode_der",
"sequence",
]

View File

@@ -0,0 +1,116 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import annotations
import dataclasses
import sys
import typing
if sys.version_info < (3, 11):
import typing_extensions
# We use the `include_extras` parameter of `get_type_hints`, which was
# added in Python 3.9. This can be replaced by the `typing` version
# once the min version is >= 3.9
if sys.version_info < (3, 9):
get_type_hints = typing_extensions.get_type_hints
else:
get_type_hints = typing.get_type_hints
else:
get_type_hints = typing.get_type_hints
from cryptography.hazmat.bindings._rust import declarative_asn1
T = typing.TypeVar("T", covariant=True)
U = typing.TypeVar("U")
encode_der = declarative_asn1.encode_der
def _normalize_field_type(
field_type: typing.Any, field_name: str
) -> declarative_asn1.AnnotatedType:
annotation = declarative_asn1.Annotation()
if hasattr(field_type, "__asn1_root__"):
annotated_root = field_type.__asn1_root__
if not isinstance(annotated_root, declarative_asn1.AnnotatedType):
raise TypeError(f"unsupported root type: {annotated_root}")
return annotated_root
else:
rust_field_type = declarative_asn1.non_root_python_to_rust(field_type)
return declarative_asn1.AnnotatedType(rust_field_type, annotation)
def _annotate_fields(
raw_fields: dict[str, type],
) -> dict[str, declarative_asn1.AnnotatedType]:
fields = {}
for field_name, field_type in raw_fields.items():
# Recursively normalize the field type into something that the
# Rust code can understand.
annotated_field_type = _normalize_field_type(field_type, field_name)
fields[field_name] = annotated_field_type
return fields
def _register_asn1_sequence(cls: type[U]) -> None:
raw_fields = get_type_hints(cls, include_extras=True)
root = declarative_asn1.AnnotatedType(
declarative_asn1.Type.Sequence(cls, _annotate_fields(raw_fields)),
declarative_asn1.Annotation(),
)
setattr(cls, "__asn1_root__", root)
# Due to https://github.com/python/mypy/issues/19731, we can't define an alias
# for `dataclass_transform` that conditionally points to `typing` or
# `typing_extensions` depending on the Python version (like we do for
# `get_type_hints`).
# We work around it by making the whole decorated class conditional on the
# Python version.
if sys.version_info < (3, 11):
@typing_extensions.dataclass_transform(kw_only_default=True)
def sequence(cls: type[U]) -> type[U]:
# We use `dataclasses.dataclass` to add an __init__ method
# to the class with keyword-only parameters.
if sys.version_info >= (3, 10):
dataclass_cls = dataclasses.dataclass(
repr=False,
eq=False,
# `match_args` was added in Python 3.10 and defaults
# to True
match_args=False,
# `kw_only` was added in Python 3.10 and defaults to
# False
kw_only=True,
)(cls)
else:
dataclass_cls = dataclasses.dataclass(
repr=False,
eq=False,
)(cls)
_register_asn1_sequence(dataclass_cls)
return dataclass_cls
else:
@typing.dataclass_transform(kw_only_default=True)
def sequence(cls: type[U]) -> type[U]:
# Only add an __init__ method, with keyword-only
# parameters.
dataclass_cls = dataclasses.dataclass(
repr=False,
eq=False,
match_args=False,
kw_only=True,
)(cls)
_register_asn1_sequence(dataclass_cls)
return dataclass_cls

View File

@@ -1,527 +0,0 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import annotations
import typing
from cryptography.exceptions import InvalidTag
if typing.TYPE_CHECKING:
from cryptography.hazmat.backends.openssl.backend import Backend
from cryptography.hazmat.primitives.ciphers.aead import (
AESCCM,
AESGCM,
AESOCB3,
AESSIV,
ChaCha20Poly1305,
)
_AEADTypes = typing.Union[
AESCCM, AESGCM, AESOCB3, AESSIV, ChaCha20Poly1305
]
def _is_evp_aead_supported_cipher(
backend: Backend, cipher: _AEADTypes
) -> bool:
"""
Checks whether the given cipher is supported through
EVP_AEAD rather than the normal OpenSSL EVP_CIPHER API.
"""
from cryptography.hazmat.primitives.ciphers.aead import ChaCha20Poly1305
return backend._lib.Cryptography_HAS_EVP_AEAD and isinstance(
cipher, ChaCha20Poly1305
)
def _aead_cipher_supported(backend: Backend, cipher: _AEADTypes) -> bool:
if _is_evp_aead_supported_cipher(backend, cipher):
return True
else:
cipher_name = _evp_cipher_cipher_name(cipher)
if backend._fips_enabled and cipher_name not in backend._fips_aead:
return False
# SIV isn't loaded through get_cipherbyname but instead a new fetch API
# only available in 3.0+. But if we know we're on 3.0+ then we know
# it's supported.
if cipher_name.endswith(b"-siv"):
return backend._lib.CRYPTOGRAPHY_OPENSSL_300_OR_GREATER == 1
else:
return (
backend._lib.EVP_get_cipherbyname(cipher_name)
!= backend._ffi.NULL
)
def _aead_create_ctx(
backend: Backend,
cipher: _AEADTypes,
key: bytes,
):
if _is_evp_aead_supported_cipher(backend, cipher):
return _evp_aead_create_ctx(backend, cipher, key)
else:
return _evp_cipher_create_ctx(backend, cipher, key)
def _encrypt(
backend: Backend,
cipher: _AEADTypes,
nonce: bytes,
data: bytes,
associated_data: typing.List[bytes],
tag_length: int,
ctx: typing.Any = None,
) -> bytes:
if _is_evp_aead_supported_cipher(backend, cipher):
return _evp_aead_encrypt(
backend, cipher, nonce, data, associated_data, tag_length, ctx
)
else:
return _evp_cipher_encrypt(
backend, cipher, nonce, data, associated_data, tag_length, ctx
)
def _decrypt(
backend: Backend,
cipher: _AEADTypes,
nonce: bytes,
data: bytes,
associated_data: typing.List[bytes],
tag_length: int,
ctx: typing.Any = None,
) -> bytes:
if _is_evp_aead_supported_cipher(backend, cipher):
return _evp_aead_decrypt(
backend, cipher, nonce, data, associated_data, tag_length, ctx
)
else:
return _evp_cipher_decrypt(
backend, cipher, nonce, data, associated_data, tag_length, ctx
)
def _evp_aead_create_ctx(
backend: Backend,
cipher: _AEADTypes,
key: bytes,
tag_len: typing.Optional[int] = None,
):
aead_cipher = _evp_aead_get_cipher(backend, cipher)
assert aead_cipher is not None
key_ptr = backend._ffi.from_buffer(key)
tag_len = (
backend._lib.EVP_AEAD_DEFAULT_TAG_LENGTH
if tag_len is None
else tag_len
)
ctx = backend._lib.Cryptography_EVP_AEAD_CTX_new(
aead_cipher, key_ptr, len(key), tag_len
)
backend.openssl_assert(ctx != backend._ffi.NULL)
ctx = backend._ffi.gc(ctx, backend._lib.EVP_AEAD_CTX_free)
return ctx
def _evp_aead_get_cipher(backend: Backend, cipher: _AEADTypes):
from cryptography.hazmat.primitives.ciphers.aead import (
ChaCha20Poly1305,
)
# Currently only ChaCha20-Poly1305 is supported using this API
assert isinstance(cipher, ChaCha20Poly1305)
return backend._lib.EVP_aead_chacha20_poly1305()
def _evp_aead_encrypt(
backend: Backend,
cipher: _AEADTypes,
nonce: bytes,
data: bytes,
associated_data: typing.List[bytes],
tag_length: int,
ctx: typing.Any,
) -> bytes:
assert ctx is not None
aead_cipher = _evp_aead_get_cipher(backend, cipher)
assert aead_cipher is not None
out_len = backend._ffi.new("size_t *")
# max_out_len should be in_len plus the result of
# EVP_AEAD_max_overhead.
max_out_len = len(data) + backend._lib.EVP_AEAD_max_overhead(aead_cipher)
out_buf = backend._ffi.new("uint8_t[]", max_out_len)
data_ptr = backend._ffi.from_buffer(data)
nonce_ptr = backend._ffi.from_buffer(nonce)
aad = b"".join(associated_data)
aad_ptr = backend._ffi.from_buffer(aad)
res = backend._lib.EVP_AEAD_CTX_seal(
ctx,
out_buf,
out_len,
max_out_len,
nonce_ptr,
len(nonce),
data_ptr,
len(data),
aad_ptr,
len(aad),
)
backend.openssl_assert(res == 1)
encrypted_data = backend._ffi.buffer(out_buf, out_len[0])[:]
return encrypted_data
def _evp_aead_decrypt(
backend: Backend,
cipher: _AEADTypes,
nonce: bytes,
data: bytes,
associated_data: typing.List[bytes],
tag_length: int,
ctx: typing.Any,
) -> bytes:
if len(data) < tag_length:
raise InvalidTag
assert ctx is not None
out_len = backend._ffi.new("size_t *")
# max_out_len should at least in_len
max_out_len = len(data)
out_buf = backend._ffi.new("uint8_t[]", max_out_len)
data_ptr = backend._ffi.from_buffer(data)
nonce_ptr = backend._ffi.from_buffer(nonce)
aad = b"".join(associated_data)
aad_ptr = backend._ffi.from_buffer(aad)
res = backend._lib.EVP_AEAD_CTX_open(
ctx,
out_buf,
out_len,
max_out_len,
nonce_ptr,
len(nonce),
data_ptr,
len(data),
aad_ptr,
len(aad),
)
if res == 0:
backend._consume_errors()
raise InvalidTag
decrypted_data = backend._ffi.buffer(out_buf, out_len[0])[:]
return decrypted_data
_ENCRYPT = 1
_DECRYPT = 0
def _evp_cipher_cipher_name(cipher: _AEADTypes) -> bytes:
from cryptography.hazmat.primitives.ciphers.aead import (
AESCCM,
AESGCM,
AESOCB3,
AESSIV,
ChaCha20Poly1305,
)
if isinstance(cipher, ChaCha20Poly1305):
return b"chacha20-poly1305"
elif isinstance(cipher, AESCCM):
return f"aes-{len(cipher._key) * 8}-ccm".encode("ascii")
elif isinstance(cipher, AESOCB3):
return f"aes-{len(cipher._key) * 8}-ocb".encode("ascii")
elif isinstance(cipher, AESSIV):
return f"aes-{len(cipher._key) * 8 // 2}-siv".encode("ascii")
else:
assert isinstance(cipher, AESGCM)
return f"aes-{len(cipher._key) * 8}-gcm".encode("ascii")
def _evp_cipher(cipher_name: bytes, backend: Backend):
if cipher_name.endswith(b"-siv"):
evp_cipher = backend._lib.EVP_CIPHER_fetch(
backend._ffi.NULL,
cipher_name,
backend._ffi.NULL,
)
backend.openssl_assert(evp_cipher != backend._ffi.NULL)
evp_cipher = backend._ffi.gc(evp_cipher, backend._lib.EVP_CIPHER_free)
else:
evp_cipher = backend._lib.EVP_get_cipherbyname(cipher_name)
backend.openssl_assert(evp_cipher != backend._ffi.NULL)
return evp_cipher
def _evp_cipher_create_ctx(
backend: Backend,
cipher: _AEADTypes,
key: bytes,
):
ctx = backend._lib.EVP_CIPHER_CTX_new()
backend.openssl_assert(ctx != backend._ffi.NULL)
ctx = backend._ffi.gc(ctx, backend._lib.EVP_CIPHER_CTX_free)
cipher_name = _evp_cipher_cipher_name(cipher)
evp_cipher = _evp_cipher(cipher_name, backend)
key_ptr = backend._ffi.from_buffer(key)
res = backend._lib.EVP_CipherInit_ex(
ctx,
evp_cipher,
backend._ffi.NULL,
key_ptr,
backend._ffi.NULL,
0,
)
backend.openssl_assert(res != 0)
return ctx
def _evp_cipher_aead_setup(
backend: Backend,
cipher_name: bytes,
key: bytes,
nonce: bytes,
tag: typing.Optional[bytes],
tag_len: int,
operation: int,
):
evp_cipher = _evp_cipher(cipher_name, backend)
ctx = backend._lib.EVP_CIPHER_CTX_new()
ctx = backend._ffi.gc(ctx, backend._lib.EVP_CIPHER_CTX_free)
res = backend._lib.EVP_CipherInit_ex(
ctx,
evp_cipher,
backend._ffi.NULL,
backend._ffi.NULL,
backend._ffi.NULL,
int(operation == _ENCRYPT),
)
backend.openssl_assert(res != 0)
# CCM requires the IVLEN to be set before calling SET_TAG on decrypt
res = backend._lib.EVP_CIPHER_CTX_ctrl(
ctx,
backend._lib.EVP_CTRL_AEAD_SET_IVLEN,
len(nonce),
backend._ffi.NULL,
)
backend.openssl_assert(res != 0)
if operation == _DECRYPT:
assert tag is not None
_evp_cipher_set_tag(backend, ctx, tag)
elif cipher_name.endswith(b"-ccm"):
res = backend._lib.EVP_CIPHER_CTX_ctrl(
ctx,
backend._lib.EVP_CTRL_AEAD_SET_TAG,
tag_len,
backend._ffi.NULL,
)
backend.openssl_assert(res != 0)
nonce_ptr = backend._ffi.from_buffer(nonce)
key_ptr = backend._ffi.from_buffer(key)
res = backend._lib.EVP_CipherInit_ex(
ctx,
backend._ffi.NULL,
backend._ffi.NULL,
key_ptr,
nonce_ptr,
int(operation == _ENCRYPT),
)
backend.openssl_assert(res != 0)
return ctx
def _evp_cipher_set_tag(backend, ctx, tag: bytes) -> None:
tag_ptr = backend._ffi.from_buffer(tag)
res = backend._lib.EVP_CIPHER_CTX_ctrl(
ctx, backend._lib.EVP_CTRL_AEAD_SET_TAG, len(tag), tag_ptr
)
backend.openssl_assert(res != 0)
def _evp_cipher_set_nonce_operation(
backend, ctx, nonce: bytes, operation: int
) -> None:
nonce_ptr = backend._ffi.from_buffer(nonce)
res = backend._lib.EVP_CipherInit_ex(
ctx,
backend._ffi.NULL,
backend._ffi.NULL,
backend._ffi.NULL,
nonce_ptr,
int(operation == _ENCRYPT),
)
backend.openssl_assert(res != 0)
def _evp_cipher_set_length(backend: Backend, ctx, data_len: int) -> None:
intptr = backend._ffi.new("int *")
res = backend._lib.EVP_CipherUpdate(
ctx, backend._ffi.NULL, intptr, backend._ffi.NULL, data_len
)
backend.openssl_assert(res != 0)
def _evp_cipher_process_aad(
backend: Backend, ctx, associated_data: bytes
) -> None:
outlen = backend._ffi.new("int *")
a_data_ptr = backend._ffi.from_buffer(associated_data)
res = backend._lib.EVP_CipherUpdate(
ctx, backend._ffi.NULL, outlen, a_data_ptr, len(associated_data)
)
backend.openssl_assert(res != 0)
def _evp_cipher_process_data(backend: Backend, ctx, data: bytes) -> bytes:
outlen = backend._ffi.new("int *")
buf = backend._ffi.new("unsigned char[]", len(data))
data_ptr = backend._ffi.from_buffer(data)
res = backend._lib.EVP_CipherUpdate(ctx, buf, outlen, data_ptr, len(data))
if res == 0:
# AES SIV can error here if the data is invalid on decrypt
backend._consume_errors()
raise InvalidTag
return backend._ffi.buffer(buf, outlen[0])[:]
def _evp_cipher_encrypt(
backend: Backend,
cipher: _AEADTypes,
nonce: bytes,
data: bytes,
associated_data: typing.List[bytes],
tag_length: int,
ctx: typing.Any = None,
) -> bytes:
from cryptography.hazmat.primitives.ciphers.aead import AESCCM, AESSIV
if ctx is None:
cipher_name = _evp_cipher_cipher_name(cipher)
ctx = _evp_cipher_aead_setup(
backend,
cipher_name,
cipher._key,
nonce,
None,
tag_length,
_ENCRYPT,
)
else:
_evp_cipher_set_nonce_operation(backend, ctx, nonce, _ENCRYPT)
# CCM requires us to pass the length of the data before processing
# anything.
# However calling this with any other AEAD results in an error
if isinstance(cipher, AESCCM):
_evp_cipher_set_length(backend, ctx, len(data))
for ad in associated_data:
_evp_cipher_process_aad(backend, ctx, ad)
processed_data = _evp_cipher_process_data(backend, ctx, data)
outlen = backend._ffi.new("int *")
# All AEADs we support besides OCB are streaming so they return nothing
# in finalization. OCB can return up to (16 byte block - 1) bytes so
# we need a buffer here too.
buf = backend._ffi.new("unsigned char[]", 16)
res = backend._lib.EVP_CipherFinal_ex(ctx, buf, outlen)
backend.openssl_assert(res != 0)
processed_data += backend._ffi.buffer(buf, outlen[0])[:]
tag_buf = backend._ffi.new("unsigned char[]", tag_length)
res = backend._lib.EVP_CIPHER_CTX_ctrl(
ctx, backend._lib.EVP_CTRL_AEAD_GET_TAG, tag_length, tag_buf
)
backend.openssl_assert(res != 0)
tag = backend._ffi.buffer(tag_buf)[:]
if isinstance(cipher, AESSIV):
# RFC 5297 defines the output as IV || C, where the tag we generate
# is the "IV" and C is the ciphertext. This is the opposite of our
# other AEADs, which are Ciphertext || Tag
backend.openssl_assert(len(tag) == 16)
return tag + processed_data
else:
return processed_data + tag
def _evp_cipher_decrypt(
    backend: Backend,
    cipher: _AEADTypes,
    nonce: bytes,
    data: bytes,
    associated_data: typing.List[bytes],
    tag_length: int,
    ctx: typing.Any = None,
) -> bytes:
    """One-shot AEAD decryption via OpenSSL's EVP_Cipher* interface.

    ``data`` is the combined ciphertext+tag as produced by
    ``_evp_cipher_encrypt`` (tag-first for AES-SIV, tag-last otherwise).
    Raises :class:`InvalidTag` if the input is too short or authentication
    fails. If ``ctx`` is given it is reused with a fresh nonce/tag.
    """
    from cryptography.hazmat.primitives.ciphers.aead import AESCCM, AESSIV
    # Cannot possibly contain a full tag — reject before touching OpenSSL.
    if len(data) < tag_length:
        raise InvalidTag
    if isinstance(cipher, AESSIV):
        # RFC 5297 defines the output as IV || C, where the tag we generate
        # is the "IV" and C is the ciphertext. This is the opposite of our
        # other AEADs, which are Ciphertext || Tag
        tag = data[:tag_length]
        data = data[tag_length:]
    else:
        tag = data[-tag_length:]
        data = data[:-tag_length]
    if ctx is None:
        cipher_name = _evp_cipher_cipher_name(cipher)
        ctx = _evp_cipher_aead_setup(
            backend,
            cipher_name,
            cipher._key,
            nonce,
            tag,
            tag_length,
            _DECRYPT,
        )
    else:
        _evp_cipher_set_nonce_operation(backend, ctx, nonce, _DECRYPT)
        _evp_cipher_set_tag(backend, ctx, tag)
    # CCM requires us to pass the length of the data before processing
    # anything.
    # However calling this with any other AEAD results in an error
    if isinstance(cipher, AESCCM):
        _evp_cipher_set_length(backend, ctx, len(data))
    for ad in associated_data:
        _evp_cipher_process_aad(backend, ctx, ad)
    # CCM has a different error path if the tag doesn't match. Errors are
    # raised in Update and Final is irrelevant.
    if isinstance(cipher, AESCCM):
        outlen = backend._ffi.new("int *")
        buf = backend._ffi.new("unsigned char[]", len(data))
        d_ptr = backend._ffi.from_buffer(data)
        res = backend._lib.EVP_CipherUpdate(ctx, buf, outlen, d_ptr, len(data))
        if res != 1:
            backend._consume_errors()
            raise InvalidTag
        processed_data = backend._ffi.buffer(buf, outlen[0])[:]
    else:
        processed_data = _evp_cipher_process_data(backend, ctx, data)
        outlen = backend._ffi.new("int *")
        # OCB can return up to 15 bytes (16 byte block - 1) in finalization
        buf = backend._ffi.new("unsigned char[]", 16)
        res = backend._lib.EVP_CipherFinal_ex(ctx, buf, outlen)
        processed_data += backend._ffi.buffer(buf, outlen[0])[:]
        # Tag verification happens in Final for non-CCM AEADs.
        if res == 0:
            backend._consume_errors()
            raise InvalidTag
    return processed_data

View File

@@ -1,281 +0,0 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import annotations
import typing
from cryptography.exceptions import InvalidTag, UnsupportedAlgorithm, _Reasons
from cryptography.hazmat.primitives import ciphers
from cryptography.hazmat.primitives.ciphers import algorithms, modes
if typing.TYPE_CHECKING:
from cryptography.hazmat.backends.openssl.backend import Backend
class _CipherContext:
    """Symmetric cipher context backed by an OpenSSL ``EVP_CIPHER_CTX``.

    Handles init (key/IV/nonce/tweak), chunked update, and finalization
    for the ciphers registered in the backend's cipher registry,
    including GCM tag set/get. Padding is disabled here; any padding is
    applied by higher layers of the library.
    """
    # Values passed as the ``enc`` argument of EVP_CipherInit_ex.
    _ENCRYPT = 1
    _DECRYPT = 0
    # EVP_CipherUpdate takes an int length, so large payloads are chunked.
    _MAX_CHUNK_SIZE = 2**30 - 1
    def __init__(self, backend: Backend, cipher, mode, operation: int) -> None:
        """Create and fully initialize a cipher context.

        Raises UnsupportedAlgorithm if (cipher, mode) is not registered or
        OpenSSL has no implementation; ValueError for duplicated XTS keys.
        """
        self._backend = backend
        self._cipher = cipher
        self._mode = mode
        self._operation = operation
        # GCM tag: set at init when decrypting with a known tag, or filled
        # in by finalize() when encrypting.
        self._tag: typing.Optional[bytes] = None
        if isinstance(self._cipher, ciphers.BlockCipherAlgorithm):
            self._block_size_bytes = self._cipher.block_size // 8
        else:
            # Stream ciphers: no block structure, treat block size as 1.
            self._block_size_bytes = 1
        ctx = self._backend._lib.EVP_CIPHER_CTX_new()
        # gc ties EVP_CIPHER_CTX_free to the cffi handle's lifetime.
        ctx = self._backend._ffi.gc(
            ctx, self._backend._lib.EVP_CIPHER_CTX_free
        )
        registry = self._backend._cipher_registry
        try:
            adapter = registry[type(cipher), type(mode)]
        except KeyError:
            raise UnsupportedAlgorithm(
                "cipher {} in {} mode is not supported "
                "by this backend.".format(
                    cipher.name, mode.name if mode else mode
                ),
                _Reasons.UNSUPPORTED_CIPHER,
            )
        evp_cipher = adapter(self._backend, cipher, mode)
        if evp_cipher == self._backend._ffi.NULL:
            msg = f"cipher {cipher.name} "
            if mode is not None:
                msg += f"in {mode.name} mode "
            msg += (
                "is not supported by this backend (Your version of OpenSSL "
                "may be too old. Current version: {}.)"
            ).format(self._backend.openssl_version_text())
            raise UnsupportedAlgorithm(msg, _Reasons.UNSUPPORTED_CIPHER)
        # Select the IV/tweak/nonce bytes appropriate to the mode; ChaCha20
        # carries its nonce on the algorithm object rather than a mode.
        if isinstance(mode, modes.ModeWithInitializationVector):
            iv_nonce = self._backend._ffi.from_buffer(
                mode.initialization_vector
            )
        elif isinstance(mode, modes.ModeWithTweak):
            iv_nonce = self._backend._ffi.from_buffer(mode.tweak)
        elif isinstance(mode, modes.ModeWithNonce):
            iv_nonce = self._backend._ffi.from_buffer(mode.nonce)
        elif isinstance(cipher, algorithms.ChaCha20):
            iv_nonce = self._backend._ffi.from_buffer(cipher.nonce)
        else:
            iv_nonce = self._backend._ffi.NULL
        # begin init with cipher and operation type
        res = self._backend._lib.EVP_CipherInit_ex(
            ctx,
            evp_cipher,
            self._backend._ffi.NULL,
            self._backend._ffi.NULL,
            self._backend._ffi.NULL,
            operation,
        )
        self._backend.openssl_assert(res != 0)
        # set the key length to handle variable key ciphers
        res = self._backend._lib.EVP_CIPHER_CTX_set_key_length(
            ctx, len(cipher.key)
        )
        self._backend.openssl_assert(res != 0)
        if isinstance(mode, modes.GCM):
            # Non-default IV lengths must be declared before the key/IV init.
            res = self._backend._lib.EVP_CIPHER_CTX_ctrl(
                ctx,
                self._backend._lib.EVP_CTRL_AEAD_SET_IVLEN,
                len(iv_nonce),
                self._backend._ffi.NULL,
            )
            self._backend.openssl_assert(res != 0)
            if mode.tag is not None:
                res = self._backend._lib.EVP_CIPHER_CTX_ctrl(
                    ctx,
                    self._backend._lib.EVP_CTRL_AEAD_SET_TAG,
                    len(mode.tag),
                    mode.tag,
                )
                self._backend.openssl_assert(res != 0)
                self._tag = mode.tag
        # pass key/iv
        res = self._backend._lib.EVP_CipherInit_ex(
            ctx,
            self._backend._ffi.NULL,
            self._backend._ffi.NULL,
            self._backend._ffi.from_buffer(cipher.key),
            iv_nonce,
            operation,
        )
        # Check for XTS mode duplicate keys error
        errors = self._backend._consume_errors()
        lib = self._backend._lib
        if res == 0 and (
            (
                not lib.CRYPTOGRAPHY_IS_LIBRESSL
                and errors[0]._lib_reason_match(
                    lib.ERR_LIB_EVP, lib.EVP_R_XTS_DUPLICATED_KEYS
                )
            )
            or (
                lib.Cryptography_HAS_PROVIDERS
                and errors[0]._lib_reason_match(
                    lib.ERR_LIB_PROV, lib.PROV_R_XTS_DUPLICATED_KEYS
                )
            )
        ):
            raise ValueError("In XTS mode duplicated keys are not allowed")
        self._backend.openssl_assert(res != 0, errors=errors)
        # We purposely disable padding here as it's handled higher up in the
        # API.
        self._backend._lib.EVP_CIPHER_CTX_set_padding(ctx, 0)
        self._ctx = ctx
    def update(self, data: bytes) -> bytes:
        """Process ``data`` and return whatever output is available."""
        # Worst case output is input plus one block minus one byte.
        buf = bytearray(len(data) + self._block_size_bytes - 1)
        n = self.update_into(data, buf)
        return bytes(buf[:n])
    def update_into(self, data: bytes, buf: bytes) -> int:
        """Process ``data`` into caller-provided writable ``buf``.

        Returns the number of bytes written. ``buf`` must hold at least
        len(data) + block_size - 1 bytes. Input is fed to OpenSSL in
        chunks of at most _MAX_CHUNK_SIZE.
        """
        total_data_len = len(data)
        if len(buf) < (total_data_len + self._block_size_bytes - 1):
            raise ValueError(
                "buffer must be at least {} bytes for this "
                "payload".format(len(data) + self._block_size_bytes - 1)
            )
        data_processed = 0
        total_out = 0
        outlen = self._backend._ffi.new("int *")
        baseoutbuf = self._backend._ffi.from_buffer(buf, require_writable=True)
        baseinbuf = self._backend._ffi.from_buffer(data)
        while data_processed != total_data_len:
            # Pointer arithmetic: advance into the buffers by the amounts
            # already consumed/produced.
            outbuf = baseoutbuf + total_out
            inbuf = baseinbuf + data_processed
            inlen = min(self._MAX_CHUNK_SIZE, total_data_len - data_processed)
            res = self._backend._lib.EVP_CipherUpdate(
                self._ctx, outbuf, outlen, inbuf, inlen
            )
            if res == 0 and isinstance(self._mode, modes.XTS):
                self._backend._consume_errors()
                raise ValueError(
                    "In XTS mode you must supply at least a full block in the "
                    "first update call. For AES this is 16 bytes."
                )
            else:
                self._backend.openssl_assert(res != 0)
            data_processed += inlen
            total_out += outlen[0]
        return total_out
    def finalize(self) -> bytes:
        """Finish the operation and return any remaining output bytes.

        For GCM: raises InvalidTag on authentication failure when
        decrypting, and captures the tag into ``self._tag`` when
        encrypting. Raises ValueError for non-block-multiple input.
        """
        if (
            self._operation == self._DECRYPT
            and isinstance(self._mode, modes.ModeWithAuthenticationTag)
            and self.tag is None
        ):
            raise ValueError(
                "Authentication tag must be provided when decrypting."
            )
        buf = self._backend._ffi.new("unsigned char[]", self._block_size_bytes)
        outlen = self._backend._ffi.new("int *")
        res = self._backend._lib.EVP_CipherFinal_ex(self._ctx, buf, outlen)
        if res == 0:
            errors = self._backend._consume_errors()
            # GCM tag mismatch produces no error-queue entries.
            if not errors and isinstance(self._mode, modes.GCM):
                raise InvalidTag
            lib = self._backend._lib
            # The only other expected failure is a wrong final block
            # length; the reason code differs per OpenSSL flavor.
            self._backend.openssl_assert(
                errors[0]._lib_reason_match(
                    lib.ERR_LIB_EVP,
                    lib.EVP_R_DATA_NOT_MULTIPLE_OF_BLOCK_LENGTH,
                )
                or (
                    lib.Cryptography_HAS_PROVIDERS
                    and errors[0]._lib_reason_match(
                        lib.ERR_LIB_PROV,
                        lib.PROV_R_WRONG_FINAL_BLOCK_LENGTH,
                    )
                )
                or (
                    lib.CRYPTOGRAPHY_IS_BORINGSSL
                    and errors[0].reason
                    == lib.CIPHER_R_DATA_NOT_MULTIPLE_OF_BLOCK_LENGTH
                ),
                errors=errors,
            )
            raise ValueError(
                "The length of the provided data is not a multiple of "
                "the block length."
            )
        if (
            isinstance(self._mode, modes.GCM)
            and self._operation == self._ENCRYPT
        ):
            tag_buf = self._backend._ffi.new(
                "unsigned char[]", self._block_size_bytes
            )
            res = self._backend._lib.EVP_CIPHER_CTX_ctrl(
                self._ctx,
                self._backend._lib.EVP_CTRL_AEAD_GET_TAG,
                self._block_size_bytes,
                tag_buf,
            )
            self._backend.openssl_assert(res != 0)
            self._tag = self._backend._ffi.buffer(tag_buf)[:]
        res = self._backend._lib.EVP_CIPHER_CTX_reset(self._ctx)
        self._backend.openssl_assert(res == 1)
        return self._backend._ffi.buffer(buf)[: outlen[0]]
    def finalize_with_tag(self, tag: bytes) -> bytes:
        """Set the expected authentication tag, then finalize.

        Validates the tag length against the mode's minimum and the
        cipher's block size before handing it to OpenSSL.
        """
        tag_len = len(tag)
        if tag_len < self._mode._min_tag_length:
            raise ValueError(
                "Authentication tag must be {} bytes or longer.".format(
                    self._mode._min_tag_length
                )
            )
        elif tag_len > self._block_size_bytes:
            raise ValueError(
                "Authentication tag cannot be more than {} bytes.".format(
                    self._block_size_bytes
                )
            )
        res = self._backend._lib.EVP_CIPHER_CTX_ctrl(
            self._ctx, self._backend._lib.EVP_CTRL_AEAD_SET_TAG, len(tag), tag
        )
        self._backend.openssl_assert(res != 0)
        self._tag = tag
        return self.finalize()
    def authenticate_additional_data(self, data: bytes) -> None:
        """Feed AAD to the cipher (NULL output buffer signals AAD-only)."""
        outlen = self._backend._ffi.new("int *")
        res = self._backend._lib.EVP_CipherUpdate(
            self._ctx,
            self._backend._ffi.NULL,
            outlen,
            self._backend._ffi.from_buffer(data),
            len(data),
        )
        self._backend.openssl_assert(res != 0)
    @property
    def tag(self) -> typing.Optional[bytes]:
        # None until a tag has been supplied (decrypt) or computed (encrypt).
        return self._tag

View File

@@ -1,89 +0,0 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import annotations
import typing
from cryptography.exceptions import (
InvalidSignature,
UnsupportedAlgorithm,
_Reasons,
)
from cryptography.hazmat.primitives import constant_time
from cryptography.hazmat.primitives.ciphers.modes import CBC
if typing.TYPE_CHECKING:
from cryptography.hazmat.backends.openssl.backend import Backend
from cryptography.hazmat.primitives import ciphers
class _CMACContext:
    """CMAC (RFC 4493-style MAC) context backed by OpenSSL's CMAC API.

    Wraps CMAC_CTX_new/Init/Update/Final/CTX_copy. The underlying cipher
    is looked up in the backend's registry using CBC mode.
    """
    def __init__(
        self,
        backend: Backend,
        algorithm: ciphers.BlockCipherAlgorithm,
        ctx=None,
    ) -> None:
        """Initialize a CMAC context for ``algorithm``.

        ``ctx`` is an already-initialized CMAC_CTX (used by copy());
        when None a new one is created and keyed.
        """
        if not backend.cmac_algorithm_supported(algorithm):
            raise UnsupportedAlgorithm(
                "This backend does not support CMAC.",
                _Reasons.UNSUPPORTED_CIPHER,
            )
        self._backend = backend
        self._key = algorithm.key
        self._algorithm = algorithm
        # CMAC output is one cipher block.
        self._output_length = algorithm.block_size // 8
        if ctx is None:
            registry = self._backend._cipher_registry
            adapter = registry[type(algorithm), CBC]
            evp_cipher = adapter(self._backend, algorithm, CBC)
            ctx = self._backend._lib.CMAC_CTX_new()
            self._backend.openssl_assert(ctx != self._backend._ffi.NULL)
            # Tie CMAC_CTX_free to the handle's lifetime.
            ctx = self._backend._ffi.gc(ctx, self._backend._lib.CMAC_CTX_free)
            key_ptr = self._backend._ffi.from_buffer(self._key)
            res = self._backend._lib.CMAC_Init(
                ctx,
                key_ptr,
                len(self._key),
                evp_cipher,
                self._backend._ffi.NULL,
            )
            self._backend.openssl_assert(res == 1)
        self._ctx = ctx
    def update(self, data: bytes) -> None:
        """Feed more message bytes into the MAC."""
        res = self._backend._lib.CMAC_Update(self._ctx, data, len(data))
        self._backend.openssl_assert(res == 1)
    def finalize(self) -> bytes:
        """Return the MAC value; the context is unusable afterwards."""
        buf = self._backend._ffi.new("unsigned char[]", self._output_length)
        length = self._backend._ffi.new("size_t *", self._output_length)
        res = self._backend._lib.CMAC_Final(self._ctx, buf, length)
        self._backend.openssl_assert(res == 1)
        # Drop the ctx so further use fails loudly.
        self._ctx = None
        return self._backend._ffi.buffer(buf)[:]
    def copy(self) -> _CMACContext:
        """Return an independent context with the same internal state."""
        copied_ctx = self._backend._lib.CMAC_CTX_new()
        copied_ctx = self._backend._ffi.gc(
            copied_ctx, self._backend._lib.CMAC_CTX_free
        )
        res = self._backend._lib.CMAC_CTX_copy(copied_ctx, self._ctx)
        self._backend.openssl_assert(res == 1)
        return _CMACContext(self._backend, self._algorithm, ctx=copied_ctx)
    def verify(self, signature: bytes) -> None:
        """Finalize and compare against ``signature`` in constant time."""
        digest = self.finalize()
        if not constant_time.bytes_eq(digest, signature):
            raise InvalidSignature("Signature did not match digest.")

View File

@@ -1,32 +0,0 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import annotations
from cryptography import x509
# CRLReason ::= ENUMERATED {
#        unspecified             (0),
#        keyCompromise           (1),
#        cACompromise            (2),
#        affiliationChanged      (3),
#        superseded              (4),
#        cessationOfOperation    (5),
#        certificateHold         (6),
#             -- value 7 is not used
#        removeFromCRL           (8),
#        privilegeWithdrawn      (9),
#        aACompromise           (10) }
# Maps the library's ReasonFlags enum members to the integer codes of the
# ASN.1 CRLReason ENUMERATED shown above (note: 7 is intentionally absent).
_CRL_ENTRY_REASON_ENUM_TO_CODE = {
    x509.ReasonFlags.unspecified: 0,
    x509.ReasonFlags.key_compromise: 1,
    x509.ReasonFlags.ca_compromise: 2,
    x509.ReasonFlags.affiliation_changed: 3,
    x509.ReasonFlags.superseded: 4,
    x509.ReasonFlags.cessation_of_operation: 5,
    x509.ReasonFlags.certificate_hold: 6,
    x509.ReasonFlags.remove_from_crl: 8,
    x509.ReasonFlags.privilege_withdrawn: 9,
    x509.ReasonFlags.aa_compromise: 10,
}

View File

@@ -1,328 +0,0 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import annotations
import typing
from cryptography.exceptions import (
InvalidSignature,
UnsupportedAlgorithm,
_Reasons,
)
from cryptography.hazmat.backends.openssl.utils import (
_calculate_digest_and_algorithm,
_evp_pkey_derive,
)
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import ec
if typing.TYPE_CHECKING:
from cryptography.hazmat.backends.openssl.backend import Backend
def _check_signature_algorithm(
    signature_algorithm: ec.EllipticCurveSignatureAlgorithm,
) -> None:
    """Reject any EC signature algorithm other than ECDSA."""
    if isinstance(signature_algorithm, ec.ECDSA):
        return
    raise UnsupportedAlgorithm(
        "Unsupported elliptic curve signature algorithm.",
        _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM,
    )
def _ec_key_curve_sn(backend: Backend, ec_key) -> str:
    """Return the OpenSSL short name (e.g. ``secp256r1``-style SN) of the
    curve on an EC_KEY, rejecting keys with explicit (unnamed) parameters.
    """
    group = backend._lib.EC_KEY_get0_group(ec_key)
    backend.openssl_assert(group != backend._ffi.NULL)
    nid = backend._lib.EC_GROUP_get_curve_name(group)
    # The following check is to find EC keys with unnamed curves and raise
    # an error for now.
    if nid == backend._lib.NID_undef:
        raise ValueError(
            "ECDSA keys with explicit parameters are unsupported at this time"
        )
    # This is like the above check, but it also catches the case where you
    # explicitly encoded a curve with the same parameters as a named curve.
    # Don't do that.
    if (
        not backend._lib.CRYPTOGRAPHY_IS_LIBRESSL
        and backend._lib.EC_GROUP_get_asn1_flag(group) == 0
    ):
        raise ValueError(
            "ECDSA keys with explicit parameters are unsupported at this time"
        )
    curve_name = backend._lib.OBJ_nid2sn(nid)
    backend.openssl_assert(curve_name != backend._ffi.NULL)
    sn = backend._ffi.string(curve_name).decode("ascii")
    return sn
def _mark_asn1_named_ec_curve(backend: Backend, ec_cdata):
    """
    Set the named curve flag on the EC_KEY. This causes OpenSSL to
    serialize EC keys along with their curve OID which makes
    deserialization easier.
    """
    # EC_KEY_set_asn1_flag has no meaningful failure return to check here.
    backend._lib.EC_KEY_set_asn1_flag(
        ec_cdata, backend._lib.OPENSSL_EC_NAMED_CURVE
    )
def _check_key_infinity(backend: Backend, ec_cdata) -> None:
    """Reject EC keys whose public point is the point at infinity, which is
    not a valid public key."""
    point = backend._lib.EC_KEY_get0_public_key(ec_cdata)
    backend.openssl_assert(point != backend._ffi.NULL)
    group = backend._lib.EC_KEY_get0_group(ec_cdata)
    backend.openssl_assert(group != backend._ffi.NULL)
    if backend._lib.EC_POINT_is_at_infinity(group, point):
        raise ValueError(
            "Cannot load an EC public key where the point is at infinity"
        )
def _sn_to_elliptic_curve(backend: Backend, sn: str) -> ec.EllipticCurve:
    """Instantiate the curve class registered for OpenSSL short name *sn*."""
    try:
        curve_cls = ec._CURVE_TYPES[sn]
    except KeyError:
        raise UnsupportedAlgorithm(
            f"{sn} is not a supported elliptic curve",
            _Reasons.UNSUPPORTED_ELLIPTIC_CURVE,
        )
    return curve_cls()
def _ecdsa_sig_sign(
    backend: Backend, private_key: _EllipticCurvePrivateKey, data: bytes
) -> bytes:
    """Produce a DER-encoded ECDSA signature over pre-hashed ``data``.

    Uses ECDSA_size to size the output buffer, then truncates to the
    actual signature length reported by ECDSA_sign.
    """
    max_size = backend._lib.ECDSA_size(private_key._ec_key)
    backend.openssl_assert(max_size > 0)
    sigbuf = backend._ffi.new("unsigned char[]", max_size)
    siglen_ptr = backend._ffi.new("unsigned int[]", 1)
    res = backend._lib.ECDSA_sign(
        0, data, len(data), sigbuf, siglen_ptr, private_key._ec_key
    )
    backend.openssl_assert(res == 1)
    return backend._ffi.buffer(sigbuf)[: siglen_ptr[0]]
def _ecdsa_sig_verify(
    backend: Backend,
    public_key: _EllipticCurvePublicKey,
    signature: bytes,
    data: bytes,
) -> None:
    """Verify an ECDSA signature over pre-hashed ``data``.

    Raises InvalidSignature on any non-success result (the OpenSSL error
    queue is drained first so it cannot leak into later operations).
    """
    res = backend._lib.ECDSA_verify(
        0, data, len(data), signature, len(signature), public_key._ec_key
    )
    if res != 1:
        backend._consume_errors()
        raise InvalidSignature
class _EllipticCurvePrivateKey(ec.EllipticCurvePrivateKey):
    """OpenSSL-backed EC private key holding both an EC_KEY and an
    EVP_PKEY handle for the same key material."""
    def __init__(self, backend: Backend, ec_key_cdata, evp_pkey):
        self._backend = backend
        self._ec_key = ec_key_cdata
        self._evp_pkey = evp_pkey
        # Resolve and validate the curve up front; rejects explicit-parameter
        # keys and points at infinity.
        sn = _ec_key_curve_sn(backend, ec_key_cdata)
        self._curve = _sn_to_elliptic_curve(backend, sn)
        _mark_asn1_named_ec_curve(backend, ec_key_cdata)
        _check_key_infinity(backend, ec_key_cdata)
    @property
    def curve(self) -> ec.EllipticCurve:
        return self._curve
    @property
    def key_size(self) -> int:
        return self.curve.key_size
    def exchange(
        self, algorithm: ec.ECDH, peer_public_key: ec.EllipticCurvePublicKey
    ) -> bytes:
        """Perform an ECDH key exchange and return the shared secret."""
        if not (
            self._backend.elliptic_curve_exchange_algorithm_supported(
                algorithm, self.curve
            )
        ):
            raise UnsupportedAlgorithm(
                "This backend does not support the ECDH algorithm.",
                _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM,
            )
        if peer_public_key.curve.name != self.curve.name:
            raise ValueError(
                "peer_public_key and self are not on the same curve"
            )
        return _evp_pkey_derive(self._backend, self._evp_pkey, peer_public_key)
    def public_key(self) -> ec.EllipticCurvePublicKey:
        """Build a new public-key object from this key's public point."""
        group = self._backend._lib.EC_KEY_get0_group(self._ec_key)
        self._backend.openssl_assert(group != self._backend._ffi.NULL)
        curve_nid = self._backend._lib.EC_GROUP_get_curve_name(group)
        public_ec_key = self._backend._ec_key_new_by_curve_nid(curve_nid)
        point = self._backend._lib.EC_KEY_get0_public_key(self._ec_key)
        self._backend.openssl_assert(point != self._backend._ffi.NULL)
        res = self._backend._lib.EC_KEY_set_public_key(public_ec_key, point)
        self._backend.openssl_assert(res == 1)
        evp_pkey = self._backend._ec_cdata_to_evp_pkey(public_ec_key)
        return _EllipticCurvePublicKey(self._backend, public_ec_key, evp_pkey)
    def private_numbers(self) -> ec.EllipticCurvePrivateNumbers:
        """Expose the scalar private value plus the public numbers."""
        bn = self._backend._lib.EC_KEY_get0_private_key(self._ec_key)
        private_value = self._backend._bn_to_int(bn)
        return ec.EllipticCurvePrivateNumbers(
            private_value=private_value,
            public_numbers=self.public_key().public_numbers(),
        )
    def private_bytes(
        self,
        encoding: serialization.Encoding,
        format: serialization.PrivateFormat,
        encryption_algorithm: serialization.KeySerializationEncryption,
    ) -> bytes:
        # Serialization is delegated entirely to the backend.
        return self._backend._private_key_bytes(
            encoding,
            format,
            encryption_algorithm,
            self,
            self._evp_pkey,
            self._ec_key,
        )
    def sign(
        self,
        data: bytes,
        signature_algorithm: ec.EllipticCurveSignatureAlgorithm,
    ) -> bytes:
        """Hash ``data`` per the algorithm (unless prehashed) and sign it."""
        _check_signature_algorithm(signature_algorithm)
        data, _ = _calculate_digest_and_algorithm(
            data,
            signature_algorithm.algorithm,
        )
        return _ecdsa_sig_sign(self._backend, self, data)
class _EllipticCurvePublicKey(ec.EllipticCurvePublicKey):
    """OpenSSL-backed EC public key holding both an EC_KEY and an
    EVP_PKEY handle for the same key material."""
    def __init__(self, backend: Backend, ec_key_cdata, evp_pkey):
        self._backend = backend
        self._ec_key = ec_key_cdata
        self._evp_pkey = evp_pkey
        # Resolve and validate the curve up front; rejects explicit-parameter
        # keys and points at infinity.
        sn = _ec_key_curve_sn(backend, ec_key_cdata)
        self._curve = _sn_to_elliptic_curve(backend, sn)
        _mark_asn1_named_ec_curve(backend, ec_key_cdata)
        _check_key_infinity(backend, ec_key_cdata)
    @property
    def curve(self) -> ec.EllipticCurve:
        return self._curve
    @property
    def key_size(self) -> int:
        return self.curve.key_size
    def __eq__(self, other: object) -> bool:
        # Key equality is decided by OpenSSL's EVP_PKEY comparison.
        if not isinstance(other, _EllipticCurvePublicKey):
            return NotImplemented
        return (
            self._backend._lib.EVP_PKEY_cmp(self._evp_pkey, other._evp_pkey)
            == 1
        )
    def public_numbers(self) -> ec.EllipticCurvePublicNumbers:
        """Return the affine (x, y) coordinates of the public point."""
        group = self._backend._lib.EC_KEY_get0_group(self._ec_key)
        self._backend.openssl_assert(group != self._backend._ffi.NULL)
        point = self._backend._lib.EC_KEY_get0_public_key(self._ec_key)
        self._backend.openssl_assert(point != self._backend._ffi.NULL)
        with self._backend._tmp_bn_ctx() as bn_ctx:
            bn_x = self._backend._lib.BN_CTX_get(bn_ctx)
            bn_y = self._backend._lib.BN_CTX_get(bn_ctx)
            res = self._backend._lib.EC_POINT_get_affine_coordinates(
                group, point, bn_x, bn_y, bn_ctx
            )
            self._backend.openssl_assert(res == 1)
            x = self._backend._bn_to_int(bn_x)
            y = self._backend._bn_to_int(bn_y)
        return ec.EllipticCurvePublicNumbers(x=x, y=y, curve=self._curve)
    def _encode_point(self, format: serialization.PublicFormat) -> bytes:
        """Encode the public point in X9.62 compressed/uncompressed form."""
        if format is serialization.PublicFormat.CompressedPoint:
            conversion = self._backend._lib.POINT_CONVERSION_COMPRESSED
        else:
            assert format is serialization.PublicFormat.UncompressedPoint
            conversion = self._backend._lib.POINT_CONVERSION_UNCOMPRESSED
        group = self._backend._lib.EC_KEY_get0_group(self._ec_key)
        self._backend.openssl_assert(group != self._backend._ffi.NULL)
        point = self._backend._lib.EC_KEY_get0_public_key(self._ec_key)
        self._backend.openssl_assert(point != self._backend._ffi.NULL)
        with self._backend._tmp_bn_ctx() as bn_ctx:
            # First call with a NULL buffer returns the required length.
            buflen = self._backend._lib.EC_POINT_point2oct(
                group, point, conversion, self._backend._ffi.NULL, 0, bn_ctx
            )
            self._backend.openssl_assert(buflen > 0)
            buf = self._backend._ffi.new("char[]", buflen)
            res = self._backend._lib.EC_POINT_point2oct(
                group, point, conversion, buf, buflen, bn_ctx
            )
            self._backend.openssl_assert(buflen == res)
        return self._backend._ffi.buffer(buf)[:]
    def public_bytes(
        self,
        encoding: serialization.Encoding,
        format: serialization.PublicFormat,
    ) -> bytes:
        """Serialize the key; X962 encodings are handled locally, everything
        else is delegated to the backend."""
        if (
            encoding is serialization.Encoding.X962
            or format is serialization.PublicFormat.CompressedPoint
            or format is serialization.PublicFormat.UncompressedPoint
        ):
            # X962 and the point formats are only valid together.
            if encoding is not serialization.Encoding.X962 or format not in (
                serialization.PublicFormat.CompressedPoint,
                serialization.PublicFormat.UncompressedPoint,
            ):
                raise ValueError(
                    "X962 encoding must be used with CompressedPoint or "
                    "UncompressedPoint format"
                )
            return self._encode_point(format)
        else:
            return self._backend._public_key_bytes(
                encoding, format, self, self._evp_pkey, None
            )
    def verify(
        self,
        signature: bytes,
        data: bytes,
        signature_algorithm: ec.EllipticCurveSignatureAlgorithm,
    ) -> None:
        """Hash ``data`` per the algorithm (unless prehashed) and verify."""
        _check_signature_algorithm(signature_algorithm)
        data, _ = _calculate_digest_and_algorithm(
            data,
            signature_algorithm.algorithm,
        )
        _ecdsa_sig_verify(self._backend, self, signature, data)

View File

@@ -1,599 +0,0 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import annotations
import threading
import typing
from cryptography.exceptions import (
InvalidSignature,
UnsupportedAlgorithm,
_Reasons,
)
from cryptography.hazmat.backends.openssl.utils import (
_calculate_digest_and_algorithm,
)
from cryptography.hazmat.primitives import hashes, serialization
from cryptography.hazmat.primitives.asymmetric import utils as asym_utils
from cryptography.hazmat.primitives.asymmetric.padding import (
MGF1,
OAEP,
PSS,
AsymmetricPadding,
PKCS1v15,
_Auto,
_DigestLength,
_MaxLength,
calculate_max_pss_salt_length,
)
from cryptography.hazmat.primitives.asymmetric.rsa import (
RSAPrivateKey,
RSAPrivateNumbers,
RSAPublicKey,
RSAPublicNumbers,
)
if typing.TYPE_CHECKING:
from cryptography.hazmat.backends.openssl.backend import Backend
def _get_rsa_pss_salt_length(
    backend: Backend,
    pss: PSS,
    key: typing.Union[RSAPrivateKey, RSAPublicKey],
    hash_algorithm: hashes.HashAlgorithm,
) -> int:
    """Resolve the PSS salt-length strategy to a concrete integer that can
    be handed to OpenSSL."""
    strategy = pss._salt_length
    if isinstance(strategy, _MaxLength):
        return calculate_max_pss_salt_length(key, hash_algorithm)
    if isinstance(strategy, _DigestLength):
        return hash_algorithm.digest_size
    if isinstance(strategy, _Auto):
        # AUTO means "recover the length from the signature" and therefore
        # only makes sense when verifying with a public key.
        if isinstance(key, RSAPrivateKey):
            raise ValueError(
                "PSS salt length can only be set to AUTO when verifying"
            )
        return backend._lib.RSA_PSS_SALTLEN_AUTO
    # Otherwise the caller supplied an explicit integer salt length.
    return strategy
def _enc_dec_rsa(
    backend: Backend,
    key: typing.Union[_RSAPrivateKey, _RSAPublicKey],
    data: bytes,
    padding: AsymmetricPadding,
) -> bytes:
    """Validate the padding, map it to an OpenSSL padding enum, and perform
    RSA encryption (public key) or decryption (private key).

    Raises TypeError for non-padding objects and UnsupportedAlgorithm for
    unknown paddings, non-MGF1 OAEP, or unsupported OAEP hash combos.
    """
    if not isinstance(padding, AsymmetricPadding):
        raise TypeError("Padding must be an instance of AsymmetricPadding.")
    if isinstance(padding, PKCS1v15):
        padding_enum = backend._lib.RSA_PKCS1_PADDING
    elif isinstance(padding, OAEP):
        padding_enum = backend._lib.RSA_PKCS1_OAEP_PADDING
        if not isinstance(padding._mgf, MGF1):
            raise UnsupportedAlgorithm(
                "Only MGF1 is supported by this backend.",
                _Reasons.UNSUPPORTED_MGF,
            )
        if not backend.rsa_padding_supported(padding):
            raise UnsupportedAlgorithm(
                "This combination of padding and hash algorithm is not "
                "supported by this backend.",
                _Reasons.UNSUPPORTED_PADDING,
            )
    else:
        raise UnsupportedAlgorithm(
            f"{padding.name} is not supported by this backend.",
            _Reasons.UNSUPPORTED_PADDING,
        )
    return _enc_dec_rsa_pkey_ctx(backend, key, data, padding_enum, padding)
def _enc_dec_rsa_pkey_ctx(
    backend: Backend,
    key: typing.Union[_RSAPrivateKey, _RSAPublicKey],
    data: bytes,
    padding_enum: int,
    padding: AsymmetricPadding,
) -> bytes:
    """Do the actual EVP_PKEY encrypt/decrypt once padding is resolved.

    Direction is chosen by key type: public key -> encrypt, private key ->
    decrypt. OAEP digests/labels are configured on the EVP_PKEY_CTX before
    the operation. The tail of this function is intentionally written to
    be constant-time — see the comment below before touching it.
    """
    init: typing.Callable[[typing.Any], int]
    crypt: typing.Callable[[typing.Any, typing.Any, int, bytes, int], int]
    if isinstance(key, _RSAPublicKey):
        init = backend._lib.EVP_PKEY_encrypt_init
        crypt = backend._lib.EVP_PKEY_encrypt
    else:
        init = backend._lib.EVP_PKEY_decrypt_init
        crypt = backend._lib.EVP_PKEY_decrypt
    pkey_ctx = backend._lib.EVP_PKEY_CTX_new(key._evp_pkey, backend._ffi.NULL)
    backend.openssl_assert(pkey_ctx != backend._ffi.NULL)
    pkey_ctx = backend._ffi.gc(pkey_ctx, backend._lib.EVP_PKEY_CTX_free)
    res = init(pkey_ctx)
    backend.openssl_assert(res == 1)
    res = backend._lib.EVP_PKEY_CTX_set_rsa_padding(pkey_ctx, padding_enum)
    backend.openssl_assert(res > 0)
    buf_size = backend._lib.EVP_PKEY_size(key._evp_pkey)
    backend.openssl_assert(buf_size > 0)
    if isinstance(padding, OAEP):
        # Configure the MGF1 digest and the OAEP digest on the context.
        mgf1_md = backend._evp_md_non_null_from_algorithm(
            padding._mgf._algorithm
        )
        res = backend._lib.EVP_PKEY_CTX_set_rsa_mgf1_md(pkey_ctx, mgf1_md)
        backend.openssl_assert(res > 0)
        oaep_md = backend._evp_md_non_null_from_algorithm(padding._algorithm)
        res = backend._lib.EVP_PKEY_CTX_set_rsa_oaep_md(pkey_ctx, oaep_md)
        backend.openssl_assert(res > 0)
    if (
        isinstance(padding, OAEP)
        and padding._label is not None
        and len(padding._label) > 0
    ):
        # set0_rsa_oaep_label takes ownership of the char * so we need to
        # copy it into some new memory
        labelptr = backend._lib.OPENSSL_malloc(len(padding._label))
        backend.openssl_assert(labelptr != backend._ffi.NULL)
        backend._ffi.memmove(labelptr, padding._label, len(padding._label))
        res = backend._lib.EVP_PKEY_CTX_set0_rsa_oaep_label(
            pkey_ctx, labelptr, len(padding._label)
        )
        backend.openssl_assert(res == 1)
    outlen = backend._ffi.new("size_t *", buf_size)
    buf = backend._ffi.new("unsigned char[]", buf_size)
    # Everything from this line onwards is written with the goal of being as
    # constant-time as is practical given the constraints of Python and our
    # API. See Bleichenbacher's '98 attack on RSA, and its many many variants.
    # As such, you should not attempt to change this (particularly to "clean it
    # up") without understanding why it was written this way (see
    # Chesterton's Fence), and without measuring to verify you have not
    # introduced observable time differences.
    res = crypt(pkey_ctx, buf, outlen, data, len(data))
    resbuf = backend._ffi.buffer(buf)[: outlen[0]]
    backend._lib.ERR_clear_error()
    if res <= 0:
        raise ValueError("Encryption/decryption failed.")
    return resbuf
def _rsa_sig_determine_padding(
    backend: Backend,
    key: typing.Union[_RSAPrivateKey, _RSAPublicKey],
    padding: AsymmetricPadding,
    algorithm: typing.Optional[hashes.HashAlgorithm],
) -> int:
    """Validate a signature padding and return the OpenSSL padding enum.

    PKCS1v15 permits ``algorithm`` to be None; PSS requires a hash and a
    key large enough to fit a PSS-encoded digest.
    """
    if not isinstance(padding, AsymmetricPadding):
        raise TypeError("Expected provider of AsymmetricPadding.")
    pkey_size = backend._lib.EVP_PKEY_size(key._evp_pkey)
    backend.openssl_assert(pkey_size > 0)
    if isinstance(padding, PKCS1v15):
        # Hash algorithm is ignored for PKCS1v15-padding, may be None.
        padding_enum = backend._lib.RSA_PKCS1_PADDING
    elif isinstance(padding, PSS):
        if not isinstance(padding._mgf, MGF1):
            raise UnsupportedAlgorithm(
                "Only MGF1 is supported by this backend.",
                _Reasons.UNSUPPORTED_MGF,
            )
        # PSS padding requires a hash algorithm
        if not isinstance(algorithm, hashes.HashAlgorithm):
            raise TypeError("Expected instance of hashes.HashAlgorithm.")
        # Size of key in bytes - 2 is the maximum
        # PSS signature length (salt length is checked later)
        if pkey_size - algorithm.digest_size - 2 < 0:
            raise ValueError(
                "Digest too large for key size. Use a larger "
                "key or different digest."
            )
        padding_enum = backend._lib.RSA_PKCS1_PSS_PADDING
    else:
        raise UnsupportedAlgorithm(
            f"{padding.name} is not supported by this backend.",
            _Reasons.UNSUPPORTED_PADDING,
        )
    return padding_enum
# Hash algorithm can be absent (None) to initialize the context without setting
# any message digest algorithm. This is currently only valid for the PKCS1v15
# padding type, where it means that the signature data is encoded/decoded
# as provided, without being wrapped in a DigestInfo structure.
def _rsa_sig_setup(
    backend: Backend,
    padding: AsymmetricPadding,
    algorithm: typing.Optional[hashes.HashAlgorithm],
    key: typing.Union[_RSAPublicKey, _RSAPrivateKey],
    init_func: typing.Callable[[typing.Any], int],
):
    """Build an EVP_PKEY_CTX configured for an RSA sign/verify/recover op.

    ``init_func`` selects the operation (EVP_PKEY_sign_init,
    EVP_PKEY_verify_init, or EVP_PKEY_verify_recover_init). Configures the
    digest, padding, and — for PSS — salt length and MGF1 digest.
    """
    padding_enum = _rsa_sig_determine_padding(backend, key, padding, algorithm)
    pkey_ctx = backend._lib.EVP_PKEY_CTX_new(key._evp_pkey, backend._ffi.NULL)
    backend.openssl_assert(pkey_ctx != backend._ffi.NULL)
    pkey_ctx = backend._ffi.gc(pkey_ctx, backend._lib.EVP_PKEY_CTX_free)
    res = init_func(pkey_ctx)
    if res != 1:
        errors = backend._consume_errors()
        raise ValueError("Unable to sign/verify with this key", errors)
    if algorithm is not None:
        evp_md = backend._evp_md_non_null_from_algorithm(algorithm)
        res = backend._lib.EVP_PKEY_CTX_set_signature_md(pkey_ctx, evp_md)
        if res <= 0:
            backend._consume_errors()
            raise UnsupportedAlgorithm(
                "{} is not supported by this backend for RSA signing.".format(
                    algorithm.name
                ),
                _Reasons.UNSUPPORTED_HASH,
            )
    res = backend._lib.EVP_PKEY_CTX_set_rsa_padding(pkey_ctx, padding_enum)
    if res <= 0:
        backend._consume_errors()
        raise UnsupportedAlgorithm(
            "{} is not supported for the RSA signature operation.".format(
                padding.name
            ),
            _Reasons.UNSUPPORTED_PADDING,
        )
    if isinstance(padding, PSS):
        # _rsa_sig_determine_padding already guaranteed a hash for PSS.
        assert isinstance(algorithm, hashes.HashAlgorithm)
        res = backend._lib.EVP_PKEY_CTX_set_rsa_pss_saltlen(
            pkey_ctx,
            _get_rsa_pss_salt_length(backend, padding, key, algorithm),
        )
        backend.openssl_assert(res > 0)
        mgf1_md = backend._evp_md_non_null_from_algorithm(
            padding._mgf._algorithm
        )
        res = backend._lib.EVP_PKEY_CTX_set_rsa_mgf1_md(pkey_ctx, mgf1_md)
        backend.openssl_assert(res > 0)
    return pkey_ctx
def _rsa_sig_sign(
    backend: Backend,
    padding: AsymmetricPadding,
    algorithm: hashes.HashAlgorithm,
    private_key: _RSAPrivateKey,
    data: bytes,
) -> bytes:
    """Sign pre-hashed ``data`` with RSA using the given padding/digest.

    Raises ValueError when the digest plus salt do not fit the key.
    """
    pkey_ctx = _rsa_sig_setup(
        backend,
        padding,
        algorithm,
        private_key,
        backend._lib.EVP_PKEY_sign_init,
    )
    # First call with a NULL sig buffer queries the required length.
    buflen = backend._ffi.new("size_t *")
    res = backend._lib.EVP_PKEY_sign(
        pkey_ctx, backend._ffi.NULL, buflen, data, len(data)
    )
    backend.openssl_assert(res == 1)
    buf = backend._ffi.new("unsigned char[]", buflen[0])
    res = backend._lib.EVP_PKEY_sign(pkey_ctx, buf, buflen, data, len(data))
    if res != 1:
        errors = backend._consume_errors()
        raise ValueError(
            "Digest or salt length too long for key size. Use a larger key "
            "or shorter salt length if you are specifying a PSS salt",
            errors,
        )
    return backend._ffi.buffer(buf)[:]
def _rsa_sig_verify(
    backend: Backend,
    padding: AsymmetricPadding,
    algorithm: hashes.HashAlgorithm,
    public_key: _RSAPublicKey,
    signature: bytes,
    data: bytes,
) -> None:
    """Verify an RSA signature over pre-hashed ``data``.

    Raises InvalidSignature on mismatch; hard-fails (openssl_assert) on
    negative return codes, which indicate an operational error rather
    than a bad signature.
    """
    pkey_ctx = _rsa_sig_setup(
        backend,
        padding,
        algorithm,
        public_key,
        backend._lib.EVP_PKEY_verify_init,
    )
    res = backend._lib.EVP_PKEY_verify(
        pkey_ctx, signature, len(signature), data, len(data)
    )
    # The previous call can return negative numbers in the event of an
    # error. This is not a signature failure but we need to fail if it
    # occurs.
    backend.openssl_assert(res >= 0)
    if res == 0:
        backend._consume_errors()
        raise InvalidSignature
def _rsa_sig_recover(
    backend: Backend,
    padding: AsymmetricPadding,
    algorithm: typing.Optional[hashes.HashAlgorithm],
    public_key: _RSAPublicKey,
    signature: bytes,
) -> bytes:
    """Recover the data that was signed to produce *signature*.

    Uses OpenSSL's EVP_PKEY_verify_recover with *public_key*. Raises
    InvalidSignature when recovery fails (signature does not match this
    key/padding/algorithm combination).
    """
    pkey_ctx = _rsa_sig_setup(
        backend,
        padding,
        algorithm,
        public_key,
        backend._lib.EVP_PKEY_verify_recover_init,
    )
    # Attempt to keep the rest of the code in this function as constant/time
    # as possible. See the comment in _enc_dec_rsa_pkey_ctx. Note that the
    # buflen parameter is used even though its value may be undefined in the
    # error case. Due to the tolerant nature of Python slicing this does not
    # trigger any exceptions.
    maxlen = backend._lib.EVP_PKEY_size(public_key._evp_pkey)
    backend.openssl_assert(maxlen > 0)
    buf = backend._ffi.new("unsigned char[]", maxlen)
    buflen = backend._ffi.new("size_t *", maxlen)
    res = backend._lib.EVP_PKEY_verify_recover(
        pkey_ctx, buf, buflen, signature, len(signature)
    )
    # Slice the buffer before inspecting `res` so the success and failure
    # paths perform the same work (part of the constant-time effort above).
    resbuf = backend._ffi.buffer(buf)[: buflen[0]]
    backend._lib.ERR_clear_error()
    # Assume that all parameter errors are handled during the setup phase and
    # any error here is due to invalid signature.
    if res != 1:
        raise InvalidSignature
    return resbuf
class _RSAPrivateKey(RSAPrivateKey):
    """OpenSSL-backed RSA private key.

    Wraps both the low-level RSA* handle (``_rsa_cdata``) and the
    EVP_PKEY* handle (``_evp_pkey``) for the same key, and enables
    OpenSSL blinding lazily on first private-key operation.
    """

    _evp_pkey: object
    _rsa_cdata: object
    _key_size: int
    def __init__(
        self,
        backend: Backend,
        rsa_cdata,
        evp_pkey,
        *,
        unsafe_skip_rsa_key_validation: bool,
    ):
        res: int
        # RSA_check_key is slower in OpenSSL 3.0.0 due to improved
        # primality checking. In normal use this is unlikely to be a problem
        # since users don't load new keys constantly, but for TESTING we've
        # added an init arg that allows skipping the checks. You should not
        # use this in production code unless you understand the consequences.
        if not unsafe_skip_rsa_key_validation:
            res = backend._lib.RSA_check_key(rsa_cdata)
            if res != 1:
                errors = backend._consume_errors()
                raise ValueError("Invalid private key", errors)
            # 2 is prime and passes an RSA key check, so we also check
            # if p and q are odd just to be safe.
            p = backend._ffi.new("BIGNUM **")
            q = backend._ffi.new("BIGNUM **")
            backend._lib.RSA_get0_factors(rsa_cdata, p, q)
            backend.openssl_assert(p[0] != backend._ffi.NULL)
            backend.openssl_assert(q[0] != backend._ffi.NULL)
            p_odd = backend._lib.BN_is_odd(p[0])
            q_odd = backend._lib.BN_is_odd(q[0])
            if p_odd != 1 or q_odd != 1:
                errors = backend._consume_errors()
                raise ValueError("Invalid private key", errors)
        self._backend = backend
        self._rsa_cdata = rsa_cdata
        self._evp_pkey = evp_pkey
        # Used for lazy blinding
        self._blinded = False
        self._blinding_lock = threading.Lock()
        # Cache the key size (bit length of the modulus n) once at
        # construction so the key_size property is a cheap attribute read.
        n = self._backend._ffi.new("BIGNUM **")
        self._backend._lib.RSA_get0_key(
            self._rsa_cdata,
            n,
            self._backend._ffi.NULL,
            self._backend._ffi.NULL,
        )
        self._backend.openssl_assert(n[0] != self._backend._ffi.NULL)
        self._key_size = self._backend._lib.BN_num_bits(n[0])
    def _enable_blinding(self) -> None:
        """Turn on OpenSSL blinding for this key, at most once."""
        # If you call blind on an already blinded RSA key OpenSSL will turn
        # it off and back on, which is a performance hit we want to avoid.
        if not self._blinded:
            with self._blinding_lock:
                self._non_threadsafe_enable_blinding()
    def _non_threadsafe_enable_blinding(self) -> None:
        """Enable blinding; caller must hold ``_blinding_lock``."""
        # This is only a separate function to allow for testing to cover both
        # branches. It should never be invoked except through _enable_blinding.
        # Check if it's not True again in case another thread raced past the
        # first non-locked check.
        if not self._blinded:
            res = self._backend._lib.RSA_blinding_on(
                self._rsa_cdata, self._backend._ffi.NULL
            )
            self._backend.openssl_assert(res == 1)
            self._blinded = True
    @property
    def key_size(self) -> int:
        """Bit length of the RSA modulus."""
        return self._key_size
    def decrypt(self, ciphertext: bytes, padding: AsymmetricPadding) -> bytes:
        """Decrypt *ciphertext*; its length must equal the key size in bytes."""
        self._enable_blinding()
        key_size_bytes = (self.key_size + 7) // 8
        if key_size_bytes != len(ciphertext):
            raise ValueError("Ciphertext length must be equal to key size.")
        return _enc_dec_rsa(self._backend, self, ciphertext, padding)
    def public_key(self) -> RSAPublicKey:
        """Return the corresponding public key as a new wrapper object."""
        ctx = self._backend._lib.RSAPublicKey_dup(self._rsa_cdata)
        self._backend.openssl_assert(ctx != self._backend._ffi.NULL)
        ctx = self._backend._ffi.gc(ctx, self._backend._lib.RSA_free)
        evp_pkey = self._backend._rsa_cdata_to_evp_pkey(ctx)
        return _RSAPublicKey(self._backend, ctx, evp_pkey)
    def private_numbers(self) -> RSAPrivateNumbers:
        """Extract all RSA private numbers (p, q, d, CRT params, e, n)."""
        n = self._backend._ffi.new("BIGNUM **")
        e = self._backend._ffi.new("BIGNUM **")
        d = self._backend._ffi.new("BIGNUM **")
        p = self._backend._ffi.new("BIGNUM **")
        q = self._backend._ffi.new("BIGNUM **")
        dmp1 = self._backend._ffi.new("BIGNUM **")
        dmq1 = self._backend._ffi.new("BIGNUM **")
        iqmp = self._backend._ffi.new("BIGNUM **")
        self._backend._lib.RSA_get0_key(self._rsa_cdata, n, e, d)
        self._backend.openssl_assert(n[0] != self._backend._ffi.NULL)
        self._backend.openssl_assert(e[0] != self._backend._ffi.NULL)
        self._backend.openssl_assert(d[0] != self._backend._ffi.NULL)
        self._backend._lib.RSA_get0_factors(self._rsa_cdata, p, q)
        self._backend.openssl_assert(p[0] != self._backend._ffi.NULL)
        self._backend.openssl_assert(q[0] != self._backend._ffi.NULL)
        self._backend._lib.RSA_get0_crt_params(
            self._rsa_cdata, dmp1, dmq1, iqmp
        )
        self._backend.openssl_assert(dmp1[0] != self._backend._ffi.NULL)
        self._backend.openssl_assert(dmq1[0] != self._backend._ffi.NULL)
        self._backend.openssl_assert(iqmp[0] != self._backend._ffi.NULL)
        return RSAPrivateNumbers(
            p=self._backend._bn_to_int(p[0]),
            q=self._backend._bn_to_int(q[0]),
            d=self._backend._bn_to_int(d[0]),
            dmp1=self._backend._bn_to_int(dmp1[0]),
            dmq1=self._backend._bn_to_int(dmq1[0]),
            iqmp=self._backend._bn_to_int(iqmp[0]),
            public_numbers=RSAPublicNumbers(
                e=self._backend._bn_to_int(e[0]),
                n=self._backend._bn_to_int(n[0]),
            ),
        )
    def private_bytes(
        self,
        encoding: serialization.Encoding,
        format: serialization.PrivateFormat,
        encryption_algorithm: serialization.KeySerializationEncryption,
    ) -> bytes:
        """Serialize this private key in the requested encoding/format."""
        return self._backend._private_key_bytes(
            encoding,
            format,
            encryption_algorithm,
            self,
            self._evp_pkey,
            self._rsa_cdata,
        )
    def sign(
        self,
        data: bytes,
        padding: AsymmetricPadding,
        algorithm: typing.Union[asym_utils.Prehashed, hashes.HashAlgorithm],
    ) -> bytes:
        """Sign *data* (hashing it first unless *algorithm* is Prehashed)."""
        self._enable_blinding()
        data, algorithm = _calculate_digest_and_algorithm(data, algorithm)
        return _rsa_sig_sign(self._backend, padding, algorithm, self, data)
class _RSAPublicKey(RSAPublicKey):
    """OpenSSL-backed RSA public key.

    Holds both the low-level RSA* handle (``_rsa_cdata``) and the
    EVP_PKEY* handle (``_evp_pkey``) for the same key material.
    """

    _evp_pkey: object
    _rsa_cdata: object
    _key_size: int

    def __init__(self, backend: Backend, rsa_cdata, evp_pkey):
        self._backend = backend
        self._rsa_cdata = rsa_cdata
        self._evp_pkey = evp_pkey
        # Cache the bit length of the modulus n so key_size is a cheap read.
        modulus = backend._ffi.new("BIGNUM **")
        backend._lib.RSA_get0_key(
            rsa_cdata,
            modulus,
            backend._ffi.NULL,
            backend._ffi.NULL,
        )
        backend.openssl_assert(modulus[0] != backend._ffi.NULL)
        self._key_size = backend._lib.BN_num_bits(modulus[0])

    @property
    def key_size(self) -> int:
        """Bit length of the RSA modulus."""
        return self._key_size

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, _RSAPublicKey):
            return NotImplemented
        cmp_result = self._backend._lib.EVP_PKEY_cmp(
            self._evp_pkey, other._evp_pkey
        )
        return cmp_result == 1

    def encrypt(self, plaintext: bytes, padding: AsymmetricPadding) -> bytes:
        """Encrypt *plaintext* under this key with the given padding."""
        return _enc_dec_rsa(self._backend, self, plaintext, padding)

    def public_numbers(self) -> RSAPublicNumbers:
        """Return the public numbers (e, n) as Python integers."""
        backend = self._backend
        modulus = backend._ffi.new("BIGNUM **")
        exponent = backend._ffi.new("BIGNUM **")
        backend._lib.RSA_get0_key(
            self._rsa_cdata, modulus, exponent, backend._ffi.NULL
        )
        backend.openssl_assert(modulus[0] != backend._ffi.NULL)
        backend.openssl_assert(exponent[0] != backend._ffi.NULL)
        return RSAPublicNumbers(
            e=backend._bn_to_int(exponent[0]),
            n=backend._bn_to_int(modulus[0]),
        )

    def public_bytes(
        self,
        encoding: serialization.Encoding,
        format: serialization.PublicFormat,
    ) -> bytes:
        """Serialize this public key in the requested encoding/format."""
        return self._backend._public_key_bytes(
            encoding, format, self, self._evp_pkey, self._rsa_cdata
        )

    def verify(
        self,
        signature: bytes,
        data: bytes,
        padding: AsymmetricPadding,
        algorithm: typing.Union[asym_utils.Prehashed, hashes.HashAlgorithm],
    ) -> None:
        """Verify *signature* over *data*; raises InvalidSignature on failure."""
        data, algorithm = _calculate_digest_and_algorithm(data, algorithm)
        _rsa_sig_verify(
            self._backend, padding, algorithm, self, signature, data
        )

    def recover_data_from_signature(
        self,
        signature: bytes,
        padding: AsymmetricPadding,
        algorithm: typing.Optional[hashes.HashAlgorithm],
    ) -> bytes:
        """Recover the signed payload embedded in *signature*."""
        if isinstance(algorithm, asym_utils.Prehashed):
            raise TypeError(
                "Prehashed is only supported in the sign and verify methods. "
                "It cannot be used with recover_data_from_signature."
            )
        return _rsa_sig_recover(
            self._backend, padding, algorithm, self, signature
        )

View File

@@ -1,63 +0,0 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import annotations
import typing
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric.utils import Prehashed
if typing.TYPE_CHECKING:
from cryptography.hazmat.backends.openssl.backend import Backend
def _evp_pkey_derive(backend: Backend, evp_pkey, peer_public_key) -> bytes:
ctx = backend._lib.EVP_PKEY_CTX_new(evp_pkey, backend._ffi.NULL)
backend.openssl_assert(ctx != backend._ffi.NULL)
ctx = backend._ffi.gc(ctx, backend._lib.EVP_PKEY_CTX_free)
res = backend._lib.EVP_PKEY_derive_init(ctx)
backend.openssl_assert(res == 1)
if backend._lib.Cryptography_HAS_EVP_PKEY_SET_PEER_EX:
res = backend._lib.EVP_PKEY_derive_set_peer_ex(
ctx, peer_public_key._evp_pkey, 0
)
else:
res = backend._lib.EVP_PKEY_derive_set_peer(
ctx, peer_public_key._evp_pkey
)
backend.openssl_assert(res == 1)
keylen = backend._ffi.new("size_t *")
res = backend._lib.EVP_PKEY_derive(ctx, backend._ffi.NULL, keylen)
backend.openssl_assert(res == 1)
backend.openssl_assert(keylen[0] > 0)
buf = backend._ffi.new("unsigned char[]", keylen[0])
res = backend._lib.EVP_PKEY_derive(ctx, buf, keylen)
if res != 1:
errors = backend._consume_errors()
raise ValueError("Error computing shared key.", errors)
return backend._ffi.buffer(buf, keylen[0])[:]
def _calculate_digest_and_algorithm(
data: bytes,
algorithm: typing.Union[Prehashed, hashes.HashAlgorithm],
) -> typing.Tuple[bytes, hashes.HashAlgorithm]:
if not isinstance(algorithm, Prehashed):
hash_ctx = hashes.Hash(algorithm)
hash_ctx.update(data)
data = hash_ctx.finalize()
else:
algorithm = algorithm._algorithm
if len(data) != algorithm.digest_size:
raise ValueError(
"The provided data must be the same length as the hash "
"algorithm's digest size."
)
return (data, algorithm)

View File

@@ -2,33 +2,36 @@
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
import types
import typing
def check_pkcs7_padding(data: bytes) -> bool: ...
def check_ansix923_padding(data: bytes) -> bool: ...
from cryptography.hazmat.primitives import padding
from cryptography.utils import Buffer
class PKCS7PaddingContext(padding.PaddingContext):
def __init__(self, block_size: int) -> None: ...
def update(self, data: Buffer) -> bytes: ...
def finalize(self) -> bytes: ...
class ANSIX923PaddingContext(padding.PaddingContext):
def __init__(self, block_size: int) -> None: ...
def update(self, data: Buffer) -> bytes: ...
def finalize(self) -> bytes: ...
class PKCS7UnpaddingContext(padding.PaddingContext):
def __init__(self, block_size: int) -> None: ...
def update(self, data: Buffer) -> bytes: ...
def finalize(self) -> bytes: ...
class ANSIX923UnpaddingContext(padding.PaddingContext):
def __init__(self, block_size: int) -> None: ...
def update(self, data: Buffer) -> bytes: ...
def finalize(self) -> bytes: ...
class ObjectIdentifier:
def __init__(self, val: str) -> None: ...
def __init__(self, value: str) -> None: ...
@property
def dotted_string(self) -> str: ...
@property
def _name(self) -> str: ...
T = typing.TypeVar("T")
class FixedPool(typing.Generic[T]):
def __init__(
self,
create: typing.Callable[[], T],
) -> None: ...
def acquire(self) -> PoolAcquisition[T]: ...
class PoolAcquisition(typing.Generic[T]):
def __enter__(self) -> T: ...
def __exit__(
self,
exc_type: typing.Optional[typing.Type[BaseException]],
exc_value: typing.Optional[BaseException],
exc_tb: typing.Optional[types.TracebackType],
) -> None: ...

View File

@@ -2,15 +2,6 @@
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
import typing
class TestCertificate:
not_after_tag: int
not_before_tag: int
issuer_value_tags: typing.List[int]
subject_value_tags: typing.List[int]
def decode_dss_signature(signature: bytes) -> typing.Tuple[int, int]: ...
def decode_dss_signature(signature: bytes) -> tuple[int, int]: ...
def encode_dss_signature(r: int, s: int) -> bytes: ...
def parse_spki_for_data(data: bytes) -> bytes: ...
def test_parse_certificate(data: bytes) -> TestCertificate: ...

View File

@@ -0,0 +1,32 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
import typing
def encode_der(value: typing.Any) -> bytes: ...
def non_root_python_to_rust(cls: type) -> Type: ...
# Type is a Rust enum with tuple variants. For now, we express the type
# annotations like this:
class Type:
Sequence: typing.ClassVar[type]
PyInt: typing.ClassVar[type]
class Annotation:
def __new__(
cls,
) -> Annotation: ...
class AnnotatedType:
inner: Type
annotation: Annotation
def __new__(cls, inner: Type, annotation: Annotation) -> AnnotatedType: ...
class AnnotatedTypeObject:
annotated_type: AnnotatedType
value: typing.Any
def __new__(
cls, annotated_type: AnnotatedType, value: typing.Any
) -> AnnotatedTypeObject: ...

View File

@@ -2,24 +2,116 @@
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
import typing
import datetime
from collections.abc import Iterator
from cryptography.hazmat.primitives import hashes
from cryptography import x509
from cryptography.hazmat.primitives import hashes, serialization
from cryptography.hazmat.primitives.asymmetric.types import PrivateKeyTypes
from cryptography.x509.ocsp import (
OCSPRequest,
OCSPRequestBuilder,
OCSPResponse,
OCSPResponseBuilder,
OCSPResponseStatus,
)
from cryptography.x509 import ocsp
def load_der_ocsp_request(data: bytes) -> OCSPRequest: ...
def load_der_ocsp_response(data: bytes) -> OCSPResponse: ...
def create_ocsp_request(builder: OCSPRequestBuilder) -> OCSPRequest: ...
class OCSPRequest:
@property
def issuer_key_hash(self) -> bytes: ...
@property
def issuer_name_hash(self) -> bytes: ...
@property
def hash_algorithm(self) -> hashes.HashAlgorithm: ...
@property
def serial_number(self) -> int: ...
def public_bytes(self, encoding: serialization.Encoding) -> bytes: ...
@property
def extensions(self) -> x509.Extensions: ...
class OCSPResponse:
@property
def responses(self) -> Iterator[OCSPSingleResponse]: ...
@property
def response_status(self) -> ocsp.OCSPResponseStatus: ...
@property
def signature_algorithm_oid(self) -> x509.ObjectIdentifier: ...
@property
def signature_hash_algorithm(
self,
) -> hashes.HashAlgorithm | None: ...
@property
def signature(self) -> bytes: ...
@property
def tbs_response_bytes(self) -> bytes: ...
@property
def certificates(self) -> list[x509.Certificate]: ...
@property
def responder_key_hash(self) -> bytes | None: ...
@property
def responder_name(self) -> x509.Name | None: ...
@property
def produced_at(self) -> datetime.datetime: ...
@property
def produced_at_utc(self) -> datetime.datetime: ...
@property
def certificate_status(self) -> ocsp.OCSPCertStatus: ...
@property
def revocation_time(self) -> datetime.datetime | None: ...
@property
def revocation_time_utc(self) -> datetime.datetime | None: ...
@property
def revocation_reason(self) -> x509.ReasonFlags | None: ...
@property
def this_update(self) -> datetime.datetime: ...
@property
def this_update_utc(self) -> datetime.datetime: ...
@property
def next_update(self) -> datetime.datetime | None: ...
@property
def next_update_utc(self) -> datetime.datetime | None: ...
@property
def issuer_key_hash(self) -> bytes: ...
@property
def issuer_name_hash(self) -> bytes: ...
@property
def hash_algorithm(self) -> hashes.HashAlgorithm: ...
@property
def serial_number(self) -> int: ...
@property
def extensions(self) -> x509.Extensions: ...
@property
def single_extensions(self) -> x509.Extensions: ...
def public_bytes(self, encoding: serialization.Encoding) -> bytes: ...
class OCSPSingleResponse:
@property
def certificate_status(self) -> ocsp.OCSPCertStatus: ...
@property
def revocation_time(self) -> datetime.datetime | None: ...
@property
def revocation_time_utc(self) -> datetime.datetime | None: ...
@property
def revocation_reason(self) -> x509.ReasonFlags | None: ...
@property
def this_update(self) -> datetime.datetime: ...
@property
def this_update_utc(self) -> datetime.datetime: ...
@property
def next_update(self) -> datetime.datetime | None: ...
@property
def next_update_utc(self) -> datetime.datetime | None: ...
@property
def issuer_key_hash(self) -> bytes: ...
@property
def issuer_name_hash(self) -> bytes: ...
@property
def hash_algorithm(self) -> hashes.HashAlgorithm: ...
@property
def serial_number(self) -> int: ...
def load_der_ocsp_request(data: bytes) -> ocsp.OCSPRequest: ...
def load_der_ocsp_response(data: bytes) -> ocsp.OCSPResponse: ...
def create_ocsp_request(
builder: ocsp.OCSPRequestBuilder,
) -> ocsp.OCSPRequest: ...
def create_ocsp_response(
status: OCSPResponseStatus,
builder: typing.Optional[OCSPResponseBuilder],
private_key: typing.Optional[PrivateKeyTypes],
hash_algorithm: typing.Optional[hashes.HashAlgorithm],
) -> OCSPResponse: ...
status: ocsp.OCSPResponseStatus,
builder: ocsp.OCSPResponseBuilder | None,
private_key: PrivateKeyTypes | None,
hash_algorithm: hashes.HashAlgorithm | None,
) -> ocsp.OCSPResponse: ...

View File

@@ -5,37 +5,66 @@
import typing
from cryptography.hazmat.bindings._rust.openssl import (
aead,
ciphers,
cmac,
dh,
dsa,
ec,
ed448,
ed25519,
hashes,
hmac,
kdf,
keys,
poly1305,
rsa,
x448,
x25519,
)
__all__ = [
"openssl_version",
"raise_openssl_error",
"aead",
"ciphers",
"cmac",
"dh",
"dsa",
"ec",
"ed448",
"ed25519",
"hashes",
"hmac",
"kdf",
"ed448",
"ed25519",
"keys",
"openssl_version",
"openssl_version_text",
"poly1305",
"raise_openssl_error",
"rsa",
"x448",
"x25519",
]
CRYPTOGRAPHY_IS_LIBRESSL: bool
CRYPTOGRAPHY_IS_BORINGSSL: bool
CRYPTOGRAPHY_IS_AWSLC: bool
CRYPTOGRAPHY_OPENSSL_300_OR_GREATER: bool
CRYPTOGRAPHY_OPENSSL_309_OR_GREATER: bool
CRYPTOGRAPHY_OPENSSL_320_OR_GREATER: bool
CRYPTOGRAPHY_OPENSSL_330_OR_GREATER: bool
CRYPTOGRAPHY_OPENSSL_350_OR_GREATER: bool
class Providers: ...
_legacy_provider_loaded: bool
_providers: Providers
def openssl_version() -> int: ...
def openssl_version_text() -> str: ...
def raise_openssl_error() -> typing.NoReturn: ...
def capture_error_stack() -> typing.List[OpenSSLError]: ...
def capture_error_stack() -> list[OpenSSLError]: ...
def is_fips_enabled() -> bool: ...
def enable_fips(providers: Providers) -> None: ...
class OpenSSLError:
@property
@@ -44,4 +73,3 @@ class OpenSSLError:
def reason(self) -> int: ...
@property
def reason_text(self) -> bytes: ...
def _lib_reason_match(self, lib: int, reason: int) -> bool: ...

View File

@@ -0,0 +1,107 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from collections.abc import Sequence
from cryptography.utils import Buffer
class AESGCM:
def __init__(self, key: Buffer) -> None: ...
@staticmethod
def generate_key(bit_length: int) -> bytes: ...
def encrypt(
self,
nonce: Buffer,
data: Buffer,
associated_data: Buffer | None,
) -> bytes: ...
def decrypt(
self,
nonce: Buffer,
data: Buffer,
associated_data: Buffer | None,
) -> bytes: ...
class ChaCha20Poly1305:
def __init__(self, key: Buffer) -> None: ...
@staticmethod
def generate_key() -> bytes: ...
def encrypt(
self,
nonce: Buffer,
data: Buffer,
associated_data: Buffer | None,
) -> bytes: ...
def decrypt(
self,
nonce: Buffer,
data: Buffer,
associated_data: Buffer | None,
) -> bytes: ...
class AESCCM:
def __init__(self, key: Buffer, tag_length: int = 16) -> None: ...
@staticmethod
def generate_key(bit_length: int) -> bytes: ...
def encrypt(
self,
nonce: Buffer,
data: Buffer,
associated_data: Buffer | None,
) -> bytes: ...
def decrypt(
self,
nonce: Buffer,
data: Buffer,
associated_data: Buffer | None,
) -> bytes: ...
class AESSIV:
def __init__(self, key: Buffer) -> None: ...
@staticmethod
def generate_key(bit_length: int) -> bytes: ...
def encrypt(
self,
data: Buffer,
associated_data: Sequence[Buffer] | None,
) -> bytes: ...
def decrypt(
self,
data: Buffer,
associated_data: Sequence[Buffer] | None,
) -> bytes: ...
class AESOCB3:
def __init__(self, key: Buffer) -> None: ...
@staticmethod
def generate_key(bit_length: int) -> bytes: ...
def encrypt(
self,
nonce: Buffer,
data: Buffer,
associated_data: Buffer | None,
) -> bytes: ...
def decrypt(
self,
nonce: Buffer,
data: Buffer,
associated_data: Buffer | None,
) -> bytes: ...
class AESGCMSIV:
def __init__(self, key: Buffer) -> None: ...
@staticmethod
def generate_key(bit_length: int) -> bytes: ...
def encrypt(
self,
nonce: Buffer,
data: Buffer,
associated_data: Buffer | None,
) -> bytes: ...
def decrypt(
self,
nonce: Buffer,
data: Buffer,
associated_data: Buffer | None,
) -> bytes: ...

View File

@@ -0,0 +1,38 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
import typing
from cryptography.hazmat.primitives import ciphers
from cryptography.hazmat.primitives.ciphers import modes
@typing.overload
def create_encryption_ctx(
algorithm: ciphers.CipherAlgorithm, mode: modes.ModeWithAuthenticationTag
) -> ciphers.AEADEncryptionContext: ...
@typing.overload
def create_encryption_ctx(
algorithm: ciphers.CipherAlgorithm, mode: modes.Mode | None
) -> ciphers.CipherContext: ...
@typing.overload
def create_decryption_ctx(
algorithm: ciphers.CipherAlgorithm, mode: modes.ModeWithAuthenticationTag
) -> ciphers.AEADDecryptionContext: ...
@typing.overload
def create_decryption_ctx(
algorithm: ciphers.CipherAlgorithm, mode: modes.Mode | None
) -> ciphers.CipherContext: ...
def cipher_supported(
algorithm: ciphers.CipherAlgorithm, mode: modes.Mode
) -> bool: ...
def _advance(
ctx: ciphers.AEADEncryptionContext | ciphers.AEADDecryptionContext, n: int
) -> None: ...
def _advance_aad(
ctx: ciphers.AEADEncryptionContext | ciphers.AEADDecryptionContext, n: int
) -> None: ...
class CipherContext: ...
class AEADEncryptionContext: ...
class AEADDecryptionContext: ...

View File

@@ -0,0 +1,18 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
import typing
from cryptography.hazmat.primitives import ciphers
class CMAC:
def __init__(
self,
algorithm: ciphers.BlockCipherAlgorithm,
backend: typing.Any = None,
) -> None: ...
def update(self, data: bytes) -> None: ...
def finalize(self) -> bytes: ...
def verify(self, signature: bytes) -> None: ...
def copy(self) -> CMAC: ...

View File

@@ -2,6 +2,8 @@
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
import typing
from cryptography.hazmat.primitives.asymmetric import dh
MIN_MODULUS_SIZE: int
@@ -10,13 +12,40 @@ class DHPrivateKey: ...
class DHPublicKey: ...
class DHParameters: ...
def generate_parameters(generator: int, key_size: int) -> dh.DHParameters: ...
def private_key_from_ptr(ptr: int) -> dh.DHPrivateKey: ...
def public_key_from_ptr(ptr: int) -> dh.DHPublicKey: ...
def from_pem_parameters(data: bytes) -> dh.DHParameters: ...
def from_der_parameters(data: bytes) -> dh.DHParameters: ...
def from_private_numbers(numbers: dh.DHPrivateNumbers) -> dh.DHPrivateKey: ...
def from_public_numbers(numbers: dh.DHPublicNumbers) -> dh.DHPublicKey: ...
def from_parameter_numbers(
numbers: dh.DHParameterNumbers,
class DHPrivateNumbers:
def __init__(self, x: int, public_numbers: DHPublicNumbers) -> None: ...
def private_key(self, backend: typing.Any = None) -> dh.DHPrivateKey: ...
@property
def x(self) -> int: ...
@property
def public_numbers(self) -> DHPublicNumbers: ...
class DHPublicNumbers:
def __init__(
self, y: int, parameter_numbers: DHParameterNumbers
) -> None: ...
def public_key(self, backend: typing.Any = None) -> dh.DHPublicKey: ...
@property
def y(self) -> int: ...
@property
def parameter_numbers(self) -> DHParameterNumbers: ...
class DHParameterNumbers:
def __init__(self, p: int, g: int, q: int | None = None) -> None: ...
def parameters(self, backend: typing.Any = None) -> dh.DHParameters: ...
@property
def p(self) -> int: ...
@property
def g(self) -> int: ...
@property
def q(self) -> int | None: ...
def generate_parameters(
generator: int, key_size: int, backend: typing.Any = None
) -> dh.DHParameters: ...
def from_pem_parameters(
data: bytes, backend: typing.Any = None
) -> dh.DHParameters: ...
def from_der_parameters(
data: bytes, backend: typing.Any = None
) -> dh.DHParameters: ...

View File

@@ -2,19 +2,40 @@
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
import typing
from cryptography.hazmat.primitives.asymmetric import dsa
class DSAPrivateKey: ...
class DSAPublicKey: ...
class DSAParameters: ...
class DSAPrivateNumbers:
def __init__(self, x: int, public_numbers: DSAPublicNumbers) -> None: ...
@property
def x(self) -> int: ...
@property
def public_numbers(self) -> DSAPublicNumbers: ...
def private_key(self, backend: typing.Any = None) -> dsa.DSAPrivateKey: ...
class DSAPublicNumbers:
def __init__(
self, y: int, parameter_numbers: DSAParameterNumbers
) -> None: ...
@property
def y(self) -> int: ...
@property
def parameter_numbers(self) -> DSAParameterNumbers: ...
def public_key(self, backend: typing.Any = None) -> dsa.DSAPublicKey: ...
class DSAParameterNumbers:
def __init__(self, p: int, q: int, g: int) -> None: ...
@property
def p(self) -> int: ...
@property
def q(self) -> int: ...
@property
def g(self) -> int: ...
def parameters(self, backend: typing.Any = None) -> dsa.DSAParameters: ...
def generate_parameters(key_size: int) -> dsa.DSAParameters: ...
def private_key_from_ptr(ptr: int) -> dsa.DSAPrivateKey: ...
def public_key_from_ptr(ptr: int) -> dsa.DSAPublicKey: ...
def from_private_numbers(
numbers: dsa.DSAPrivateNumbers,
) -> dsa.DSAPrivateKey: ...
def from_public_numbers(numbers: dsa.DSAPublicNumbers) -> dsa.DSAPublicKey: ...
def from_parameter_numbers(
numbers: dsa.DSAParameterNumbers,
) -> dsa.DSAParameters: ...

View File

@@ -0,0 +1,52 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
import typing
from cryptography.hazmat.primitives.asymmetric import ec
class ECPrivateKey: ...
class ECPublicKey: ...
class EllipticCurvePrivateNumbers:
def __init__(
self, private_value: int, public_numbers: EllipticCurvePublicNumbers
) -> None: ...
def private_key(
self, backend: typing.Any = None
) -> ec.EllipticCurvePrivateKey: ...
@property
def private_value(self) -> int: ...
@property
def public_numbers(self) -> EllipticCurvePublicNumbers: ...
class EllipticCurvePublicNumbers:
def __init__(self, x: int, y: int, curve: ec.EllipticCurve) -> None: ...
def public_key(
self, backend: typing.Any = None
) -> ec.EllipticCurvePublicKey: ...
@property
def x(self) -> int: ...
@property
def y(self) -> int: ...
@property
def curve(self) -> ec.EllipticCurve: ...
def __eq__(self, other: object) -> bool: ...
def curve_supported(curve: ec.EllipticCurve) -> bool: ...
def generate_private_key(
curve: ec.EllipticCurve, backend: typing.Any = None
) -> ec.EllipticCurvePrivateKey: ...
def from_private_numbers(
numbers: ec.EllipticCurvePrivateNumbers,
) -> ec.EllipticCurvePrivateKey: ...
def from_public_numbers(
numbers: ec.EllipticCurvePublicNumbers,
) -> ec.EllipticCurvePublicKey: ...
def from_public_bytes(
curve: ec.EllipticCurve, data: bytes
) -> ec.EllipticCurvePublicKey: ...
def derive_private_key(
private_value: int, curve: ec.EllipticCurve
) -> ec.EllipticCurvePrivateKey: ...

View File

@@ -3,12 +3,11 @@
# for complete details.
from cryptography.hazmat.primitives.asymmetric import ed25519
from cryptography.utils import Buffer
class Ed25519PrivateKey: ...
class Ed25519PublicKey: ...
def generate_key() -> ed25519.Ed25519PrivateKey: ...
def private_key_from_ptr(ptr: int) -> ed25519.Ed25519PrivateKey: ...
def public_key_from_ptr(ptr: int) -> ed25519.Ed25519PublicKey: ...
def from_private_bytes(data: bytes) -> ed25519.Ed25519PrivateKey: ...
def from_private_bytes(data: Buffer) -> ed25519.Ed25519PrivateKey: ...
def from_public_bytes(data: bytes) -> ed25519.Ed25519PublicKey: ...

View File

@@ -3,12 +3,11 @@
# for complete details.
from cryptography.hazmat.primitives.asymmetric import ed448
from cryptography.utils import Buffer
class Ed448PrivateKey: ...
class Ed448PublicKey: ...
def generate_key() -> ed448.Ed448PrivateKey: ...
def private_key_from_ptr(ptr: int) -> ed448.Ed448PrivateKey: ...
def public_key_from_ptr(ptr: int) -> ed448.Ed448PublicKey: ...
def from_private_bytes(data: bytes) -> ed448.Ed448PrivateKey: ...
def from_private_bytes(data: Buffer) -> ed448.Ed448PrivateKey: ...
def from_public_bytes(data: bytes) -> ed448.Ed448PublicKey: ...

View File

@@ -5,6 +5,7 @@
import typing
from cryptography.hazmat.primitives import hashes
from cryptography.utils import Buffer
class Hash(hashes.HashContext):
def __init__(
@@ -12,6 +13,16 @@ class Hash(hashes.HashContext):
) -> None: ...
@property
def algorithm(self) -> hashes.HashAlgorithm: ...
def update(self, data: bytes) -> None: ...
def update(self, data: Buffer) -> None: ...
def finalize(self) -> bytes: ...
def copy(self) -> Hash: ...
def hash_supported(algorithm: hashes.HashAlgorithm) -> bool: ...
class XOFHash:
def __init__(self, algorithm: hashes.ExtendableOutputFunction) -> None: ...
@property
def algorithm(self) -> hashes.ExtendableOutputFunction: ...
def update(self, data: Buffer) -> None: ...
def squeeze(self, length: int) -> bytes: ...
def copy(self) -> XOFHash: ...

View File

@@ -5,17 +5,18 @@
import typing
from cryptography.hazmat.primitives import hashes
from cryptography.utils import Buffer
class HMAC(hashes.HashContext):
def __init__(
self,
key: bytes,
key: Buffer,
algorithm: hashes.HashAlgorithm,
backend: typing.Any = None,
) -> None: ...
@property
def algorithm(self) -> hashes.HashAlgorithm: ...
def update(self, data: bytes) -> None: ...
def update(self, data: Buffer) -> None: ...
def finalize(self) -> bytes: ...
def verify(self, signature: bytes) -> None: ...
def copy(self) -> HMAC: ...

View File

@@ -2,21 +2,71 @@
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
import typing
from cryptography.hazmat.primitives.hashes import HashAlgorithm
from cryptography.utils import Buffer
def derive_pbkdf2_hmac(
key_material: bytes,
key_material: Buffer,
algorithm: HashAlgorithm,
salt: bytes,
iterations: int,
length: int,
) -> bytes: ...
def derive_scrypt(
key_material: bytes,
salt: bytes,
n: int,
r: int,
p: int,
max_mem: int,
length: int,
) -> bytes: ...
class Scrypt:
def __init__(
self,
salt: bytes,
length: int,
n: int,
r: int,
p: int,
backend: typing.Any = None,
) -> None: ...
def derive(self, key_material: Buffer) -> bytes: ...
def verify(self, key_material: bytes, expected_key: bytes) -> None: ...
class Argon2id:
def __init__(
self,
*,
salt: bytes,
length: int,
iterations: int,
lanes: int,
memory_cost: int,
ad: bytes | None = None,
secret: bytes | None = None,
) -> None: ...
def derive(self, key_material: bytes) -> bytes: ...
def verify(self, key_material: bytes, expected_key: bytes) -> None: ...
def derive_phc_encoded(self, key_material: bytes) -> str: ...
@classmethod
def verify_phc_encoded(
cls, key_material: bytes, phc_encoded: str, secret: bytes | None = None
) -> None: ...
class HKDF:
def __init__(
self,
algorithm: HashAlgorithm,
length: int,
salt: bytes | None,
info: bytes | None,
backend: typing.Any = None,
): ...
def derive(self, key_material: Buffer) -> bytes: ...
def verify(self, key_material: bytes, expected_key: bytes) -> None: ...
class HKDFExpand:
def __init__(
self,
algorithm: HashAlgorithm,
length: int,
info: bytes | None,
backend: typing.Any = None,
): ...
def derive(self, key_material: Buffer) -> bytes: ...
def verify(self, key_material: bytes, expected_key: bytes) -> None: ...

View File

@@ -0,0 +1,34 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
import typing
from cryptography.hazmat.primitives.asymmetric.types import (
PrivateKeyTypes,
PublicKeyTypes,
)
from cryptography.utils import Buffer
def load_der_private_key(
data: Buffer,
password: bytes | None,
backend: typing.Any = None,
*,
unsafe_skip_rsa_key_validation: bool = False,
) -> PrivateKeyTypes: ...
def load_pem_private_key(
data: Buffer,
password: bytes | None,
backend: typing.Any = None,
*,
unsafe_skip_rsa_key_validation: bool = False,
) -> PrivateKeyTypes: ...
def load_der_public_key(
data: bytes,
backend: typing.Any = None,
) -> PublicKeyTypes: ...
def load_pem_public_key(
data: bytes,
backend: typing.Any = None,
) -> PublicKeyTypes: ...

View File

@@ -2,12 +2,14 @@
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from cryptography.utils import Buffer
class Poly1305:
def __init__(self, key: bytes) -> None: ...
def __init__(self, key: Buffer) -> None: ...
@staticmethod
def generate_tag(key: bytes, data: bytes) -> bytes: ...
def generate_tag(key: Buffer, data: Buffer) -> bytes: ...
@staticmethod
def verify_tag(key: bytes, data: bytes, tag: bytes) -> None: ...
def update(self, data: bytes) -> None: ...
def verify_tag(key: Buffer, data: Buffer, tag: bytes) -> None: ...
def update(self, data: Buffer) -> None: ...
def finalize(self) -> bytes: ...
def verify(self, tag: bytes) -> None: ...

View File

@@ -0,0 +1,55 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
import typing
from cryptography.hazmat.primitives.asymmetric import rsa
class RSAPrivateKey: ...
class RSAPublicKey: ...
class RSAPrivateNumbers:
def __init__(
self,
p: int,
q: int,
d: int,
dmp1: int,
dmq1: int,
iqmp: int,
public_numbers: RSAPublicNumbers,
) -> None: ...
@property
def p(self) -> int: ...
@property
def q(self) -> int: ...
@property
def d(self) -> int: ...
@property
def dmp1(self) -> int: ...
@property
def dmq1(self) -> int: ...
@property
def iqmp(self) -> int: ...
@property
def public_numbers(self) -> RSAPublicNumbers: ...
def private_key(
self,
backend: typing.Any = None,
*,
unsafe_skip_rsa_key_validation: bool = False,
) -> rsa.RSAPrivateKey: ...
class RSAPublicNumbers:
def __init__(self, e: int, n: int) -> None: ...
@property
def n(self) -> int: ...
@property
def e(self) -> int: ...
def public_key(self, backend: typing.Any = None) -> rsa.RSAPublicKey: ...
def generate_private_key(
public_exponent: int,
key_size: int,
) -> rsa.RSAPrivateKey: ...

View File

@@ -3,12 +3,11 @@
# for complete details.
from cryptography.hazmat.primitives.asymmetric import x25519
from cryptography.utils import Buffer
class X25519PrivateKey: ...
class X25519PublicKey: ...
def generate_key() -> x25519.X25519PrivateKey: ...
def private_key_from_ptr(ptr: int) -> x25519.X25519PrivateKey: ...
def public_key_from_ptr(ptr: int) -> x25519.X25519PublicKey: ...
def from_private_bytes(data: bytes) -> x25519.X25519PrivateKey: ...
def from_private_bytes(data: Buffer) -> x25519.X25519PrivateKey: ...
def from_public_bytes(data: bytes) -> x25519.X25519PublicKey: ...

View File

@@ -3,12 +3,11 @@
# for complete details.
from cryptography.hazmat.primitives.asymmetric import x448
from cryptography.utils import Buffer
class X448PrivateKey: ...
class X448PublicKey: ...
def generate_key() -> x448.X448PrivateKey: ...
def private_key_from_ptr(ptr: int) -> x448.X448PrivateKey: ...
def public_key_from_ptr(ptr: int) -> x448.X448PublicKey: ...
def from_private_bytes(data: bytes) -> x448.X448PrivateKey: ...
def from_private_bytes(data: Buffer) -> x448.X448PrivateKey: ...
def from_public_bytes(data: bytes) -> x448.X448PublicKey: ...

View File

@@ -0,0 +1,52 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
import typing
from collections.abc import Iterable
from cryptography import x509
from cryptography.hazmat.primitives.asymmetric.types import PrivateKeyTypes
from cryptography.hazmat.primitives.serialization import (
KeySerializationEncryption,
)
from cryptography.hazmat.primitives.serialization.pkcs12 import (
PKCS12KeyAndCertificates,
PKCS12PrivateKeyTypes,
)
from cryptography.utils import Buffer
class PKCS12Certificate:
def __init__(
self, cert: x509.Certificate, friendly_name: bytes | None
) -> None: ...
@property
def friendly_name(self) -> bytes | None: ...
@property
def certificate(self) -> x509.Certificate: ...
def load_key_and_certificates(
data: Buffer,
password: Buffer | None,
backend: typing.Any = None,
) -> tuple[
PrivateKeyTypes | None,
x509.Certificate | None,
list[x509.Certificate],
]: ...
def load_pkcs12(
data: bytes,
password: bytes | None,
backend: typing.Any = None,
) -> PKCS12KeyAndCertificates: ...
def serialize_java_truststore(
certs: Iterable[PKCS12Certificate],
encryption_algorithm: KeySerializationEncryption,
) -> bytes: ...
def serialize_key_and_certificates(
name: bytes | None,
key: PKCS12PrivateKeyTypes | None,
cert: x509.Certificate | None,
cas: Iterable[x509.Certificate | PKCS12Certificate] | None,
encryption_algorithm: KeySerializationEncryption,
) -> bytes: ...

View File

@@ -1,15 +1,50 @@
import typing
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from collections.abc import Iterable
from cryptography import x509
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.hazmat.primitives.serialization import pkcs7
def serialize_certificates(
certs: typing.List[x509.Certificate],
certs: list[x509.Certificate],
encoding: serialization.Encoding,
) -> bytes: ...
def encrypt_and_serialize(
builder: pkcs7.PKCS7EnvelopeBuilder,
content_encryption_algorithm: pkcs7.ContentEncryptionAlgorithm,
encoding: serialization.Encoding,
options: Iterable[pkcs7.PKCS7Options],
) -> bytes: ...
def sign_and_serialize(
builder: pkcs7.PKCS7SignatureBuilder,
encoding: serialization.Encoding,
options: typing.Iterable[pkcs7.PKCS7Options],
options: Iterable[pkcs7.PKCS7Options],
) -> bytes: ...
def decrypt_der(
data: bytes,
certificate: x509.Certificate,
private_key: rsa.RSAPrivateKey,
options: Iterable[pkcs7.PKCS7Options],
) -> bytes: ...
def decrypt_pem(
data: bytes,
certificate: x509.Certificate,
private_key: rsa.RSAPrivateKey,
options: Iterable[pkcs7.PKCS7Options],
) -> bytes: ...
def decrypt_smime(
data: bytes,
certificate: x509.Certificate,
private_key: rsa.RSAPrivateKey,
options: Iterable[pkcs7.PKCS7Options],
) -> bytes: ...
def load_pem_pkcs7_certificates(
data: bytes,
) -> list[x509.Certificate]: ...
def load_der_pkcs7_certificates(
data: bytes,
) -> list[x509.Certificate]: ...

View File

@@ -0,0 +1,23 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from cryptography import x509
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.serialization import pkcs7
from cryptography.utils import Buffer
class TestCertificate:
not_after_tag: int
not_before_tag: int
issuer_value_tags: list[int]
subject_value_tags: list[int]
def test_parse_certificate(data: bytes) -> TestCertificate: ...
def pkcs7_verify(
encoding: serialization.Encoding,
sig: bytes,
msg: Buffer | None,
certs: list[x509.Certificate],
options: list[pkcs7.PKCS7Options],
) -> None: ...

View File

@@ -2,43 +2,300 @@
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
import datetime
import typing
from collections.abc import Iterator
from cryptography import x509
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives import hashes, serialization
from cryptography.hazmat.primitives.asymmetric.ec import ECDSA
from cryptography.hazmat.primitives.asymmetric.padding import PSS, PKCS1v15
from cryptography.hazmat.primitives.asymmetric.types import PrivateKeyTypes
from cryptography.hazmat.primitives.asymmetric.types import (
CertificateIssuerPublicKeyTypes,
CertificatePublicKeyTypes,
PrivateKeyTypes,
)
from cryptography.x509 import certificate_transparency
def load_pem_x509_certificate(data: bytes) -> x509.Certificate: ...
def load_pem_x509_certificate(
data: bytes, backend: typing.Any = None
) -> x509.Certificate: ...
def load_der_x509_certificate(
data: bytes, backend: typing.Any = None
) -> x509.Certificate: ...
def load_pem_x509_certificates(
data: bytes,
) -> typing.List[x509.Certificate]: ...
def load_der_x509_certificate(data: bytes) -> x509.Certificate: ...
def load_pem_x509_crl(data: bytes) -> x509.CertificateRevocationList: ...
def load_der_x509_crl(data: bytes) -> x509.CertificateRevocationList: ...
def load_pem_x509_csr(data: bytes) -> x509.CertificateSigningRequest: ...
def load_der_x509_csr(data: bytes) -> x509.CertificateSigningRequest: ...
) -> list[x509.Certificate]: ...
def load_pem_x509_crl(
data: bytes, backend: typing.Any = None
) -> x509.CertificateRevocationList: ...
def load_der_x509_crl(
data: bytes, backend: typing.Any = None
) -> x509.CertificateRevocationList: ...
def load_pem_x509_csr(
data: bytes, backend: typing.Any = None
) -> x509.CertificateSigningRequest: ...
def load_der_x509_csr(
data: bytes, backend: typing.Any = None
) -> x509.CertificateSigningRequest: ...
def encode_name_bytes(name: x509.Name) -> bytes: ...
def encode_extension_value(extension: x509.ExtensionType) -> bytes: ...
def create_x509_certificate(
builder: x509.CertificateBuilder,
private_key: PrivateKeyTypes,
hash_algorithm: typing.Optional[hashes.HashAlgorithm],
padding: typing.Optional[typing.Union[PKCS1v15, PSS]],
hash_algorithm: hashes.HashAlgorithm | None,
rsa_padding: PKCS1v15 | PSS | None,
ecdsa_deterministic: bool | None,
) -> x509.Certificate: ...
def create_x509_csr(
builder: x509.CertificateSigningRequestBuilder,
private_key: PrivateKeyTypes,
hash_algorithm: typing.Optional[hashes.HashAlgorithm],
hash_algorithm: hashes.HashAlgorithm | None,
rsa_padding: PKCS1v15 | PSS | None,
ecdsa_deterministic: bool | None,
) -> x509.CertificateSigningRequest: ...
def create_x509_crl(
builder: x509.CertificateRevocationListBuilder,
private_key: PrivateKeyTypes,
hash_algorithm: typing.Optional[hashes.HashAlgorithm],
hash_algorithm: hashes.HashAlgorithm | None,
rsa_padding: PKCS1v15 | PSS | None,
ecdsa_deterministic: bool | None,
) -> x509.CertificateRevocationList: ...
class Sct: ...
class Certificate: ...
class Sct:
@property
def version(self) -> certificate_transparency.Version: ...
@property
def log_id(self) -> bytes: ...
@property
def timestamp(self) -> datetime.datetime: ...
@property
def entry_type(self) -> certificate_transparency.LogEntryType: ...
@property
def signature_hash_algorithm(self) -> hashes.HashAlgorithm: ...
@property
def signature_algorithm(
self,
) -> certificate_transparency.SignatureAlgorithm: ...
@property
def signature(self) -> bytes: ...
@property
def extension_bytes(self) -> bytes: ...
class Certificate:
def fingerprint(self, algorithm: hashes.HashAlgorithm) -> bytes: ...
@property
def serial_number(self) -> int: ...
@property
def version(self) -> x509.Version: ...
def public_key(self) -> CertificatePublicKeyTypes: ...
@property
def public_key_algorithm_oid(self) -> x509.ObjectIdentifier: ...
@property
def not_valid_before(self) -> datetime.datetime: ...
@property
def not_valid_before_utc(self) -> datetime.datetime: ...
@property
def not_valid_after(self) -> datetime.datetime: ...
@property
def not_valid_after_utc(self) -> datetime.datetime: ...
@property
def issuer(self) -> x509.Name: ...
@property
def subject(self) -> x509.Name: ...
@property
def signature_hash_algorithm(
self,
) -> hashes.HashAlgorithm | None: ...
@property
def signature_algorithm_oid(self) -> x509.ObjectIdentifier: ...
@property
def signature_algorithm_parameters(
self,
) -> PSS | PKCS1v15 | ECDSA | None: ...
@property
def extensions(self) -> x509.Extensions: ...
@property
def signature(self) -> bytes: ...
@property
def tbs_certificate_bytes(self) -> bytes: ...
@property
def tbs_precertificate_bytes(self) -> bytes: ...
def __eq__(self, other: object) -> bool: ...
def __hash__(self) -> int: ...
def public_bytes(self, encoding: serialization.Encoding) -> bytes: ...
def verify_directly_issued_by(self, issuer: Certificate) -> None: ...
class RevokedCertificate: ...
class CertificateRevocationList: ...
class CertificateSigningRequest: ...
class CertificateRevocationList:
def public_bytes(self, encoding: serialization.Encoding) -> bytes: ...
def fingerprint(self, algorithm: hashes.HashAlgorithm) -> bytes: ...
def get_revoked_certificate_by_serial_number(
self, serial_number: int
) -> x509.RevokedCertificate | None: ...
@property
def signature_hash_algorithm(
self,
) -> hashes.HashAlgorithm | None: ...
@property
def signature_algorithm_oid(self) -> x509.ObjectIdentifier: ...
@property
def signature_algorithm_parameters(
self,
) -> PSS | PKCS1v15 | ECDSA | None: ...
@property
def issuer(self) -> x509.Name: ...
@property
def next_update(self) -> datetime.datetime | None: ...
@property
def next_update_utc(self) -> datetime.datetime | None: ...
@property
def last_update(self) -> datetime.datetime: ...
@property
def last_update_utc(self) -> datetime.datetime: ...
@property
def extensions(self) -> x509.Extensions: ...
@property
def signature(self) -> bytes: ...
@property
def tbs_certlist_bytes(self) -> bytes: ...
def __eq__(self, other: object) -> bool: ...
def __len__(self) -> int: ...
@typing.overload
def __getitem__(self, idx: int) -> x509.RevokedCertificate: ...
@typing.overload
def __getitem__(self, idx: slice) -> list[x509.RevokedCertificate]: ...
def __iter__(self) -> Iterator[x509.RevokedCertificate]: ...
def is_signature_valid(
self, public_key: CertificateIssuerPublicKeyTypes
) -> bool: ...
class CertificateSigningRequest:
def __eq__(self, other: object) -> bool: ...
def __hash__(self) -> int: ...
def public_key(self) -> CertificatePublicKeyTypes: ...
@property
def subject(self) -> x509.Name: ...
@property
def signature_hash_algorithm(
self,
) -> hashes.HashAlgorithm | None: ...
@property
def signature_algorithm_oid(self) -> x509.ObjectIdentifier: ...
@property
def signature_algorithm_parameters(
self,
) -> PSS | PKCS1v15 | ECDSA | None: ...
@property
def extensions(self) -> x509.Extensions: ...
@property
def attributes(self) -> x509.Attributes: ...
def public_bytes(self, encoding: serialization.Encoding) -> bytes: ...
@property
def signature(self) -> bytes: ...
@property
def tbs_certrequest_bytes(self) -> bytes: ...
@property
def is_signature_valid(self) -> bool: ...
class PolicyBuilder:
def time(self, time: datetime.datetime) -> PolicyBuilder: ...
def store(self, store: Store) -> PolicyBuilder: ...
def max_chain_depth(self, max_chain_depth: int) -> PolicyBuilder: ...
def extension_policies(
self, *, ca_policy: ExtensionPolicy, ee_policy: ExtensionPolicy
) -> PolicyBuilder: ...
def build_client_verifier(self) -> ClientVerifier: ...
def build_server_verifier(
self, subject: x509.verification.Subject
) -> ServerVerifier: ...
class Policy:
@property
def max_chain_depth(self) -> int: ...
@property
def subject(self) -> x509.verification.Subject | None: ...
@property
def validation_time(self) -> datetime.datetime: ...
@property
def extended_key_usage(self) -> x509.ObjectIdentifier: ...
@property
def minimum_rsa_modulus(self) -> int: ...
class Criticality:
CRITICAL: Criticality
AGNOSTIC: Criticality
NON_CRITICAL: Criticality
T = typing.TypeVar("T", contravariant=True, bound=x509.ExtensionType)
MaybeExtensionValidatorCallback = typing.Callable[
[
Policy,
x509.Certificate,
T | None,
],
None,
]
PresentExtensionValidatorCallback = typing.Callable[
[Policy, x509.Certificate, T],
None,
]
class ExtensionPolicy:
@staticmethod
def permit_all() -> ExtensionPolicy: ...
@staticmethod
def webpki_defaults_ca() -> ExtensionPolicy: ...
@staticmethod
def webpki_defaults_ee() -> ExtensionPolicy: ...
def require_not_present(
self, extension_type: type[x509.ExtensionType]
) -> ExtensionPolicy: ...
def may_be_present(
self,
extension_type: type[T],
criticality: Criticality,
validator: MaybeExtensionValidatorCallback[T] | None,
) -> ExtensionPolicy: ...
def require_present(
self,
extension_type: type[T],
criticality: Criticality,
validator: PresentExtensionValidatorCallback[T] | None,
) -> ExtensionPolicy: ...
class VerifiedClient:
@property
def subjects(self) -> list[x509.GeneralName] | None: ...
@property
def chain(self) -> list[x509.Certificate]: ...
class ClientVerifier:
@property
def policy(self) -> Policy: ...
@property
def store(self) -> Store: ...
def verify(
self,
leaf: x509.Certificate,
intermediates: list[x509.Certificate],
) -> VerifiedClient: ...
class ServerVerifier:
@property
def policy(self) -> Policy: ...
@property
def store(self) -> Store: ...
def verify(
self,
leaf: x509.Certificate,
intermediates: list[x509.Certificate],
) -> list[x509.Certificate]: ...
class Store:
def __init__(self, certs: list[x509.Certificate]) -> None: ...
class VerificationError(Exception): ...

Some files were not shown because too many files have changed in this diff Show More