+# endif
+#endif
+'''
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cffi/verifier.py b/Voltage Vikings/lib/python3.9/site-packages/cffi/verifier.py
new file mode 100644
index 00000000..a500c781
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cffi/verifier.py
@@ -0,0 +1,307 @@
+#
+# DEPRECATED: implementation for ffi.verify()
+#
+import sys, os, binascii, shutil, io
+from . import __version_verifier_modules__
+from . import ffiplatform
+from .error import VerificationError
+
+if sys.version_info >= (3, 3):
+ import importlib.machinery
+ def _extension_suffixes():
+ return importlib.machinery.EXTENSION_SUFFIXES[:]
+else:
+ import imp
+ def _extension_suffixes():
+ return [suffix for suffix, _, type in imp.get_suffixes()
+ if type == imp.C_EXTENSION]
+
+
+if sys.version_info >= (3,):
+ NativeIO = io.StringIO
+else:
+ class NativeIO(io.BytesIO):
+ def write(self, s):
+ if isinstance(s, unicode):
+ s = s.encode('ascii')
+ super(NativeIO, self).write(s)
+
+
+class Verifier(object):
+
+ def __init__(self, ffi, preamble, tmpdir=None, modulename=None,
+ ext_package=None, tag='', force_generic_engine=False,
+ source_extension='.c', flags=None, relative_to=None, **kwds):
+ if ffi._parser._uses_new_feature:
+ raise VerificationError(
+ "feature not supported with ffi.verify(), but only "
+ "with ffi.set_source(): %s" % (ffi._parser._uses_new_feature,))
+ self.ffi = ffi
+ self.preamble = preamble
+ if not modulename:
+ flattened_kwds = ffiplatform.flatten(kwds)
+ vengine_class = _locate_engine_class(ffi, force_generic_engine)
+ self._vengine = vengine_class(self)
+ self._vengine.patch_extension_kwds(kwds)
+ self.flags = flags
+ self.kwds = self.make_relative_to(kwds, relative_to)
+ #
+ if modulename:
+ if tag:
+ raise TypeError("can't specify both 'modulename' and 'tag'")
+ else:
+ key = '\x00'.join(['%d.%d' % sys.version_info[:2],
+ __version_verifier_modules__,
+ preamble, flattened_kwds] +
+ ffi._cdefsources)
+ if sys.version_info >= (3,):
+ key = key.encode('utf-8')
+ k1 = hex(binascii.crc32(key[0::2]) & 0xffffffff)
+ k1 = k1.lstrip('0x').rstrip('L')
+ k2 = hex(binascii.crc32(key[1::2]) & 0xffffffff)
+ k2 = k2.lstrip('0').rstrip('L')
+ modulename = '_cffi_%s_%s%s%s' % (tag, self._vengine._class_key,
+ k1, k2)
+ suffix = _get_so_suffixes()[0]
+ self.tmpdir = tmpdir or _caller_dir_pycache()
+ self.sourcefilename = os.path.join(self.tmpdir, modulename + source_extension)
+ self.modulefilename = os.path.join(self.tmpdir, modulename + suffix)
+ self.ext_package = ext_package
+ self._has_source = False
+ self._has_module = False
+
+ def write_source(self, file=None):
+ """Write the C source code. It is produced in 'self.sourcefilename',
+ which can be tweaked beforehand."""
+ with self.ffi._lock:
+ if self._has_source and file is None:
+ raise VerificationError(
+ "source code already written")
+ self._write_source(file)
+
+ def compile_module(self):
+ """Write the C source code (if not done already) and compile it.
+ This produces a dynamic link library in 'self.modulefilename'."""
+ with self.ffi._lock:
+ if self._has_module:
+ raise VerificationError("module already compiled")
+ if not self._has_source:
+ self._write_source()
+ self._compile_module()
+
+ def load_library(self):
+ """Get a C module from this Verifier instance.
+ Returns an instance of a FFILibrary class that behaves like the
+ objects returned by ffi.dlopen(), but that delegates all
+ operations to the C module. If necessary, the C code is written
+ and compiled first.
+ """
+ with self.ffi._lock:
+ if not self._has_module:
+ self._locate_module()
+ if not self._has_module:
+ if not self._has_source:
+ self._write_source()
+ self._compile_module()
+ return self._load_library()
+
+ def get_module_name(self):
+ basename = os.path.basename(self.modulefilename)
+ # kill both the .so extension and the other .'s, as introduced
+ # by Python 3: 'basename.cpython-33m.so'
+ basename = basename.split('.', 1)[0]
+ # and the _d added in Python 2 debug builds --- but try to be
+ # conservative and not kill a legitimate _d
+ if basename.endswith('_d') and hasattr(sys, 'gettotalrefcount'):
+ basename = basename[:-2]
+ return basename
+
+ def get_extension(self):
+ ffiplatform._hack_at_distutils() # backward compatibility hack
+ if not self._has_source:
+ with self.ffi._lock:
+ if not self._has_source:
+ self._write_source()
+ sourcename = ffiplatform.maybe_relative_path(self.sourcefilename)
+ modname = self.get_module_name()
+ return ffiplatform.get_extension(sourcename, modname, **self.kwds)
+
+ def generates_python_module(self):
+ return self._vengine._gen_python_module
+
+ def make_relative_to(self, kwds, relative_to):
+ if relative_to and os.path.dirname(relative_to):
+ dirname = os.path.dirname(relative_to)
+ kwds = kwds.copy()
+ for key in ffiplatform.LIST_OF_FILE_NAMES:
+ if key in kwds:
+ lst = kwds[key]
+ if not isinstance(lst, (list, tuple)):
+ raise TypeError("keyword '%s' should be a list or tuple"
+ % (key,))
+ lst = [os.path.join(dirname, fn) for fn in lst]
+ kwds[key] = lst
+ return kwds
+
+ # ----------
+
+ def _locate_module(self):
+ if not os.path.isfile(self.modulefilename):
+ if self.ext_package:
+ try:
+ pkg = __import__(self.ext_package, None, None, ['__doc__'])
+ except ImportError:
+ return # cannot import the package itself, give up
+ # (e.g. it might be called differently before installation)
+ path = pkg.__path__
+ else:
+ path = None
+ filename = self._vengine.find_module(self.get_module_name(), path,
+ _get_so_suffixes())
+ if filename is None:
+ return
+ self.modulefilename = filename
+ self._vengine.collect_types()
+ self._has_module = True
+
+ def _write_source_to(self, file):
+ self._vengine._f = file
+ try:
+ self._vengine.write_source_to_f()
+ finally:
+ del self._vengine._f
+
+ def _write_source(self, file=None):
+ if file is not None:
+ self._write_source_to(file)
+ else:
+ # Write our source file to an in memory file.
+ f = NativeIO()
+ self._write_source_to(f)
+ source_data = f.getvalue()
+
+ # Determine if this matches the current file
+ if os.path.exists(self.sourcefilename):
+ with open(self.sourcefilename, "r") as fp:
+ needs_written = not (fp.read() == source_data)
+ else:
+ needs_written = True
+
+ # Actually write the file out if it doesn't match
+ if needs_written:
+ _ensure_dir(self.sourcefilename)
+ with open(self.sourcefilename, "w") as fp:
+ fp.write(source_data)
+
+ # Set this flag
+ self._has_source = True
+
+ def _compile_module(self):
+ # compile this C source
+ tmpdir = os.path.dirname(self.sourcefilename)
+ outputfilename = ffiplatform.compile(tmpdir, self.get_extension())
+ try:
+ same = ffiplatform.samefile(outputfilename, self.modulefilename)
+ except OSError:
+ same = False
+ if not same:
+ _ensure_dir(self.modulefilename)
+ shutil.move(outputfilename, self.modulefilename)
+ self._has_module = True
+
+ def _load_library(self):
+ assert self._has_module
+ if self.flags is not None:
+ return self._vengine.load_library(self.flags)
+ else:
+ return self._vengine.load_library()
+
+# ____________________________________________________________
+
+_FORCE_GENERIC_ENGINE = False # for tests
+
+def _locate_engine_class(ffi, force_generic_engine):
+ if _FORCE_GENERIC_ENGINE:
+ force_generic_engine = True
+ if not force_generic_engine:
+ if '__pypy__' in sys.builtin_module_names:
+ force_generic_engine = True
+ else:
+ try:
+ import _cffi_backend
+ except ImportError:
+ _cffi_backend = '?'
+ if ffi._backend is not _cffi_backend:
+ force_generic_engine = True
+ if force_generic_engine:
+ from . import vengine_gen
+ return vengine_gen.VGenericEngine
+ else:
+ from . import vengine_cpy
+ return vengine_cpy.VCPythonEngine
+
+# ____________________________________________________________
+
+_TMPDIR = None
+
+def _caller_dir_pycache():
+ if _TMPDIR:
+ return _TMPDIR
+ result = os.environ.get('CFFI_TMPDIR')
+ if result:
+ return result
+ filename = sys._getframe(2).f_code.co_filename
+ return os.path.abspath(os.path.join(os.path.dirname(filename),
+ '__pycache__'))
+
+def set_tmpdir(dirname):
+ """Set the temporary directory to use instead of __pycache__."""
+ global _TMPDIR
+ _TMPDIR = dirname
+
+def cleanup_tmpdir(tmpdir=None, keep_so=False):
+ """Clean up the temporary directory by removing all files in it
+ called `_cffi_*.{c,so}` as well as the `build` subdirectory."""
+ tmpdir = tmpdir or _caller_dir_pycache()
+ try:
+ filelist = os.listdir(tmpdir)
+ except OSError:
+ return
+ if keep_so:
+ suffix = '.c' # only remove .c files
+ else:
+ suffix = _get_so_suffixes()[0].lower()
+ for fn in filelist:
+ if fn.lower().startswith('_cffi_') and (
+ fn.lower().endswith(suffix) or fn.lower().endswith('.c')):
+ try:
+ os.unlink(os.path.join(tmpdir, fn))
+ except OSError:
+ pass
+ clean_dir = [os.path.join(tmpdir, 'build')]
+ for dir in clean_dir:
+ try:
+ for fn in os.listdir(dir):
+ fn = os.path.join(dir, fn)
+ if os.path.isdir(fn):
+ clean_dir.append(fn)
+ else:
+ os.unlink(fn)
+ except OSError:
+ pass
+
+def _get_so_suffixes():
+ suffixes = _extension_suffixes()
+ if not suffixes:
+ # bah, no C_EXTENSION available. Occurs on pypy without cpyext
+ if sys.platform == 'win32':
+ suffixes = [".pyd"]
+ else:
+ suffixes = [".so"]
+
+ return suffixes
+
+def _ensure_dir(filename):
+ dirname = os.path.dirname(filename)
+ if dirname and not os.path.isdir(dirname):
+ os.makedirs(dirname)
diff --git a/Voltage Vikings/lib/python3.9/site-packages/charset_normalizer-3.1.0.dist-info/INSTALLER b/Voltage Vikings/lib/python3.9/site-packages/charset_normalizer-3.1.0.dist-info/INSTALLER
new file mode 100644
index 00000000..a1b589e3
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/charset_normalizer-3.1.0.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/Voltage Vikings/lib/python3.9/site-packages/charset_normalizer-3.1.0.dist-info/LICENSE b/Voltage Vikings/lib/python3.9/site-packages/charset_normalizer-3.1.0.dist-info/LICENSE
new file mode 100644
index 00000000..ad82355b
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/charset_normalizer-3.1.0.dist-info/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2019 TAHRI Ahmed R.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
\ No newline at end of file
diff --git a/Voltage Vikings/lib/python3.9/site-packages/charset_normalizer-3.1.0.dist-info/METADATA b/Voltage Vikings/lib/python3.9/site-packages/charset_normalizer-3.1.0.dist-info/METADATA
new file mode 100644
index 00000000..867b915c
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/charset_normalizer-3.1.0.dist-info/METADATA
@@ -0,0 +1,616 @@
+Metadata-Version: 2.1
+Name: charset-normalizer
+Version: 3.1.0
+Summary: The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet.
+Home-page: https://github.com/Ousret/charset_normalizer
+Author: Ahmed TAHRI
+Author-email: ahmed.tahri@cloudnursery.dev
+License: MIT
+Project-URL: Bug Reports, https://github.com/Ousret/charset_normalizer/issues
+Project-URL: Documentation, https://charset-normalizer.readthedocs.io/en/latest
+Keywords: encoding,charset,charset-detector,detector,normalization,unicode,chardet,detect
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Intended Audience :: Developers
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Topic :: Text Processing :: Linguistic
+Classifier: Topic :: Utilities
+Classifier: Typing :: Typed
+Requires-Python: >=3.7.0
+Description-Content-Type: text/markdown
+License-File: LICENSE
+Provides-Extra: unicode_backport
+
+Charset Detection, for Everyone 👋 
+
+
+ The Real First Universal Charset Detector
+
+
+
+
+
+
+
+
+
+
+
+> A library that helps you read text from an unknown charset encoding. Motivated by `chardet`,
+> I'm trying to resolve the issue by taking a new approach.
+> All IANA character set names for which the Python core library provides codecs are supported.
+
+
+ >>>>> 👉 Try Me Online Now, Then Adopt Me 👈 <<<<<
+
+
+This project offers you an alternative to **Universal Charset Encoding Detector**, also known as **Chardet**.
+
+| Feature | [Chardet](https://github.com/chardet/chardet) | Charset Normalizer | [cChardet](https://github.com/PyYoshi/cChardet) |
+|--------------------------------------------------|:---------------------------------------------:|:------------------------------------------------------------------------------------------------------:|:-----------------------------------------------:|
+| `Fast` | ❌ | ✅ | ✅ |
+| `Universal**` | ❌ | ✅ | ❌ |
+| `Reliable` **without** distinguishable standards | ❌ | ✅ | ✅ |
+| `Reliable` **with** distinguishable standards | ✅ | ✅ | ✅ |
+| `License` | LGPL-2.1 _restrictive_ | MIT | MPL-1.1 _restrictive_ |
+| `Native Python` | ✅ | ✅ | ❌ |
+| `Detect spoken language` | ❌ | ✅ | N/A |
+| `UnicodeDecodeError Safety` | ❌ | ✅ | ❌ |
+| `Whl Size` | 193.6 kB | 39.5 kB | ~200 kB |
+| `Supported Encoding` | 33 | :tada: [90](https://charset-normalizer.readthedocs.io/en/latest/user/support.html#supported-encodings) | 40 |
+
+
+
+
+*\*\* : They are clearly using specific code for a specific encoding even if covering most of used one*
+Did you got there because of the logs? See [https://charset-normalizer.readthedocs.io/en/latest/user/miscellaneous.html](https://charset-normalizer.readthedocs.io/en/latest/user/miscellaneous.html)
+
+## ⭐ Your support
+
+*Fork, test-it, star-it, submit your ideas! We do listen.*
+
+## ⚡ Performance
+
+This package offer better performance than its counterpart Chardet. Here are some numbers.
+
+| Package | Accuracy | Mean per file (ms) | File per sec (est) |
+|-----------------------------------------------|:--------:|:------------------:|:------------------:|
+| [chardet](https://github.com/chardet/chardet) | 86 % | 200 ms | 5 file/sec |
+| charset-normalizer | **98 %** | **10 ms** | 100 file/sec |
+
+| Package | 99th percentile | 95th percentile | 50th percentile |
+|-----------------------------------------------|:---------------:|:---------------:|:---------------:|
+| [chardet](https://github.com/chardet/chardet) | 1200 ms | 287 ms | 23 ms |
+| charset-normalizer | 100 ms | 50 ms | 5 ms |
+
+Chardet's performance on larger file (1MB+) are very poor. Expect huge difference on large payload.
+
+> Stats are generated using 400+ files using default parameters. More details on used files, see GHA workflows.
+> And yes, these results might change at any time. The dataset can be updated to include more files.
+> The actual delays heavily depends on your CPU capabilities. The factors should remain the same.
+> Keep in mind that the stats are generous and that Chardet accuracy vs our is measured using Chardet initial capability
+> (eg. Supported Encoding) Challenge-them if you want.
+
+## ✨ Installation
+
+Using PyPi for latest stable
+```sh
+pip install charset-normalizer -U
+```
+
+## 🚀 Basic Usage
+
+### CLI
+This package comes with a CLI.
+
+```
+usage: normalizer [-h] [-v] [-a] [-n] [-m] [-r] [-f] [-t THRESHOLD]
+ file [file ...]
+
+The Real First Universal Charset Detector. Discover originating encoding used
+on text file. Normalize text to unicode.
+
+positional arguments:
+ files File(s) to be analysed
+
+optional arguments:
+ -h, --help show this help message and exit
+ -v, --verbose Display complementary information about file if any.
+ Stdout will contain logs about the detection process.
+ -a, --with-alternative
+ Output complementary possibilities if any. Top-level
+ JSON WILL be a list.
+ -n, --normalize Permit to normalize input file. If not set, program
+ does not write anything.
+ -m, --minimal Only output the charset detected to STDOUT. Disabling
+ JSON output.
+ -r, --replace Replace file when trying to normalize it instead of
+ creating a new one.
+ -f, --force Replace file without asking if you are sure, use this
+ flag with caution.
+ -t THRESHOLD, --threshold THRESHOLD
+ Define a custom maximum amount of chaos allowed in
+ decoded content. 0. <= chaos <= 1.
+ --version Show version information and exit.
+```
+
+```bash
+normalizer ./data/sample.1.fr.srt
+```
+
+:tada: Since version 1.4.0 the CLI produce easily usable stdout result in JSON format.
+
+```json
+{
+ "path": "/home/default/projects/charset_normalizer/data/sample.1.fr.srt",
+ "encoding": "cp1252",
+ "encoding_aliases": [
+ "1252",
+ "windows_1252"
+ ],
+ "alternative_encodings": [
+ "cp1254",
+ "cp1256",
+ "cp1258",
+ "iso8859_14",
+ "iso8859_15",
+ "iso8859_16",
+ "iso8859_3",
+ "iso8859_9",
+ "latin_1",
+ "mbcs"
+ ],
+ "language": "French",
+ "alphabets": [
+ "Basic Latin",
+ "Latin-1 Supplement"
+ ],
+ "has_sig_or_bom": false,
+ "chaos": 0.149,
+ "coherence": 97.152,
+ "unicode_path": null,
+ "is_preferred": true
+}
+```
+
+### Python
+*Just print out normalized text*
+```python
+from charset_normalizer import from_path
+
+results = from_path('./my_subtitle.srt')
+
+print(str(results.best()))
+```
+
+*Upgrade your code without effort*
+```python
+from charset_normalizer import detect
+```
+
+The above code will behave the same as **chardet**. We ensure that we offer the best (reasonable) BC result possible.
+
+See the docs for advanced usage : [readthedocs.io](https://charset-normalizer.readthedocs.io/en/latest/)
+
+## 😇 Why
+
+When I started using Chardet, I noticed that it was not suited to my expectations, and I wanted to propose a
+reliable alternative using a completely different method. Also! I never back down on a good challenge!
+
+I **don't care** about the **originating charset** encoding, because **two different tables** can
+produce **two identical rendered string.**
+What I want is to get readable text, the best I can.
+
+In a way, **I'm brute forcing text decoding.** How cool is that ? 😎
+
+Don't confuse package **ftfy** with charset-normalizer or chardet. ftfy goal is to repair unicode string whereas charset-normalizer to convert raw file in unknown encoding to unicode.
+
+## 🍰 How
+
+ - Discard all charset encoding table that could not fit the binary content.
+ - Measure noise, or the mess once opened (by chunks) with a corresponding charset encoding.
+ - Extract matches with the lowest mess detected.
+ - Additionally, we measure coherence / probe for a language.
+
+**Wait a minute**, what is noise/mess and coherence according to **YOU ?**
+
+*Noise :* I opened hundred of text files, **written by humans**, with the wrong encoding table. **I observed**, then
+**I established** some ground rules about **what is obvious** when **it seems like** a mess.
+ I know that my interpretation of what is noise is probably incomplete, feel free to contribute in order to
+ improve or rewrite it.
+
+*Coherence :* For each language there is on earth, we have computed ranked letter appearance occurrences (the best we can). So I thought
+that intel is worth something here. So I use those records against decoded text to check if I can detect intelligent design.
+
+## ⚡ Known limitations
+
+ - Language detection is unreliable when text contains two or more languages sharing identical letters. (eg. HTML (english tags) + Turkish content (Sharing Latin characters))
+ - Every charset detector heavily depends on sufficient content. In common cases, do not bother run detection on very tiny content.
+
+## ⚠️ About Python EOLs
+
+**If you are running:**
+
+- Python >=2.7,<3.5: Unsupported
+- Python 3.5: charset-normalizer < 2.1
+- Python 3.6: charset-normalizer < 3.1
+
+Upgrade your Python interpreter as soon as possible.
+
+## 👤 Contributing
+
+Contributions, issues and feature requests are very much welcome.
+Feel free to check [issues page](https://github.com/ousret/charset_normalizer/issues) if you want to contribute.
+
+## 📝 License
+
+Copyright © [Ahmed TAHRI @Ousret](https://github.com/Ousret).
+This project is [MIT](https://github.com/Ousret/charset_normalizer/blob/master/LICENSE) licensed.
+
+Characters frequencies used in this project © 2012 [Denny Vrandečić](http://simia.net/letters/)
+
+## 💼 For Enterprise
+
+Professional support for charset-normalizer is available as part of the [Tidelift
+Subscription][1]. Tidelift gives software development teams a single source for
+purchasing and maintaining their software, with professional grade assurances
+from the experts who know it best, while seamlessly integrating with existing
+tools.
+
+[1]: https://tidelift.com/subscription/pkg/pypi-charset-normalizer?utm_source=pypi-charset-normalizer&utm_medium=readme
+
+# Changelog
+All notable changes to charset-normalizer will be documented in this file. This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
+The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
+
+## [3.1.0](https://github.com/Ousret/charset_normalizer/compare/3.0.1...3.1.0) (2023-03-06)
+
+### Added
+- Argument `should_rename_legacy` for legacy function `detect` and disregard any new arguments without errors (PR #262)
+
+### Removed
+- Support for Python 3.6 (PR #260)
+
+### Changed
+- Optional speedup provided by mypy/c 1.0.1
+
+## [3.0.1](https://github.com/Ousret/charset_normalizer/compare/3.0.0...3.0.1) (2022-11-18)
+
+### Fixed
+- Multi-bytes cutter/chunk generator did not always cut correctly (PR #233)
+
+### Changed
+- Speedup provided by mypy/c 0.990 on Python >= 3.7
+
+## [3.0.0](https://github.com/Ousret/charset_normalizer/compare/2.1.1...3.0.0) (2022-10-20)
+
+### Added
+- Extend the capability of explain=True when cp_isolation contains at most two entries (min one), will log in details of the Mess-detector results
+- Support for alternative language frequency set in charset_normalizer.assets.FREQUENCIES
+- Add parameter `language_threshold` in `from_bytes`, `from_path` and `from_fp` to adjust the minimum expected coherence ratio
+- `normalizer --version` now specify if current version provide extra speedup (meaning mypyc compilation whl)
+
+### Changed
+- Build with static metadata using 'build' frontend
+- Make the language detection stricter
+- Optional: Module `md.py` can be compiled using Mypyc to provide an extra speedup up to 4x faster than v2.1
+
+### Fixed
+- CLI with opt --normalize fail when using full path for files
+- TooManyAccentuatedPlugin induce false positive on the mess detection when too few alpha character have been fed to it
+- Sphinx warnings when generating the documentation
+
+### Removed
+- Coherence detector no longer return 'Simple English' instead return 'English'
+- Coherence detector no longer return 'Classical Chinese' instead return 'Chinese'
+- Breaking: Method `first()` and `best()` from CharsetMatch
+- UTF-7 will no longer appear as "detected" without a recognized SIG/mark (is unreliable/conflict with ASCII)
+- Breaking: Class aliases CharsetDetector, CharsetDoctor, CharsetNormalizerMatch and CharsetNormalizerMatches
+- Breaking: Top-level function `normalize`
+- Breaking: Properties `chaos_secondary_pass`, `coherence_non_latin` and `w_counter` from CharsetMatch
+- Support for the backport `unicodedata2`
+
+## [3.0.0rc1](https://github.com/Ousret/charset_normalizer/compare/3.0.0b2...3.0.0rc1) (2022-10-18)
+
+### Added
+- Extend the capability of explain=True when cp_isolation contains at most two entries (min one), will log in details of the Mess-detector results
+- Support for alternative language frequency set in charset_normalizer.assets.FREQUENCIES
+- Add parameter `language_threshold` in `from_bytes`, `from_path` and `from_fp` to adjust the minimum expected coherence ratio
+
+### Changed
+- Build with static metadata using 'build' frontend
+- Make the language detection stricter
+
+### Fixed
+- CLI with opt --normalize fail when using full path for files
+- TooManyAccentuatedPlugin induce false positive on the mess detection when too few alpha character have been fed to it
+
+### Removed
+- Coherence detector no longer return 'Simple English' instead return 'English'
+- Coherence detector no longer return 'Classical Chinese' instead return 'Chinese'
+
+## [3.0.0b2](https://github.com/Ousret/charset_normalizer/compare/3.0.0b1...3.0.0b2) (2022-08-21)
+
+### Added
+- `normalizer --version` now specify if current version provide extra speedup (meaning mypyc compilation whl)
+
+### Removed
+- Breaking: Method `first()` and `best()` from CharsetMatch
+- UTF-7 will no longer appear as "detected" without a recognized SIG/mark (is unreliable/conflict with ASCII)
+
+### Fixed
+- Sphinx warnings when generating the documentation
+
+## [3.0.0b1](https://github.com/Ousret/charset_normalizer/compare/2.1.0...3.0.0b1) (2022-08-15)
+
+### Changed
+- Optional: Module `md.py` can be compiled using Mypyc to provide an extra speedup up to 4x faster than v2.1
+
+### Removed
+- Breaking: Class aliases CharsetDetector, CharsetDoctor, CharsetNormalizerMatch and CharsetNormalizerMatches
+- Breaking: Top-level function `normalize`
+- Breaking: Properties `chaos_secondary_pass`, `coherence_non_latin` and `w_counter` from CharsetMatch
+- Support for the backport `unicodedata2`
+
+## [2.1.1](https://github.com/Ousret/charset_normalizer/compare/2.1.0...2.1.1) (2022-08-19)
+
+### Deprecated
+- Function `normalize` scheduled for removal in 3.0
+
+### Changed
+- Removed useless call to decode in fn is_unprintable (#206)
+
+### Fixed
+- Third-party library (i18n xgettext) crashing not recognizing utf_8 (PEP 263) with underscore from [@aleksandernovikov](https://github.com/aleksandernovikov) (#204)
+
+## [2.1.0](https://github.com/Ousret/charset_normalizer/compare/2.0.12...2.1.0) (2022-06-19)
+
+### Added
+- Output the Unicode table version when running the CLI with `--version` (PR #194)
+
+### Changed
+- Re-use decoded buffer for single byte character sets from [@nijel](https://github.com/nijel) (PR #175)
+- Fixing some performance bottlenecks from [@deedy5](https://github.com/deedy5) (PR #183)
+
+### Fixed
+- Workaround potential bug in cpython with Zero Width No-Break Space located in Arabic Presentation Forms-B, Unicode 1.1 not acknowledged as space (PR #175)
+- CLI default threshold aligned with the API threshold from [@oleksandr-kuzmenko](https://github.com/oleksandr-kuzmenko) (PR #181)
+
+### Removed
+- Support for Python 3.5 (PR #192)
+
+### Deprecated
+- Use of backport unicodedata from `unicodedata2` as Python is quickly catching up, scheduled for removal in 3.0 (PR #194)
+
+## [2.0.12](https://github.com/Ousret/charset_normalizer/compare/2.0.11...2.0.12) (2022-02-12)
+
+### Fixed
+- ASCII miss-detection on rare cases (PR #170)
+
+## [2.0.11](https://github.com/Ousret/charset_normalizer/compare/2.0.10...2.0.11) (2022-01-30)
+
+### Added
+- Explicit support for Python 3.11 (PR #164)
+
+### Changed
+- The logging behavior have been completely reviewed, now using only TRACE and DEBUG levels (PR #163 #165)
+
+## [2.0.10](https://github.com/Ousret/charset_normalizer/compare/2.0.9...2.0.10) (2022-01-04)
+
+### Fixed
+- Fallback match entries might lead to UnicodeDecodeError for large bytes sequence (PR #154)
+
+### Changed
+- Skipping the language-detection (CD) on ASCII (PR #155)
+
+## [2.0.9](https://github.com/Ousret/charset_normalizer/compare/2.0.8...2.0.9) (2021-12-03)
+
+### Changed
+- Moderating the logging impact (since 2.0.8) for specific environments (PR #147)
+
+### Fixed
+- Wrong logging level applied when setting kwarg `explain` to True (PR #146)
+
+## [2.0.8](https://github.com/Ousret/charset_normalizer/compare/2.0.7...2.0.8) (2021-11-24)
+### Changed
+- Improvement over Vietnamese detection (PR #126)
+- MD improvement on trailing data and long foreign (non-pure latin) data (PR #124)
+- Efficiency improvements in cd/alphabet_languages from [@adbar](https://github.com/adbar) (PR #122)
+- call sum() without an intermediary list following PEP 289 recommendations from [@adbar](https://github.com/adbar) (PR #129)
+- Code style as refactored by Sourcery-AI (PR #131)
+- Minor adjustment on the MD around european words (PR #133)
+- Remove and replace SRTs from assets / tests (PR #139)
+- Initialize the library logger with a `NullHandler` by default from [@nmaynes](https://github.com/nmaynes) (PR #135)
+- Setting kwarg `explain` to True will add provisionally (bounded to function lifespan) a specific stream handler (PR #135)
+
+### Fixed
+- Fix large (misleading) sequence giving UnicodeDecodeError (PR #137)
+- Avoid using too insignificant chunk (PR #137)
+
+### Added
+- Add and expose function `set_logging_handler` to configure a specific StreamHandler from [@nmaynes](https://github.com/nmaynes) (PR #135)
+- Add `CHANGELOG.md` entries, format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) (PR #141)
+
+## [2.0.7](https://github.com/Ousret/charset_normalizer/compare/2.0.6...2.0.7) (2021-10-11)
+### Added
+- Add support for Kazakh (Cyrillic) language detection (PR #109)
+
+### Changed
+- Further, improve inferring the language from a given single-byte code page (PR #112)
+- Vainly trying to leverage PEP263 when PEP3120 is not supported (PR #116)
+- Refactoring for potential performance improvements in loops from [@adbar](https://github.com/adbar) (PR #113)
+- Various detection improvement (MD+CD) (PR #117)
+
+### Removed
+- Remove redundant logging entry about detected language(s) (PR #115)
+
+### Fixed
+- Fix a minor inconsistency between Python 3.5 and other versions regarding language detection (PR #117 #102)
+
+## [2.0.6](https://github.com/Ousret/charset_normalizer/compare/2.0.5...2.0.6) (2021-09-18)
+### Fixed
+- Unforeseen regression with the loss of the backward-compatibility with some older minor of Python 3.5.x (PR #100)
+- Fix CLI crash when using --minimal output in certain cases (PR #103)
+
+### Changed
+- Minor improvement to the detection efficiency (less than 1%) (PR #106 #101)
+
+## [2.0.5](https://github.com/Ousret/charset_normalizer/compare/2.0.4...2.0.5) (2021-09-14)
+### Changed
+- The project now comply with: flake8, mypy, isort and black to ensure a better overall quality (PR #81)
+- The BC-support with v1.x was improved, the old staticmethods are restored (PR #82)
+- The Unicode detection is slightly improved (PR #93)
+- Add syntax sugar \_\_bool\_\_ for results CharsetMatches list-container (PR #91)
+
+### Removed
+- The project no longer raise warning on tiny content given for detection, will be simply logged as warning instead (PR #92)
+
+### Fixed
+- In some rare case, the chunks extractor could cut in the middle of a multi-byte character and could mislead the mess detection (PR #95)
+- Some rare 'space' characters could trip up the UnprintablePlugin/Mess detection (PR #96)
+- The MANIFEST.in was not exhaustive (PR #78)
+
+## [2.0.4](https://github.com/Ousret/charset_normalizer/compare/2.0.3...2.0.4) (2021-07-30)
+### Fixed
+- The CLI no longer raise an unexpected exception when no encoding has been found (PR #70)
+- Fix accessing the 'alphabets' property when the payload contains surrogate characters (PR #68)
+- The logger could mislead (explain=True) on detected languages and the impact of one MBCS match (PR #72)
+- Submatch factoring could be wrong in rare edge cases (PR #72)
+- Multiple files given to the CLI were ignored when publishing results to STDOUT. (After the first path) (PR #72)
+- Fix line endings from CRLF to LF for certain project files (PR #67)
+
+### Changed
+- Adjust the MD to lower the sensitivity, thus improving the global detection reliability (PR #69 #76)
+- Allow fallback on specified encoding if any (PR #71)
+
+## [2.0.3](https://github.com/Ousret/charset_normalizer/compare/2.0.2...2.0.3) (2021-07-16)
+### Changed
+- Part of the detection mechanism has been improved to be less sensitive, resulting in more accurate detection results. Especially ASCII. (PR #63)
+- According to the community wishes, the detection will fall back on ASCII or UTF-8 in a last-resort case. (PR #64)
+
+## [2.0.2](https://github.com/Ousret/charset_normalizer/compare/2.0.1...2.0.2) (2021-07-15)
+### Fixed
+- Empty/Too small JSON payload miss-detection fixed. Report from [@tseaver](https://github.com/tseaver) (PR #59)
+
+### Changed
+- Don't inject unicodedata2 into sys.modules from [@akx](https://github.com/akx) (PR #57)
+
+## [2.0.1](https://github.com/Ousret/charset_normalizer/compare/2.0.0...2.0.1) (2021-07-13)
+### Fixed
+- Make it work where there isn't a filesystem available, dropping assets frequencies.json. Report from [@sethmlarson](https://github.com/sethmlarson). (PR #55)
+- Using explain=False permanently disable the verbose output in the current runtime (PR #47)
+- One log entry (language target preemptive) was not shown in logs when using explain=True (PR #47)
+- Fix undesired exception (ValueError) on getitem of instance CharsetMatches (PR #52)
+
+### Changed
+- Public function normalize default args values were not aligned with from_bytes (PR #53)
+
+### Added
+- You may now use charset aliases in cp_isolation and cp_exclusion arguments (PR #47)
+
+## [2.0.0](https://github.com/Ousret/charset_normalizer/compare/1.4.1...2.0.0) (2021-07-02)
+### Changed
+- 4x to 5 times faster than the previous 1.4.0 release. At least 2x faster than Chardet.
+- Accent has been made on UTF-8 detection, should perform rather instantaneous.
+- The backward compatibility with Chardet has been greatly improved. The legacy detect function returns an identical charset name whenever possible.
+- The detection mechanism has been slightly improved, now Turkish content is detected correctly (most of the time)
+- The program has been rewritten to ease the readability and maintainability. (+Using static typing)
+- utf_7 detection has been reinstated.
+
+### Removed
+- This package no longer require anything when used with Python 3.5 (Dropped cached_property)
+- Removed support for these languages: Catalan, Esperanto, Kazakh, Basque, Volapük, Azeri, Galician, Nynorsk, Macedonian, and Serbocroatian.
+- The exception hook on UnicodeDecodeError has been removed.
+
+### Deprecated
+- Methods coherence_non_latin, w_counter, chaos_secondary_pass of the class CharsetMatch are now deprecated and scheduled for removal in v3.0
+
+### Fixed
+- The CLI output used the relative path of the file(s). Should be absolute.
+
+## [1.4.1](https://github.com/Ousret/charset_normalizer/compare/1.4.0...1.4.1) (2021-05-28)
+### Fixed
+- Logger configuration/usage no longer conflict with others (PR #44)
+
+## [1.4.0](https://github.com/Ousret/charset_normalizer/compare/1.3.9...1.4.0) (2021-05-21)
+### Removed
+- Using standard logging instead of using the package loguru.
+- Dropping nose test framework in favor of the maintained pytest.
+- Choose to not use dragonmapper package to help with gibberish Chinese/CJK text.
+- Require cached_property only for Python 3.5 due to constraint. Dropping for every other interpreter version.
+- Stop support for UTF-7 that does not contain a SIG.
+- Dropping PrettyTable, replaced with pure JSON output in CLI.
+
+### Fixed
+- BOM marker in a CharsetNormalizerMatch instance could be False in rare cases even if obviously present. Due to the sub-match factoring process.
+- Not searching properly for the BOM when trying utf32/16 parent codec.
+
+### Changed
+- Improving the package final size by compressing frequencies.json.
+- Huge improvement over the largest payloads.
+
+### Added
+- CLI now produces JSON consumable output.
+- Return ASCII if given sequences fit. Given reasonable confidence.
+
+## [1.3.9](https://github.com/Ousret/charset_normalizer/compare/1.3.8...1.3.9) (2021-05-13)
+
+### Fixed
+- In some very rare cases, you may end up getting encode/decode errors due to a bad bytes payload (PR #40)
+
+## [1.3.8](https://github.com/Ousret/charset_normalizer/compare/1.3.7...1.3.8) (2021-05-12)
+
+### Fixed
+- Empty given payload for detection may cause an exception if trying to access the `alphabets` property. (PR #39)
+
+## [1.3.7](https://github.com/Ousret/charset_normalizer/compare/1.3.6...1.3.7) (2021-05-12)
+
+### Fixed
+- The legacy detect function should return UTF-8-SIG if sig is present in the payload. (PR #38)
+
+## [1.3.6](https://github.com/Ousret/charset_normalizer/compare/1.3.5...1.3.6) (2021-02-09)
+
+### Changed
+- Amend the previous release to allow prettytable 2.0 (PR #35)
+
+## [1.3.5](https://github.com/Ousret/charset_normalizer/compare/1.3.4...1.3.5) (2021-02-08)
+
+### Fixed
+- Fix error while using the package with a python pre-release interpreter (PR #33)
+
+### Changed
+- Dependencies refactoring, constraints revised.
+
+### Added
+- Add python 3.9 and 3.10 to the supported interpreters
+
+MIT License
+
+Copyright (c) 2019 TAHRI Ahmed R.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/Voltage Vikings/lib/python3.9/site-packages/charset_normalizer-3.1.0.dist-info/RECORD b/Voltage Vikings/lib/python3.9/site-packages/charset_normalizer-3.1.0.dist-info/RECORD
new file mode 100644
index 00000000..01dac36a
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/charset_normalizer-3.1.0.dist-info/RECORD
@@ -0,0 +1,35 @@
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/charset_normalizer/__init__.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/charset_normalizer/api.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/charset_normalizer/assets/__init__.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/charset_normalizer/cd.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/charset_normalizer/cli/__init__.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/charset_normalizer/cli/normalizer.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/charset_normalizer/constant.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/charset_normalizer/legacy.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/charset_normalizer/md.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/charset_normalizer/models.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/charset_normalizer/utils.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/charset_normalizer/version.cpython-39.pyc,,
+../../../bin/normalizer,sha256=B0iPpa0c7Cfc3XREvwq2YiseMhIpfkCLyEMfGIeeg0M,264
+charset_normalizer-3.1.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+charset_normalizer-3.1.0.dist-info/LICENSE,sha256=6zGgxaT7Cbik4yBV0lweX5w1iidS_vPNcgIT0cz-4kE,1070
+charset_normalizer-3.1.0.dist-info/METADATA,sha256=8lfcrrmtfEq--eZqh8FJzEjptLCEoGXySKruxIms44I,30983
+charset_normalizer-3.1.0.dist-info/RECORD,,
+charset_normalizer-3.1.0.dist-info/WHEEL,sha256=C_UN2A-Q94yg5uF00Lizg2aQEZjfzpeyFE_wdLoeYLk,108
+charset_normalizer-3.1.0.dist-info/entry_points.txt,sha256=uYo8aIGLWv8YgWfSna5HnfY_En4pkF1w4bgawNAXzP0,76
+charset_normalizer-3.1.0.dist-info/top_level.txt,sha256=7ASyzePr8_xuZWJsnqJjIBtyV8vhEo0wBCv1MPRRi3Q,19
+charset_normalizer/__init__.py,sha256=aAb_F9Zb23pb4NO6TfIfqLXLvf1PjnLBBOuPvQwPA18,1549
+charset_normalizer/api.py,sha256=Vh44rFXztkxCjW7gF2waq8TyRL3mXKX8RwNGB99bhb4,18624
+charset_normalizer/assets/__init__.py,sha256=wpRfujN7GJuEE5wHHo3wEDVoJ5ovzRIxsImyimCBfGU,20069
+charset_normalizer/cd.py,sha256=mZuiTSKq4XpweSDD2H4T4R3Axtaa-QS0tpEWdpMuAzQ,12554
+charset_normalizer/cli/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+charset_normalizer/cli/normalizer.py,sha256=2F-xURZJzo063Ye-2RLJ2wcmURpbKeAzKwpiws65dAs,9744
+charset_normalizer/constant.py,sha256=PmCeoKXqq3ZbCtCUpKHwwFBIv9DXMT_an1yd24q28mA,19101
+charset_normalizer/legacy.py,sha256=T-QuVMsMeDiQEk8WSszMrzVJg_14AMeSkmHdRYhdl1k,2071
+charset_normalizer/md.cpython-39-darwin.so,sha256=9aAH18Rlf98v5wy4kuUb9Sz-_troeRla88cqKQ6PFko,50564
+charset_normalizer/md.py,sha256=MXPKP_oLbsubulEL_1rxcYKSd5FeEfyEfNNm5O6ADpc,18258
+charset_normalizer/md__mypyc.cpython-39-darwin.so,sha256=dfbN1tM9jDanuJL-3v3j8oKVXkrWGRy6RkeqVRF5GIE,274459
+charset_normalizer/models.py,sha256=mC11wo84l00u2o03TRNX7M5ItBAbPUKKXgJSFxA35GY,11492
+charset_normalizer/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+charset_normalizer/utils.py,sha256=tKLpquPYQdaRdFRwBo5gPOi06ov8UCJy5X1Pti0Q78U,11544
+charset_normalizer/version.py,sha256=bekbdpF_D3BtF-PhbPnA9PNaZaI8kKIgl3LTCD5FmYk,79
diff --git a/Voltage Vikings/lib/python3.9/site-packages/charset_normalizer-3.1.0.dist-info/WHEEL b/Voltage Vikings/lib/python3.9/site-packages/charset_normalizer-3.1.0.dist-info/WHEEL
new file mode 100644
index 00000000..e5f45eec
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/charset_normalizer-3.1.0.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.38.4)
+Root-Is-Purelib: false
+Tag: cp39-cp39-macosx_11_0_arm64
+
diff --git a/Voltage Vikings/lib/python3.9/site-packages/charset_normalizer-3.1.0.dist-info/entry_points.txt b/Voltage Vikings/lib/python3.9/site-packages/charset_normalizer-3.1.0.dist-info/entry_points.txt
new file mode 100644
index 00000000..a06d3600
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/charset_normalizer-3.1.0.dist-info/entry_points.txt
@@ -0,0 +1,2 @@
+[console_scripts]
+normalizer = charset_normalizer.cli.normalizer:cli_detect
diff --git a/Voltage Vikings/lib/python3.9/site-packages/charset_normalizer-3.1.0.dist-info/top_level.txt b/Voltage Vikings/lib/python3.9/site-packages/charset_normalizer-3.1.0.dist-info/top_level.txt
new file mode 100644
index 00000000..66958f0a
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/charset_normalizer-3.1.0.dist-info/top_level.txt
@@ -0,0 +1 @@
+charset_normalizer
diff --git a/Voltage Vikings/lib/python3.9/site-packages/charset_normalizer/__init__.py b/Voltage Vikings/lib/python3.9/site-packages/charset_normalizer/__init__.py
new file mode 100644
index 00000000..ebb5da89
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/charset_normalizer/__init__.py
@@ -0,0 +1,45 @@
+# -*- coding: utf-8 -*-
+"""
+Charset-Normalizer
+~~~~~~~~~~~~~~~~~~
+The Real First Universal Charset Detector.
+A library that helps you read text from an unknown charset encoding.
+Motivated by chardet, This package is trying to resolve the issue by taking a new approach.
+All IANA character set names for which the Python core library provides codecs are supported.
+
+Basic usage:
+   >>> from charset_normalizer import from_bytes
+   >>> results = from_bytes('Bсеки човек има право на образование. Oбразованието!'.encode('utf_8'))
+   >>> best_guess = results.best()
+   >>> str(best_guess)
+   'Bсеки човек има право на образование. Oбразованието!'
+
+Others methods and usages are available - see the full documentation.
+:copyright: (c) 2021 by Ahmed TAHRI
+:license: MIT, see LICENSE for more details.
+"""
+import logging
+
+from .api import from_bytes, from_fp, from_path
+from .legacy import detect
+from .models import CharsetMatch, CharsetMatches
+from .utils import set_logging_handler
+from .version import VERSION, __version__
+
+# Names re-exported at the package root; this is the supported public API.
+__all__ = (
+    "from_fp",
+    "from_path",
+    "from_bytes",
+    "detect",
+    "CharsetMatch",
+    "CharsetMatches",
+    "__version__",
+    "VERSION",
+    "set_logging_handler",
+)
+
+# Attach a NullHandler to the top level logger by default
+# https://docs.python.org/3.3/howto/logging.html#configuring-logging-for-a-library
+
+logging.getLogger("charset_normalizer").addHandler(logging.NullHandler())
diff --git a/Voltage Vikings/lib/python3.9/site-packages/charset_normalizer/api.py b/Voltage Vikings/lib/python3.9/site-packages/charset_normalizer/api.py
new file mode 100644
index 00000000..9dbf4201
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/charset_normalizer/api.py
@@ -0,0 +1,554 @@
+import logging
+from os import PathLike
+from typing import Any, BinaryIO, List, Optional, Set
+
+from .cd import (
+ coherence_ratio,
+ encoding_languages,
+ mb_encoding_languages,
+ merge_coherence_ratios,
+)
+from .constant import IANA_SUPPORTED, TOO_BIG_SEQUENCE, TOO_SMALL_SEQUENCE, TRACE
+from .md import mess_ratio
+from .models import CharsetMatch, CharsetMatches
+from .utils import (
+ any_specified_encoding,
+ cut_sequence_chunks,
+ iana_name,
+ identify_sig_or_bom,
+ is_cp_similar,
+ is_multi_byte_encoding,
+ should_strip_sig_or_bom,
+)
+
+# Will most likely be controversial
+# logging.addLevelName(TRACE, "TRACE")
+# Package-level logger; __init__ attaches a NullHandler so the library
+# stays silent unless the host application configures logging itself.
+logger = logging.getLogger("charset_normalizer")
+# Handler attached only while from_bytes(explain=True) runs; it is removed
+# again before from_bytes returns, on every return path.
+explain_handler = logging.StreamHandler()
+explain_handler.setFormatter(
+    logging.Formatter("%(asctime)s | %(levelname)s | %(message)s")
+)
+
+
+def from_bytes(
+    sequences: bytes,
+    steps: int = 5,
+    chunk_size: int = 512,
+    threshold: float = 0.2,
+    cp_isolation: Optional[List[str]] = None,
+    cp_exclusion: Optional[List[str]] = None,
+    preemptive_behaviour: bool = True,
+    explain: bool = False,
+    language_threshold: float = 0.1,
+) -> CharsetMatches:
+    """
+    Given a raw bytes sequence, return the best possibles charset usable to render str objects.
+    If there is no results, it is a strong indicator that the source is binary/not text.
+    By default, the process will extract 5 blocks of 512o each to assess the mess and coherence of a given sequence.
+    And will give up a particular code page after 20% of measured mess. Those criteria are customizable at will.
+
+    The preemptive behavior DOES NOT replace the traditional detection workflow, it prioritize a particular code page
+    but never take it for granted. Can improve the performance.
+
+    You may want to focus your attention to some code page or/and not others, use cp_isolation and cp_exclusion for that
+    purpose.
+
+    This function will strip the SIG in the payload/sequence every time except on UTF-16, UTF-32.
+    By default the library does not setup any handler other than the NullHandler, if you choose to set the 'explain'
+    toggle to True it will alter the logger configuration to add a StreamHandler that is suitable for debugging.
+    Custom logging format and handler can be set manually.
+    """
+
+    if not isinstance(sequences, (bytearray, bytes)):
+        raise TypeError(
+            "Expected object of type bytes or bytearray, got: {0}".format(
+                type(sequences)
+            )
+        )
+
+    # 'explain' temporarily raises verbosity; handler and level are restored
+    # before every return statement below.
+    if explain:
+        previous_logger_level: int = logger.level
+        logger.addHandler(explain_handler)
+        logger.setLevel(TRACE)
+
+    length: int = len(sequences)
+
+    # Empty payload: report utf_8 with zero chaos rather than "nothing found".
+    if length == 0:
+        logger.debug("Encoding detection on empty bytes, assuming utf_8 intention.")
+        if explain:
+            logger.removeHandler(explain_handler)
+            logger.setLevel(previous_logger_level or logging.WARNING)
+        return CharsetMatches([CharsetMatch(sequences, "utf_8", 0.0, False, [], "")])
+
+    if cp_isolation is not None:
+        logger.log(
+            TRACE,
+            "cp_isolation is set. use this flag for debugging purpose. "
+            "limited list of encoding allowed : %s.",
+            ", ".join(cp_isolation),
+        )
+        cp_isolation = [iana_name(cp, False) for cp in cp_isolation]
+    else:
+        cp_isolation = []
+
+    if cp_exclusion is not None:
+        logger.log(
+            TRACE,
+            "cp_exclusion is set. use this flag for debugging purpose. "
+            "limited list of encoding excluded : %s.",
+            ", ".join(cp_exclusion),
+        )
+        cp_exclusion = [iana_name(cp, False) for cp in cp_exclusion]
+    else:
+        cp_exclusion = []
+
+    # Shrink steps/chunk_size so the probing plan fits the actual payload.
+    if length <= (chunk_size * steps):
+        logger.log(
+            TRACE,
+            "override steps (%i) and chunk_size (%i) as content does not fit (%i byte(s) given) parameters.",
+            steps,
+            chunk_size,
+            length,
+        )
+        steps = 1
+        chunk_size = length
+
+    if steps > 1 and length / steps < chunk_size:
+        chunk_size = int(length / steps)
+
+    is_too_small_sequence: bool = len(sequences) < TOO_SMALL_SEQUENCE
+    is_too_large_sequence: bool = len(sequences) >= TOO_BIG_SEQUENCE
+
+    if is_too_small_sequence:
+        logger.log(
+            TRACE,
+            "Trying to detect encoding from a tiny portion of ({}) byte(s).".format(
+                length
+            ),
+        )
+    elif is_too_large_sequence:
+        logger.log(
+            TRACE,
+            "Using lazy str decoding because the payload is quite large, ({}) byte(s).".format(
+                length
+            ),
+        )
+
+    # Candidates tried before the full IANA list: any charset declared inside
+    # the payload (preemptive), any BOM/SIG match, then ascii and utf_8.
+    prioritized_encodings: List[str] = []
+
+    specified_encoding: Optional[str] = (
+        any_specified_encoding(sequences) if preemptive_behaviour else None
+    )
+
+    if specified_encoding is not None:
+        prioritized_encodings.append(specified_encoding)
+        logger.log(
+            TRACE,
+            "Detected declarative mark in sequence. Priority +1 given for %s.",
+            specified_encoding,
+        )
+
+    tested: Set[str] = set()
+    tested_but_hard_failure: List[str] = []
+    tested_but_soft_failure: List[str] = []
+
+    fallback_ascii: Optional[CharsetMatch] = None
+    fallback_u8: Optional[CharsetMatch] = None
+    fallback_specified: Optional[CharsetMatch] = None
+
+    results: CharsetMatches = CharsetMatches()
+
+    sig_encoding, sig_payload = identify_sig_or_bom(sequences)
+
+    if sig_encoding is not None:
+        prioritized_encodings.append(sig_encoding)
+        logger.log(
+            TRACE,
+            "Detected a SIG or BOM mark on first %i byte(s). Priority +1 given for %s.",
+            len(sig_payload),
+            sig_encoding,
+        )
+
+    prioritized_encodings.append("ascii")
+
+    if "utf_8" not in prioritized_encodings:
+        prioritized_encodings.append("utf_8")
+
+    # Main detection loop: for each candidate, decode the payload in chunks,
+    # measure mess (MD), then language coherence (CD), and collect matches.
+    for encoding_iana in prioritized_encodings + IANA_SUPPORTED:
+        if cp_isolation and encoding_iana not in cp_isolation:
+            continue
+
+        if cp_exclusion and encoding_iana in cp_exclusion:
+            continue
+
+        if encoding_iana in tested:
+            continue
+
+        tested.add(encoding_iana)
+
+        decoded_payload: Optional[str] = None
+        bom_or_sig_available: bool = sig_encoding == encoding_iana
+        strip_sig_or_bom: bool = bom_or_sig_available and should_strip_sig_or_bom(
+            encoding_iana
+        )
+
+        if encoding_iana in {"utf_16", "utf_32"} and not bom_or_sig_available:
+            logger.log(
+                TRACE,
+                "Encoding %s won't be tested as-is because it require a BOM. Will try some sub-encoder LE/BE.",
+                encoding_iana,
+            )
+            continue
+        if encoding_iana in {"utf_7"} and not bom_or_sig_available:
+            logger.log(
+                TRACE,
+                "Encoding %s won't be tested as-is because detection is unreliable without BOM/SIG.",
+                encoding_iana,
+            )
+            continue
+
+        try:
+            is_multi_byte_decoder: bool = is_multi_byte_encoding(encoding_iana)
+        except (ModuleNotFoundError, ImportError):
+            logger.log(
+                TRACE,
+                "Encoding %s does not provide an IncrementalDecoder",
+                encoding_iana,
+            )
+            continue
+
+        # First sanity decode. For huge single-byte payloads, only the first
+        # ~500kB is decoded here (lazy); the tail is re-checked further below.
+        try:
+            if is_too_large_sequence and is_multi_byte_decoder is False:
+                str(
+                    sequences[: int(50e4)]
+                    if strip_sig_or_bom is False
+                    else sequences[len(sig_payload) : int(50e4)],
+                    encoding=encoding_iana,
+                )
+            else:
+                decoded_payload = str(
+                    sequences
+                    if strip_sig_or_bom is False
+                    else sequences[len(sig_payload) :],
+                    encoding=encoding_iana,
+                )
+        except (UnicodeDecodeError, LookupError) as e:
+            if not isinstance(e, LookupError):
+                logger.log(
+                    TRACE,
+                    "Code page %s does not fit given bytes sequence at ALL. %s",
+                    encoding_iana,
+                    str(e),
+                )
+            tested_but_hard_failure.append(encoding_iana)
+            continue
+
+        similar_soft_failure_test: bool = False
+
+        for encoding_soft_failed in tested_but_soft_failure:
+            if is_cp_similar(encoding_iana, encoding_soft_failed):
+                similar_soft_failure_test = True
+                break
+
+        if similar_soft_failure_test:
+            logger.log(
+                TRACE,
+                "%s is deemed too similar to code page %s and was consider unsuited already. Continuing!",
+                encoding_iana,
+                encoding_soft_failed,
+            )
+            continue
+
+        # Byte offsets of the chunks probed across the payload.
+        r_ = range(
+            0 if not bom_or_sig_available else len(sig_payload),
+            length,
+            int(length / steps),
+        )
+
+        multi_byte_bonus: bool = (
+            is_multi_byte_decoder
+            and decoded_payload is not None
+            and len(decoded_payload) < length
+        )
+
+        if multi_byte_bonus:
+            logger.log(
+                TRACE,
+                "Code page %s is a multi byte encoding table and it appear that at least one character "
+                "was encoded using n-bytes.",
+                encoding_iana,
+            )
+
+        # Abandon a candidate once a quarter of its chunks (min 2) exceed the
+        # mess threshold.
+        max_chunk_gave_up: int = int(len(r_) / 4)
+
+        max_chunk_gave_up = max(max_chunk_gave_up, 2)
+        early_stop_count: int = 0
+        lazy_str_hard_failure = False
+
+        md_chunks: List[str] = []
+        md_ratios = []
+
+        try:
+            for chunk in cut_sequence_chunks(
+                sequences,
+                encoding_iana,
+                r_,
+                chunk_size,
+                bom_or_sig_available,
+                strip_sig_or_bom,
+                sig_payload,
+                is_multi_byte_decoder,
+                decoded_payload,
+            ):
+                md_chunks.append(chunk)
+
+                md_ratios.append(
+                    mess_ratio(
+                        chunk,
+                        threshold,
+                        explain is True and 1 <= len(cp_isolation) <= 2,
+                    )
+                )
+
+                if md_ratios[-1] >= threshold:
+                    early_stop_count += 1
+
+                if (early_stop_count >= max_chunk_gave_up) or (
+                    bom_or_sig_available and strip_sig_or_bom is False
+                ):
+                    break
+        except (
+            UnicodeDecodeError
+        ) as e:  # Lazy str loading may have missed something there
+            logger.log(
+                TRACE,
+                "LazyStr Loading: After MD chunk decode, code page %s does not fit given bytes sequence at ALL. %s",
+                encoding_iana,
+                str(e),
+            )
+            early_stop_count = max_chunk_gave_up
+            lazy_str_hard_failure = True
+
+        # We might want to check the sequence again with the whole content
+        # Only if initial MD tests passes
+        if (
+            not lazy_str_hard_failure
+            and is_too_large_sequence
+            and not is_multi_byte_decoder
+        ):
+            try:
+                sequences[int(50e3) :].decode(encoding_iana, errors="strict")
+            except UnicodeDecodeError as e:
+                logger.log(
+                    TRACE,
+                    "LazyStr Loading: After final lookup, code page %s does not fit given bytes sequence at ALL. %s",
+                    encoding_iana,
+                    str(e),
+                )
+                tested_but_hard_failure.append(encoding_iana)
+                continue
+
+        mean_mess_ratio: float = sum(md_ratios) / len(md_ratios) if md_ratios else 0.0
+        if mean_mess_ratio >= threshold or early_stop_count >= max_chunk_gave_up:
+            tested_but_soft_failure.append(encoding_iana)
+            logger.log(
+                TRACE,
+                "%s was excluded because of initial chaos probing. Gave up %i time(s). "
+                "Computed mean chaos is %f %%.",
+                encoding_iana,
+                early_stop_count,
+                round(mean_mess_ratio * 100, ndigits=3),
+            )
+            # Preparing those fallbacks in case we got nothing.
+            if (
+                encoding_iana in ["ascii", "utf_8", specified_encoding]
+                and not lazy_str_hard_failure
+            ):
+                fallback_entry = CharsetMatch(
+                    sequences, encoding_iana, threshold, False, [], decoded_payload
+                )
+                if encoding_iana == specified_encoding:
+                    fallback_specified = fallback_entry
+                elif encoding_iana == "ascii":
+                    fallback_ascii = fallback_entry
+                else:
+                    fallback_u8 = fallback_entry
+            continue
+
+        logger.log(
+            TRACE,
+            "%s passed initial chaos probing. Mean measured chaos is %f %%",
+            encoding_iana,
+            round(mean_mess_ratio * 100, ndigits=3),
+        )
+
+        if not is_multi_byte_decoder:
+            target_languages: List[str] = encoding_languages(encoding_iana)
+        else:
+            target_languages = mb_encoding_languages(encoding_iana)
+
+        if target_languages:
+            logger.log(
+                TRACE,
+                "{} should target any language(s) of {}".format(
+                    encoding_iana, str(target_languages)
+                ),
+            )
+
+        cd_ratios = []
+
+        # We shall skip the CD when its about ASCII
+        # Most of the time its not relevant to run "language-detection" on it.
+        if encoding_iana != "ascii":
+            for chunk in md_chunks:
+                chunk_languages = coherence_ratio(
+                    chunk,
+                    language_threshold,
+                    ",".join(target_languages) if target_languages else None,
+                )
+
+                cd_ratios.append(chunk_languages)
+
+        cd_ratios_merged = merge_coherence_ratios(cd_ratios)
+
+        if cd_ratios_merged:
+            logger.log(
+                TRACE,
+                "We detected language {} using {}".format(
+                    cd_ratios_merged, encoding_iana
+                ),
+            )
+
+        results.append(
+            CharsetMatch(
+                sequences,
+                encoding_iana,
+                mean_mess_ratio,
+                bom_or_sig_available,
+                cd_ratios_merged,
+                decoded_payload,
+            )
+        )
+
+        # Early exits: a near-perfect priority candidate, or a BOM/SIG match,
+        # short-circuits the remaining IANA candidates.
+        if (
+            encoding_iana in [specified_encoding, "ascii", "utf_8"]
+            and mean_mess_ratio < 0.1
+        ):
+            logger.debug(
+                "Encoding detection: %s is most likely the one.", encoding_iana
+            )
+            if explain:
+                logger.removeHandler(explain_handler)
+                logger.setLevel(previous_logger_level)
+            return CharsetMatches([results[encoding_iana]])
+
+        if encoding_iana == sig_encoding:
+            logger.debug(
+                "Encoding detection: %s is most likely the one as we detected a BOM or SIG within "
+                "the beginning of the sequence.",
+                encoding_iana,
+            )
+            if explain:
+                logger.removeHandler(explain_handler)
+                logger.setLevel(previous_logger_level)
+            return CharsetMatches([results[encoding_iana]])
+
+    # Nothing confidently detected: fall back on the specified/utf_8/ascii
+    # candidates that were set aside during the soft-failure bookkeeping.
+    if len(results) == 0:
+        if fallback_u8 or fallback_ascii or fallback_specified:
+            logger.log(
+                TRACE,
+                "Nothing got out of the detection process. Using ASCII/UTF-8/Specified fallback.",
+            )
+
+        if fallback_specified:
+            logger.debug(
+                "Encoding detection: %s will be used as a fallback match",
+                fallback_specified.encoding,
+            )
+            results.append(fallback_specified)
+        elif (
+            (fallback_u8 and fallback_ascii is None)
+            or (
+                fallback_u8
+                and fallback_ascii
+                and fallback_u8.fingerprint != fallback_ascii.fingerprint
+            )
+            # NOTE(review): this last disjunct subsumes the two above, so the
+            # elif reduces to "fallback_u8 is not None"; kept as-is.
+            or (fallback_u8 is not None)
+        ):
+            logger.debug("Encoding detection: utf_8 will be used as a fallback match")
+            results.append(fallback_u8)
+        elif fallback_ascii:
+            logger.debug("Encoding detection: ascii will be used as a fallback match")
+            results.append(fallback_ascii)
+
+    if results:
+        logger.debug(
+            "Encoding detection: Found %s as plausible (best-candidate) for content. With %i alternatives.",
+            results.best().encoding,  # type: ignore
+            len(results) - 1,
+        )
+    else:
+        logger.debug("Encoding detection: Unable to determine any suitable charset.")
+
+    if explain:
+        logger.removeHandler(explain_handler)
+        logger.setLevel(previous_logger_level)
+
+    return results
+
+
+def from_fp(
+ fp: BinaryIO,
+ steps: int = 5,
+ chunk_size: int = 512,
+ threshold: float = 0.20,
+ cp_isolation: Optional[List[str]] = None,
+ cp_exclusion: Optional[List[str]] = None,
+ preemptive_behaviour: bool = True,
+ explain: bool = False,
+ language_threshold: float = 0.1,
+) -> CharsetMatches:
+ """
+ Same thing than the function from_bytes but using a file pointer that is already ready.
+ Will not close the file pointer.
+ """
+ return from_bytes(
+ fp.read(),
+ steps,
+ chunk_size,
+ threshold,
+ cp_isolation,
+ cp_exclusion,
+ preemptive_behaviour,
+ explain,
+ language_threshold,
+ )
+
+
+def from_path(
+ path: "PathLike[Any]",
+ steps: int = 5,
+ chunk_size: int = 512,
+ threshold: float = 0.20,
+ cp_isolation: Optional[List[str]] = None,
+ cp_exclusion: Optional[List[str]] = None,
+ preemptive_behaviour: bool = True,
+ explain: bool = False,
+ language_threshold: float = 0.1,
+) -> CharsetMatches:
+ """
+ Same thing than the function from_bytes but with one extra step. Opening and reading given file path in binary mode.
+ Can raise IOError.
+ """
+ with open(path, "rb") as fp:
+ return from_fp(
+ fp,
+ steps,
+ chunk_size,
+ threshold,
+ cp_isolation,
+ cp_exclusion,
+ preemptive_behaviour,
+ explain,
+ language_threshold,
+ )
diff --git a/Voltage Vikings/lib/python3.9/site-packages/charset_normalizer/assets/__init__.py b/Voltage Vikings/lib/python3.9/site-packages/charset_normalizer/assets/__init__.py
new file mode 100644
index 00000000..9075930d
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/charset_normalizer/assets/__init__.py
@@ -0,0 +1,1440 @@
+# -*- coding: utf-8 -*-
+from typing import Dict, List
+
+# Language label that contain the em dash "—"
+# character are to be considered alternative seq to origin
+FREQUENCIES: Dict[str, List[str]] = {
+ "English": [
+ "e",
+ "a",
+ "t",
+ "i",
+ "o",
+ "n",
+ "s",
+ "r",
+ "h",
+ "l",
+ "d",
+ "c",
+ "u",
+ "m",
+ "f",
+ "p",
+ "g",
+ "w",
+ "y",
+ "b",
+ "v",
+ "k",
+ "x",
+ "j",
+ "z",
+ "q",
+ ],
+ "English—": [
+ "e",
+ "a",
+ "t",
+ "i",
+ "o",
+ "n",
+ "s",
+ "r",
+ "h",
+ "l",
+ "d",
+ "c",
+ "m",
+ "u",
+ "f",
+ "p",
+ "g",
+ "w",
+ "b",
+ "y",
+ "v",
+ "k",
+ "j",
+ "x",
+ "z",
+ "q",
+ ],
+ "German": [
+ "e",
+ "n",
+ "i",
+ "r",
+ "s",
+ "t",
+ "a",
+ "d",
+ "h",
+ "u",
+ "l",
+ "g",
+ "o",
+ "c",
+ "m",
+ "b",
+ "f",
+ "k",
+ "w",
+ "z",
+ "p",
+ "v",
+ "ü",
+ "ä",
+ "ö",
+ "j",
+ ],
+ "French": [
+ "e",
+ "a",
+ "s",
+ "n",
+ "i",
+ "t",
+ "r",
+ "l",
+ "u",
+ "o",
+ "d",
+ "c",
+ "p",
+ "m",
+ "é",
+ "v",
+ "g",
+ "f",
+ "b",
+ "h",
+ "q",
+ "à",
+ "x",
+ "è",
+ "y",
+ "j",
+ ],
+ "Dutch": [
+ "e",
+ "n",
+ "a",
+ "i",
+ "r",
+ "t",
+ "o",
+ "d",
+ "s",
+ "l",
+ "g",
+ "h",
+ "v",
+ "m",
+ "u",
+ "k",
+ "c",
+ "p",
+ "b",
+ "w",
+ "j",
+ "z",
+ "f",
+ "y",
+ "x",
+ "ë",
+ ],
+ "Italian": [
+ "e",
+ "i",
+ "a",
+ "o",
+ "n",
+ "l",
+ "t",
+ "r",
+ "s",
+ "c",
+ "d",
+ "u",
+ "p",
+ "m",
+ "g",
+ "v",
+ "f",
+ "b",
+ "z",
+ "h",
+ "q",
+ "è",
+ "à",
+ "k",
+ "y",
+ "ò",
+ ],
+ "Polish": [
+ "a",
+ "i",
+ "o",
+ "e",
+ "n",
+ "r",
+ "z",
+ "w",
+ "s",
+ "c",
+ "t",
+ "k",
+ "y",
+ "d",
+ "p",
+ "m",
+ "u",
+ "l",
+ "j",
+ "ł",
+ "g",
+ "b",
+ "h",
+ "ą",
+ "ę",
+ "ó",
+ ],
+ "Spanish": [
+ "e",
+ "a",
+ "o",
+ "n",
+ "s",
+ "r",
+ "i",
+ "l",
+ "d",
+ "t",
+ "c",
+ "u",
+ "m",
+ "p",
+ "b",
+ "g",
+ "v",
+ "f",
+ "y",
+ "ó",
+ "h",
+ "q",
+ "í",
+ "j",
+ "z",
+ "á",
+ ],
+ "Russian": [
+ "о",
+ "а",
+ "е",
+ "и",
+ "н",
+ "с",
+ "т",
+ "р",
+ "в",
+ "л",
+ "к",
+ "м",
+ "д",
+ "п",
+ "у",
+ "г",
+ "я",
+ "ы",
+ "з",
+ "б",
+ "й",
+ "ь",
+ "ч",
+ "х",
+ "ж",
+ "ц",
+ ],
+ # Jap-Kanji
+ "Japanese": [
+ "人",
+ "一",
+ "大",
+ "亅",
+ "丁",
+ "丨",
+ "竹",
+ "笑",
+ "口",
+ "日",
+ "今",
+ "二",
+ "彳",
+ "行",
+ "十",
+ "土",
+ "丶",
+ "寸",
+ "寺",
+ "時",
+ "乙",
+ "丿",
+ "乂",
+ "气",
+ "気",
+ "冂",
+ "巾",
+ "亠",
+ "市",
+ "目",
+ "儿",
+ "見",
+ "八",
+ "小",
+ "凵",
+ "県",
+ "月",
+ "彐",
+ "門",
+ "間",
+ "木",
+ "東",
+ "山",
+ "出",
+ "本",
+ "中",
+ "刀",
+ "分",
+ "耳",
+ "又",
+ "取",
+ "最",
+ "言",
+ "田",
+ "心",
+ "思",
+ "刂",
+ "前",
+ "京",
+ "尹",
+ "事",
+ "生",
+ "厶",
+ "云",
+ "会",
+ "未",
+ "来",
+ "白",
+ "冫",
+ "楽",
+ "灬",
+ "馬",
+ "尸",
+ "尺",
+ "駅",
+ "明",
+ "耂",
+ "者",
+ "了",
+ "阝",
+ "都",
+ "高",
+ "卜",
+ "占",
+ "厂",
+ "广",
+ "店",
+ "子",
+ "申",
+ "奄",
+ "亻",
+ "俺",
+ "上",
+ "方",
+ "冖",
+ "学",
+ "衣",
+ "艮",
+ "食",
+ "自",
+ ],
+ # Jap-Katakana
+ "Japanese—": [
+ "ー",
+ "ン",
+ "ス",
+ "・",
+ "ル",
+ "ト",
+ "リ",
+ "イ",
+ "ア",
+ "ラ",
+ "ッ",
+ "ク",
+ "ド",
+ "シ",
+ "レ",
+ "ジ",
+ "タ",
+ "フ",
+ "ロ",
+ "カ",
+ "テ",
+ "マ",
+ "ィ",
+ "グ",
+ "バ",
+ "ム",
+ "プ",
+ "オ",
+ "コ",
+ "デ",
+ "ニ",
+ "ウ",
+ "メ",
+ "サ",
+ "ビ",
+ "ナ",
+ "ブ",
+ "ャ",
+ "エ",
+ "ュ",
+ "チ",
+ "キ",
+ "ズ",
+ "ダ",
+ "パ",
+ "ミ",
+ "ェ",
+ "ョ",
+ "ハ",
+ "セ",
+ "ベ",
+ "ガ",
+ "モ",
+ "ツ",
+ "ネ",
+ "ボ",
+ "ソ",
+ "ノ",
+ "ァ",
+ "ヴ",
+ "ワ",
+ "ポ",
+ "ペ",
+ "ピ",
+ "ケ",
+ "ゴ",
+ "ギ",
+ "ザ",
+ "ホ",
+ "ゲ",
+ "ォ",
+ "ヤ",
+ "ヒ",
+ "ユ",
+ "ヨ",
+ "ヘ",
+ "ゼ",
+ "ヌ",
+ "ゥ",
+ "ゾ",
+ "ヶ",
+ "ヂ",
+ "ヲ",
+ "ヅ",
+ "ヵ",
+ "ヱ",
+ "ヰ",
+ "ヮ",
+ "ヽ",
+ "゠",
+ "ヾ",
+ "ヷ",
+ "ヿ",
+ "ヸ",
+ "ヹ",
+ "ヺ",
+ ],
+ # Jap-Hiragana
+ "Japanese——": [
+ "の",
+ "に",
+ "る",
+ "た",
+ "と",
+ "は",
+ "し",
+ "い",
+ "を",
+ "で",
+ "て",
+ "が",
+ "な",
+ "れ",
+ "か",
+ "ら",
+ "さ",
+ "っ",
+ "り",
+ "す",
+ "あ",
+ "も",
+ "こ",
+ "ま",
+ "う",
+ "く",
+ "よ",
+ "き",
+ "ん",
+ "め",
+ "お",
+ "け",
+ "そ",
+ "つ",
+ "だ",
+ "や",
+ "え",
+ "ど",
+ "わ",
+ "ち",
+ "み",
+ "せ",
+ "じ",
+ "ば",
+ "へ",
+ "び",
+ "ず",
+ "ろ",
+ "ほ",
+ "げ",
+ "む",
+ "べ",
+ "ひ",
+ "ょ",
+ "ゆ",
+ "ぶ",
+ "ご",
+ "ゃ",
+ "ね",
+ "ふ",
+ "ぐ",
+ "ぎ",
+ "ぼ",
+ "ゅ",
+ "づ",
+ "ざ",
+ "ぞ",
+ "ぬ",
+ "ぜ",
+ "ぱ",
+ "ぽ",
+ "ぷ",
+ "ぴ",
+ "ぃ",
+ "ぁ",
+ "ぇ",
+ "ぺ",
+ "ゞ",
+ "ぢ",
+ "ぉ",
+ "ぅ",
+ "ゐ",
+ "ゝ",
+ "ゑ",
+ "゛",
+ "゜",
+ "ゎ",
+ "ゔ",
+ "゚",
+ "ゟ",
+ "゙",
+ "ゕ",
+ "ゖ",
+ ],
+ "Portuguese": [
+ "a",
+ "e",
+ "o",
+ "s",
+ "i",
+ "r",
+ "d",
+ "n",
+ "t",
+ "m",
+ "u",
+ "c",
+ "l",
+ "p",
+ "g",
+ "v",
+ "b",
+ "f",
+ "h",
+ "ã",
+ "q",
+ "é",
+ "ç",
+ "á",
+ "z",
+ "í",
+ ],
+ "Swedish": [
+ "e",
+ "a",
+ "n",
+ "r",
+ "t",
+ "s",
+ "i",
+ "l",
+ "d",
+ "o",
+ "m",
+ "k",
+ "g",
+ "v",
+ "h",
+ "f",
+ "u",
+ "p",
+ "ä",
+ "c",
+ "b",
+ "ö",
+ "å",
+ "y",
+ "j",
+ "x",
+ ],
+ "Chinese": [
+ "的",
+ "一",
+ "是",
+ "不",
+ "了",
+ "在",
+ "人",
+ "有",
+ "我",
+ "他",
+ "这",
+ "个",
+ "们",
+ "中",
+ "来",
+ "上",
+ "大",
+ "为",
+ "和",
+ "国",
+ "地",
+ "到",
+ "以",
+ "说",
+ "时",
+ "要",
+ "就",
+ "出",
+ "会",
+ "可",
+ "也",
+ "你",
+ "对",
+ "生",
+ "能",
+ "而",
+ "子",
+ "那",
+ "得",
+ "于",
+ "着",
+ "下",
+ "自",
+ "之",
+ "年",
+ "过",
+ "发",
+ "后",
+ "作",
+ "里",
+ "用",
+ "道",
+ "行",
+ "所",
+ "然",
+ "家",
+ "种",
+ "事",
+ "成",
+ "方",
+ "多",
+ "经",
+ "么",
+ "去",
+ "法",
+ "学",
+ "如",
+ "都",
+ "同",
+ "现",
+ "当",
+ "没",
+ "动",
+ "面",
+ "起",
+ "看",
+ "定",
+ "天",
+ "分",
+ "还",
+ "进",
+ "好",
+ "小",
+ "部",
+ "其",
+ "些",
+ "主",
+ "样",
+ "理",
+ "心",
+ "她",
+ "本",
+ "前",
+ "开",
+ "但",
+ "因",
+ "只",
+ "从",
+ "想",
+ "实",
+ ],
+ "Ukrainian": [
+ "о",
+ "а",
+ "н",
+ "і",
+ "и",
+ "р",
+ "в",
+ "т",
+ "е",
+ "с",
+ "к",
+ "л",
+ "у",
+ "д",
+ "м",
+ "п",
+ "з",
+ "я",
+ "ь",
+ "б",
+ "г",
+ "й",
+ "ч",
+ "х",
+ "ц",
+ "ї",
+ ],
+ "Norwegian": [
+ "e",
+ "r",
+ "n",
+ "t",
+ "a",
+ "s",
+ "i",
+ "o",
+ "l",
+ "d",
+ "g",
+ "k",
+ "m",
+ "v",
+ "f",
+ "p",
+ "u",
+ "b",
+ "h",
+ "å",
+ "y",
+ "j",
+ "ø",
+ "c",
+ "æ",
+ "w",
+ ],
+ "Finnish": [
+ "a",
+ "i",
+ "n",
+ "t",
+ "e",
+ "s",
+ "l",
+ "o",
+ "u",
+ "k",
+ "ä",
+ "m",
+ "r",
+ "v",
+ "j",
+ "h",
+ "p",
+ "y",
+ "d",
+ "ö",
+ "g",
+ "c",
+ "b",
+ "f",
+ "w",
+ "z",
+ ],
+ "Vietnamese": [
+ "n",
+ "h",
+ "t",
+ "i",
+ "c",
+ "g",
+ "a",
+ "o",
+ "u",
+ "m",
+ "l",
+ "r",
+ "à",
+ "đ",
+ "s",
+ "e",
+ "v",
+ "p",
+ "b",
+ "y",
+ "ư",
+ "d",
+ "á",
+ "k",
+ "ộ",
+ "ế",
+ ],
+ "Czech": [
+ "o",
+ "e",
+ "a",
+ "n",
+ "t",
+ "s",
+ "i",
+ "l",
+ "v",
+ "r",
+ "k",
+ "d",
+ "u",
+ "m",
+ "p",
+ "í",
+ "c",
+ "h",
+ "z",
+ "á",
+ "y",
+ "j",
+ "b",
+ "ě",
+ "é",
+ "ř",
+ ],
+ "Hungarian": [
+ "e",
+ "a",
+ "t",
+ "l",
+ "s",
+ "n",
+ "k",
+ "r",
+ "i",
+ "o",
+ "z",
+ "á",
+ "é",
+ "g",
+ "m",
+ "b",
+ "y",
+ "v",
+ "d",
+ "h",
+ "u",
+ "p",
+ "j",
+ "ö",
+ "f",
+ "c",
+ ],
+ "Korean": [
+ "이",
+ "다",
+ "에",
+ "의",
+ "는",
+ "로",
+ "하",
+ "을",
+ "가",
+ "고",
+ "지",
+ "서",
+ "한",
+ "은",
+ "기",
+ "으",
+ "년",
+ "대",
+ "사",
+ "시",
+ "를",
+ "리",
+ "도",
+ "인",
+ "스",
+ "일",
+ ],
+ "Indonesian": [
+ "a",
+ "n",
+ "e",
+ "i",
+ "r",
+ "t",
+ "u",
+ "s",
+ "d",
+ "k",
+ "m",
+ "l",
+ "g",
+ "p",
+ "b",
+ "o",
+ "h",
+ "y",
+ "j",
+ "c",
+ "w",
+ "f",
+ "v",
+ "z",
+ "x",
+ "q",
+ ],
+ "Turkish": [
+ "a",
+ "e",
+ "i",
+ "n",
+ "r",
+ "l",
+ "ı",
+ "k",
+ "d",
+ "t",
+ "s",
+ "m",
+ "y",
+ "u",
+ "o",
+ "b",
+ "ü",
+ "ş",
+ "v",
+ "g",
+ "z",
+ "h",
+ "c",
+ "p",
+ "ç",
+ "ğ",
+ ],
+ "Romanian": [
+ "e",
+ "i",
+ "a",
+ "r",
+ "n",
+ "t",
+ "u",
+ "l",
+ "o",
+ "c",
+ "s",
+ "d",
+ "p",
+ "m",
+ "ă",
+ "f",
+ "v",
+ "î",
+ "g",
+ "b",
+ "ș",
+ "ț",
+ "z",
+ "h",
+ "â",
+ "j",
+ ],
+ "Farsi": [
+ "ا",
+ "ی",
+ "ر",
+ "د",
+ "ن",
+ "ه",
+ "و",
+ "م",
+ "ت",
+ "ب",
+ "س",
+ "ل",
+ "ک",
+ "ش",
+ "ز",
+ "ف",
+ "گ",
+ "ع",
+ "خ",
+ "ق",
+ "ج",
+ "آ",
+ "پ",
+ "ح",
+ "ط",
+ "ص",
+ ],
+ "Arabic": [
+ "ا",
+ "ل",
+ "ي",
+ "م",
+ "و",
+ "ن",
+ "ر",
+ "ت",
+ "ب",
+ "ة",
+ "ع",
+ "د",
+ "س",
+ "ف",
+ "ه",
+ "ك",
+ "ق",
+ "أ",
+ "ح",
+ "ج",
+ "ش",
+ "ط",
+ "ص",
+ "ى",
+ "خ",
+ "إ",
+ ],
+ "Danish": [
+ "e",
+ "r",
+ "n",
+ "t",
+ "a",
+ "i",
+ "s",
+ "d",
+ "l",
+ "o",
+ "g",
+ "m",
+ "k",
+ "f",
+ "v",
+ "u",
+ "b",
+ "h",
+ "p",
+ "å",
+ "y",
+ "ø",
+ "æ",
+ "c",
+ "j",
+ "w",
+ ],
+ "Serbian": [
+ "а",
+ "и",
+ "о",
+ "е",
+ "н",
+ "р",
+ "с",
+ "у",
+ "т",
+ "к",
+ "ј",
+ "в",
+ "д",
+ "м",
+ "п",
+ "л",
+ "г",
+ "з",
+ "б",
+ "a",
+ "i",
+ "e",
+ "o",
+ "n",
+ "ц",
+ "ш",
+ ],
+ "Lithuanian": [
+ "i",
+ "a",
+ "s",
+ "o",
+ "r",
+ "e",
+ "t",
+ "n",
+ "u",
+ "k",
+ "m",
+ "l",
+ "p",
+ "v",
+ "d",
+ "j",
+ "g",
+ "ė",
+ "b",
+ "y",
+ "ų",
+ "š",
+ "ž",
+ "c",
+ "ą",
+ "į",
+ ],
+ "Slovene": [
+ "e",
+ "a",
+ "i",
+ "o",
+ "n",
+ "r",
+ "s",
+ "l",
+ "t",
+ "j",
+ "v",
+ "k",
+ "d",
+ "p",
+ "m",
+ "u",
+ "z",
+ "b",
+ "g",
+ "h",
+ "č",
+ "c",
+ "š",
+ "ž",
+ "f",
+ "y",
+ ],
+ "Slovak": [
+ "o",
+ "a",
+ "e",
+ "n",
+ "i",
+ "r",
+ "v",
+ "t",
+ "s",
+ "l",
+ "k",
+ "d",
+ "m",
+ "p",
+ "u",
+ "c",
+ "h",
+ "j",
+ "b",
+ "z",
+ "á",
+ "y",
+ "ý",
+ "í",
+ "č",
+ "é",
+ ],
+ "Hebrew": [
+ "י",
+ "ו",
+ "ה",
+ "ל",
+ "ר",
+ "ב",
+ "ת",
+ "מ",
+ "א",
+ "ש",
+ "נ",
+ "ע",
+ "ם",
+ "ד",
+ "ק",
+ "ח",
+ "פ",
+ "ס",
+ "כ",
+ "ג",
+ "ט",
+ "צ",
+ "ן",
+ "ז",
+ "ך",
+ ],
+ "Bulgarian": [
+ "а",
+ "и",
+ "о",
+ "е",
+ "н",
+ "т",
+ "р",
+ "с",
+ "в",
+ "л",
+ "к",
+ "д",
+ "п",
+ "м",
+ "з",
+ "г",
+ "я",
+ "ъ",
+ "у",
+ "б",
+ "ч",
+ "ц",
+ "й",
+ "ж",
+ "щ",
+ "х",
+ ],
+ "Croatian": [
+ "a",
+ "i",
+ "o",
+ "e",
+ "n",
+ "r",
+ "j",
+ "s",
+ "t",
+ "u",
+ "k",
+ "l",
+ "v",
+ "d",
+ "m",
+ "p",
+ "g",
+ "z",
+ "b",
+ "c",
+ "č",
+ "h",
+ "š",
+ "ž",
+ "ć",
+ "f",
+ ],
+ "Hindi": [
+ "क",
+ "र",
+ "स",
+ "न",
+ "त",
+ "म",
+ "ह",
+ "प",
+ "य",
+ "ल",
+ "व",
+ "ज",
+ "द",
+ "ग",
+ "ब",
+ "श",
+ "ट",
+ "अ",
+ "ए",
+ "थ",
+ "भ",
+ "ड",
+ "च",
+ "ध",
+ "ष",
+ "इ",
+ ],
+ "Estonian": [
+ "a",
+ "i",
+ "e",
+ "s",
+ "t",
+ "l",
+ "u",
+ "n",
+ "o",
+ "k",
+ "r",
+ "d",
+ "m",
+ "v",
+ "g",
+ "p",
+ "j",
+ "h",
+ "ä",
+ "b",
+ "õ",
+ "ü",
+ "f",
+ "c",
+ "ö",
+ "y",
+ ],
+ "Thai": [
+ "า",
+ "น",
+ "ร",
+ "อ",
+ "ก",
+ "เ",
+ "ง",
+ "ม",
+ "ย",
+ "ล",
+ "ว",
+ "ด",
+ "ท",
+ "ส",
+ "ต",
+ "ะ",
+ "ป",
+ "บ",
+ "ค",
+ "ห",
+ "แ",
+ "จ",
+ "พ",
+ "ช",
+ "ข",
+ "ใ",
+ ],
+ "Greek": [
+ "α",
+ "τ",
+ "ο",
+ "ι",
+ "ε",
+ "ν",
+ "ρ",
+ "σ",
+ "κ",
+ "η",
+ "π",
+ "ς",
+ "υ",
+ "μ",
+ "λ",
+ "ί",
+ "ό",
+ "ά",
+ "γ",
+ "έ",
+ "δ",
+ "ή",
+ "ω",
+ "χ",
+ "θ",
+ "ύ",
+ ],
+ "Tamil": [
+ "க",
+ "த",
+ "ப",
+ "ட",
+ "ர",
+ "ம",
+ "ல",
+ "ன",
+ "வ",
+ "ற",
+ "ய",
+ "ள",
+ "ச",
+ "ந",
+ "இ",
+ "ண",
+ "அ",
+ "ஆ",
+ "ழ",
+ "ங",
+ "எ",
+ "உ",
+ "ஒ",
+ "ஸ",
+ ],
+ "Kazakh": [
+ "а",
+ "ы",
+ "е",
+ "н",
+ "т",
+ "р",
+ "л",
+ "і",
+ "д",
+ "с",
+ "м",
+ "қ",
+ "к",
+ "о",
+ "б",
+ "и",
+ "у",
+ "ғ",
+ "ж",
+ "ң",
+ "з",
+ "ш",
+ "й",
+ "п",
+ "г",
+ "ө",
+ ],
+}
diff --git a/Voltage Vikings/lib/python3.9/site-packages/charset_normalizer/cd.py b/Voltage Vikings/lib/python3.9/site-packages/charset_normalizer/cd.py
new file mode 100644
index 00000000..6e56fe84
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/charset_normalizer/cd.py
@@ -0,0 +1,390 @@
+import importlib
+from codecs import IncrementalDecoder
+from collections import Counter
+from functools import lru_cache
+from typing import Counter as TypeCounter, Dict, List, Optional, Tuple
+
+from .assets import FREQUENCIES
+from .constant import KO_NAMES, LANGUAGE_SUPPORTED_COUNT, TOO_SMALL_SEQUENCE, ZH_NAMES
+from .md import is_suspiciously_successive_range
+from .models import CoherenceMatches
+from .utils import (
+ is_accentuated,
+ is_latin,
+ is_multi_byte_encoding,
+ is_unicode_range_secondary,
+ unicode_range,
+)
+
+
def encoding_unicode_range(iana_name: str) -> List[str]:
    """
    Return associated unicode ranges in a single byte code page.
    """
    if is_multi_byte_encoding(iana_name):
        raise IOError("Function not supported on multi-byte code page")

    decoder_cls = importlib.import_module(
        "encodings.{}".format(iana_name)
    ).IncrementalDecoder

    incremental_decoder: IncrementalDecoder = decoder_cls(errors="ignore")
    range_occurrences: Dict[str, int] = {}
    total_characters: int = 0

    # Probe the upper, language-specific half of the code page (0x40-0xFE);
    # undecodable bytes yield an empty chunk and are skipped.
    for byte_value in range(0x40, 0xFF):
        decoded: str = incremental_decoder.decode(bytes([byte_value]))

        if not decoded:
            continue

        discovered_range: Optional[str] = unicode_range(decoded)

        if discovered_range is None:
            continue

        # Only primary (non-secondary) ranges are representative of a page.
        if is_unicode_range_secondary(discovered_range) is False:
            range_occurrences[discovered_range] = (
                range_occurrences.get(discovered_range, 0) + 1
            )
            total_characters += 1

    # Keep ranges that make up a meaningful share (>= 15 %) of the page.
    return sorted(
        range_name
        for range_name, count in range_occurrences.items()
        if count / total_characters >= 0.15
    )
+
+
def unicode_range_languages(primary_range: str) -> List[str]:
    """
    Return inferred languages used with a unicode range.
    """
    matching_languages: List[str] = []

    for language, characters in FREQUENCIES.items():
        # any() short-circuits on the first character that lands in the range,
        # matching the original "append then break" behaviour.
        if any(unicode_range(character) == primary_range for character in characters):
            matching_languages.append(language)

    return matching_languages
+
+
@lru_cache()
def encoding_languages(iana_name: str) -> List[str]:
    """
    Single-byte encoding language association. Some code page are heavily linked to particular language(s).
    This function does the correspondence.
    """
    # Pick the first non-Latin range reported for this code page, if any.
    primary_range: Optional[str] = next(
        (
            specified_range
            for specified_range in encoding_unicode_range(iana_name)
            if "Latin" not in specified_range
        ),
        None,
    )

    if primary_range is None:
        # Nothing but Latin ranges: no specific language can be inferred.
        return ["Latin Based"]

    return unicode_range_languages(primary_range)
+
+
+@lru_cache()
+def mb_encoding_languages(iana_name: str) -> List[str]:
+ """
+ Multi-byte encoding language association. Some code page are heavily linked to particular language(s).
+ This function does the correspondence.
+ """
+ if (
+ iana_name.startswith("shift_")
+ or iana_name.startswith("iso2022_jp")
+ or iana_name.startswith("euc_j")
+ or iana_name == "cp932"
+ ):
+ return ["Japanese"]
+ if iana_name.startswith("gb") or iana_name in ZH_NAMES:
+ return ["Chinese"]
+ if iana_name.startswith("iso2022_kr") or iana_name in KO_NAMES:
+ return ["Korean"]
+
+ return []
+
+
@lru_cache(maxsize=LANGUAGE_SUPPORTED_COUNT)
def get_target_features(language: str) -> Tuple[bool, bool]:
    """
    Determine main aspects from a supported language if it contains accents and if is pure Latin.
    """
    have_accents: bool = False
    pure_latin: bool = True

    for character in FREQUENCIES[language]:
        if is_accentuated(character):
            have_accents = True
        if is_latin(character) is False:
            pure_latin = False

    return have_accents, pure_latin
+
+
def alphabet_languages(
    characters: List[str], ignore_non_latin: bool = False
) -> List[str]:
    """
    Return associated languages associated to given characters.
    """
    scored_languages: List[Tuple[str, float]] = []

    source_have_accents: bool = any(
        is_accentuated(character) for character in characters
    )

    for language, language_characters in FREQUENCIES.items():
        target_have_accents, target_pure_latin = get_target_features(language)

        # Guard clauses: skip languages incompatible with the source alphabet.
        if ignore_non_latin and target_pure_latin is False:
            continue
        if source_have_accents and target_have_accents is False:
            continue

        matched: int = sum(
            1 for candidate in language_characters if candidate in characters
        )
        coverage: float = matched / len(language_characters)

        # A language is plausible once 20 % of its alphabet is covered.
        if coverage >= 0.2:
            scored_languages.append((language, coverage))

    scored_languages.sort(key=lambda entry: entry[1], reverse=True)

    return [language for language, _ in scored_languages]
+
+
def characters_popularity_compare(
    language: str, ordered_characters: List[str]
) -> float:
    """
    Determine if an ordered characters list (by occurrence from most appearance to rarest) matches a particular language.
    The result is a ratio between 0. (absolutely no correspondence) and 1. (near perfect fit).
    Beware that this function is not strict on the match in order to ease the detection. (Meaning close match is 1.)
    """
    if language not in FREQUENCIES:
        raise ValueError("{} not available".format(language))

    character_approved_count: int = 0
    # Set view for O(1) membership tests; the list form is kept for rank lookups.
    FREQUENCIES_language_set = set(FREQUENCIES[language])

    ordered_characters_count: int = len(ordered_characters)
    target_language_characters_count: int = len(FREQUENCIES[language])

    # Alphabets larger than Latin's 26 letters get a looser rank tolerance below.
    large_alphabet: bool = target_language_characters_count > 26

    for character, character_rank in zip(
        ordered_characters, range(0, ordered_characters_count)
    ):
        if character not in FREQUENCIES_language_set:
            continue

        # Rank of the character in the language's reference frequency table.
        character_rank_in_language: int = FREQUENCIES[language].index(character)
        # Rescale the observed rank onto the reference table's length so the
        # two ranks are comparable even when the lists differ in size.
        expected_projection_ratio: float = (
            target_language_characters_count / ordered_characters_count
        )
        character_rank_projection: int = int(character_rank * expected_projection_ratio)

        # Small alphabet: reject outright when the projected rank is more than
        # 4 positions away from the reference rank.
        if (
            large_alphabet is False
            and abs(character_rank_projection - character_rank_in_language) > 4
        ):
            continue

        # Large alphabet: approve directly when the rank deviation stays under
        # a third of the reference table's length.
        if (
            large_alphabet is True
            and abs(character_rank_projection - character_rank_in_language)
            < target_language_characters_count / 3
        ):
            character_approved_count += 1
            continue

        # Fallback neighborhood check: compare which characters precede/follow
        # this one in the reference table versus in the observed ordering.
        characters_before_source: List[str] = FREQUENCIES[language][
            0:character_rank_in_language
        ]
        characters_after_source: List[str] = FREQUENCIES[language][
            character_rank_in_language:
        ]
        characters_before: List[str] = ordered_characters[0:character_rank]
        characters_after: List[str] = ordered_characters[character_rank:]

        before_match_count: int = len(
            set(characters_before) & set(characters_before_source)
        )

        after_match_count: int = len(
            set(characters_after) & set(characters_after_source)
        )

        # Edge positions (first/last reference entry) are approved leniently
        # when only a handful of neighbors match.
        if len(characters_before_source) == 0 and before_match_count <= 4:
            character_approved_count += 1
            continue

        if len(characters_after_source) == 0 and after_match_count <= 4:
            character_approved_count += 1
            continue

        # Otherwise require 40 % neighborhood agreement on either side.
        if (
            before_match_count / len(characters_before_source) >= 0.4
            or after_match_count / len(characters_after_source) >= 0.4
        ):
            character_approved_count += 1
            continue

    return character_approved_count / len(ordered_characters)
+
+
def alpha_unicode_split(decoded_sequence: str) -> List[str]:
    """
    Given a decoded text sequence, return a list of str. Unicode range / alphabet separation.
    Ex. a text containing English/Latin with a bit a Hebrew will return two items in the resulting list;
    One containing the latin letters and the other hebrew.
    """
    layers: Dict[str, str] = {}

    for character in decoded_sequence:
        # Only alphabetic characters take part in alphabet separation.
        if character.isalpha() is False:
            continue

        character_range: Optional[str] = unicode_range(character)

        if character_range is None:
            continue

        # Reuse the first already-open layer whose range is a plausible
        # companion of this character's range; otherwise open a new layer
        # keyed by the character's own range.
        destination_range: Optional[str] = next(
            (
                opened_range
                for opened_range in layers
                if is_suspiciously_successive_range(opened_range, character_range)
                is False
            ),
            None,
        )

        if destination_range is None:
            destination_range = character_range

        layers[destination_range] = (
            layers.get(destination_range, "") + character.lower()
        )

    return list(layers.values())
+
+
def merge_coherence_ratios(results: List["CoherenceMatches"]) -> "CoherenceMatches":
    """
    This function merge results previously given by the function coherence_ratio.
    The return type is the same as coherence_ratio.

    :param results: Per-chunk coherence results, each a list of (language, ratio) pairs.
    :return: One (language, mean ratio) pair per language, best ratio first.
    """
    # Group every observed ratio by language. dict.setdefault replaces the
    # hand-rolled "if key missing, seed a list" pattern.
    per_language_ratios: Dict[str, List[float]] = {}
    for result in results:
        for language, ratio in result:
            per_language_ratios.setdefault(language, []).append(ratio)

    # Average each language's ratios; 4-decimal rounding mirrors coherence_ratio.
    merge = [
        (language, round(sum(ratios) / len(ratios), 4))
        for language, ratios in per_language_ratios.items()
    ]

    return sorted(merge, key=lambda x: x[1], reverse=True)
+
+
def filter_alt_coherence_matches(results: "CoherenceMatches") -> "CoherenceMatches":
    """
    We shall NOT return "English—" in CoherenceMatches because it is an alternative
    of "English". This function only keeps the best match and remove the em-dash in it.

    :param results: (language, ratio) pairs, possibly containing em-dash variants
                    of the same language (e.g. "English" and "English—").
    :return: The same pairs with variants collapsed to the best ratio per base name;
             `results` untouched when no language had more than one variant.
    """
    index_results: Dict[str, List[float]] = {}

    for language, ratio in results:
        # "English—" / "Japanese——" are alternative alphabets of the same
        # language; stripping the em-dash groups them under one base name.
        index_results.setdefault(language.replace("—", ""), []).append(ratio)

    # Only rebuild the list when at least one language actually had variants.
    if any(len(ratios) > 1 for ratios in index_results.values()):
        return [
            (language, max(ratios)) for language, ratios in index_results.items()
        ]

    return results
+
+
@lru_cache(maxsize=2048)
def coherence_ratio(
    decoded_sequence: str, threshold: float = 0.1, lg_inclusion: Optional[str] = None
) -> CoherenceMatches:
    """
    Detect ANY language that can be identified in given sequence. The sequence will be analysed by layers.
    A layer = Character extraction by alphabets/ranges.

    :param decoded_sequence: Already-decoded text to analyse.
    :param threshold: Minimum per-language ratio kept in the results.
    :param lg_inclusion: Optional comma-separated list restricting candidate languages.
    """

    results: List[Tuple[str, float]] = []
    ignore_non_latin: bool = False

    sufficient_match_count: int = 0

    lg_inclusion_list = lg_inclusion.split(",") if lg_inclusion is not None else []
    # "Latin Based" is a pseudo-language flag, not a real candidate: it only
    # toggles the non-Latin filter and is removed from the candidate list.
    if "Latin Based" in lg_inclusion_list:
        ignore_non_latin = True
        lg_inclusion_list.remove("Latin Based")

    # One layer per alphabet/unicode range present in the text.
    for layer in alpha_unicode_split(decoded_sequence):
        sequence_frequencies: TypeCounter[str] = Counter(layer)
        most_common = sequence_frequencies.most_common()

        character_count: int = sum(o for c, o in most_common)

        # Too little material in this layer to draw a reliable conclusion.
        if character_count <= TOO_SMALL_SEQUENCE:
            continue

        # Characters ordered from most to least frequent in this layer.
        popular_character_ordered: List[str] = [c for c, o in most_common]

        for language in lg_inclusion_list or alphabet_languages(
            popular_character_ordered, ignore_non_latin
        ):
            ratio: float = characters_popularity_compare(
                language, popular_character_ordered
            )

            if ratio < threshold:
                continue
            elif ratio >= 0.8:
                # Near-certain match; three of these end the search early.
                sufficient_match_count += 1

            results.append((language, round(ratio, 4)))

            if sufficient_match_count >= 3:
                break

    return sorted(
        filter_alt_coherence_matches(results), key=lambda x: x[1], reverse=True
    )
diff --git a/Voltage Vikings/lib/python3.9/site-packages/charset_normalizer/cli/__init__.py b/Voltage Vikings/lib/python3.9/site-packages/charset_normalizer/cli/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/Voltage Vikings/lib/python3.9/site-packages/charset_normalizer/cli/normalizer.py b/Voltage Vikings/lib/python3.9/site-packages/charset_normalizer/cli/normalizer.py
new file mode 100644
index 00000000..f4bcbaac
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/charset_normalizer/cli/normalizer.py
@@ -0,0 +1,296 @@
+import argparse
+import sys
+from json import dumps
+from os.path import abspath, basename, dirname, join, realpath
+from platform import python_version
+from typing import List, Optional
+from unicodedata import unidata_version
+
+import charset_normalizer.md as md_module
+from charset_normalizer import from_fp
+from charset_normalizer.models import CliDetectionResult
+from charset_normalizer.version import __version__
+
+
def query_yes_no(question: str, default: Optional[str] = "yes") -> bool:
    """Ask a yes/no question via input() and return their answer.

    "question" is a string that is presented to the user.
    "default" is the presumed answer if the user just hits <Enter>.
    It must be "yes" (the default), "no" or None (meaning
    an answer is required of the user).

    The "answer" return value is True for "yes" or False for "no".

    :raises ValueError: if "default" is not "yes", "no" or None.

    Credit goes to (c) https://stackoverflow.com/questions/3041986/apt-command-line-interface-like-yes-no-input
    """
    valid = {"yes": True, "y": True, "ye": True, "no": False, "n": False}
    # None is an explicitly supported default, hence Optional[str] above.
    if default is None:
        prompt = " [y/n] "
    elif default == "yes":
        prompt = " [Y/n] "
    elif default == "no":
        prompt = " [y/N] "
    else:
        raise ValueError("invalid default answer: '%s'" % default)

    # Re-prompt until an interpretable answer is given.
    while True:
        sys.stdout.write(question + prompt)
        choice = input().lower()
        if default is not None and choice == "":
            return valid[default]
        elif choice in valid:
            return valid[choice]
        else:
            sys.stdout.write("Please respond with 'yes' or 'no' " "(or 'y' or 'n').\n")
+
+
def cli_detect(argv: Optional[List[str]] = None) -> int:
    """
    CLI assistant using ARGV and ArgumentParser
    :param argv: Command-line arguments; argparse falls back to sys.argv[1:] when None.
    :return: 0 if everything is fine, anything else equal trouble
    """
    parser = argparse.ArgumentParser(
        description="The Real First Universal Charset Detector. "
        "Discover originating encoding used on text file. "
        "Normalize text to unicode."
    )

    parser.add_argument(
        "files", type=argparse.FileType("rb"), nargs="+", help="File(s) to be analysed"
    )
    parser.add_argument(
        "-v",
        "--verbose",
        action="store_true",
        default=False,
        dest="verbose",
        help="Display complementary information about file if any. "
        "Stdout will contain logs about the detection process.",
    )
    parser.add_argument(
        "-a",
        "--with-alternative",
        action="store_true",
        default=False,
        dest="alternatives",
        help="Output complementary possibilities if any. Top-level JSON WILL be a list.",
    )
    parser.add_argument(
        "-n",
        "--normalize",
        action="store_true",
        default=False,
        dest="normalize",
        help="Permit to normalize input file. If not set, program does not write anything.",
    )
    parser.add_argument(
        "-m",
        "--minimal",
        action="store_true",
        default=False,
        dest="minimal",
        help="Only output the charset detected to STDOUT. Disabling JSON output.",
    )
    parser.add_argument(
        "-r",
        "--replace",
        action="store_true",
        default=False,
        dest="replace",
        help="Replace file when trying to normalize it instead of creating a new one.",
    )
    parser.add_argument(
        "-f",
        "--force",
        action="store_true",
        default=False,
        dest="force",
        help="Replace file without asking if you are sure, use this flag with caution.",
    )
    parser.add_argument(
        "-t",
        "--threshold",
        action="store",
        default=0.2,
        type=float,
        dest="threshold",
        help="Define a custom maximum amount of chaos allowed in decoded content. 0. <= chaos <= 1.",
    )
    parser.add_argument(
        "--version",
        action="version",
        version="Charset-Normalizer {} - Python {} - Unicode {} - SpeedUp {}".format(
            __version__,
            python_version(),
            unidata_version,
            # A compiled (non-.py) md module indicates the mypyc speedup build.
            "OFF" if md_module.__file__.lower().endswith(".py") else "ON",
        ),
        help="Show version information and exit.",
    )

    args = parser.parse_args(argv)

    # Flag-combination sanity checks: --replace needs --normalize,
    # --force needs --replace, threshold must be a valid ratio.
    if args.replace is True and args.normalize is False:
        print("Use --replace in addition of --normalize only.", file=sys.stderr)
        return 1

    if args.force is True and args.replace is False:
        print("Use --force in addition of --replace only.", file=sys.stderr)
        return 1

    if args.threshold < 0.0 or args.threshold > 1.0:
        print("--threshold VALUE should be between 0. AND 1.", file=sys.stderr)
        return 1

    # Accumulates one CliDetectionResult per file (plus alternatives if asked).
    x_ = []

    for my_file in args.files:
        matches = from_fp(my_file, threshold=args.threshold, explain=args.verbose)

        best_guess = matches.best()

        if best_guess is None:
            # Detection failed: record a placeholder entry and report on stderr.
            print(
                'Unable to identify originating encoding for "{}". {}'.format(
                    my_file.name,
                    "Maybe try increasing maximum amount of chaos."
                    if args.threshold < 1.0
                    else "",
                ),
                file=sys.stderr,
            )
            x_.append(
                CliDetectionResult(
                    abspath(my_file.name),
                    None,
                    [],
                    [],
                    "Unknown",
                    [],
                    False,
                    1.0,
                    0.0,
                    None,
                    True,
                )
            )
        else:
            x_.append(
                CliDetectionResult(
                    abspath(my_file.name),
                    best_guess.encoding,
                    best_guess.encoding_aliases,
                    [
                        cp
                        for cp in best_guess.could_be_from_charset
                        if cp != best_guess.encoding
                    ],
                    best_guess.language,
                    best_guess.alphabets,
                    best_guess.bom,
                    best_guess.percent_chaos,
                    best_guess.percent_coherence,
                    None,
                    True,
                )
            )

            # Append the non-best candidates, flagged as not-preferred.
            if len(matches) > 1 and args.alternatives:
                for el in matches:
                    if el != best_guess:
                        x_.append(
                            CliDetectionResult(
                                abspath(my_file.name),
                                el.encoding,
                                el.encoding_aliases,
                                [
                                    cp
                                    for cp in el.could_be_from_charset
                                    if cp != el.encoding
                                ],
                                el.language,
                                el.alphabets,
                                el.bom,
                                el.percent_chaos,
                                el.percent_coherence,
                                None,
                                False,
                            )
                        )

            if args.normalize is True:
                # Already unicode: nothing to rewrite.
                if best_guess.encoding.startswith("utf") is True:
                    print(
                        '"{}" file does not need to be normalized, as it already came from unicode.'.format(
                            my_file.name
                        ),
                        file=sys.stderr,
                    )
                    if my_file.closed is False:
                        my_file.close()
                    continue

                dir_path = dirname(realpath(my_file.name))
                file_name = basename(realpath(my_file.name))

                o_: List[str] = file_name.split(".")

                if args.replace is False:
                    # Insert the detected encoding before the extension,
                    # e.g. "doc.txt" -> "doc.cp1252.txt".
                    o_.insert(-1, best_guess.encoding)
                    if my_file.closed is False:
                        my_file.close()
                elif (
                    args.force is False
                    and query_yes_no(
                        'Are you sure to normalize "{}" by replacing it ?'.format(
                            my_file.name
                        ),
                        "no",
                    )
                    is False
                ):
                    if my_file.closed is False:
                        my_file.close()
                    continue

                try:
                    # NOTE(review): x_[0] always targets the FIRST file's
                    # result; with several input files this looks wrong for
                    # later files — confirm intended behaviour.
                    x_[0].unicode_path = join(dir_path, ".".join(o_))

                    with open(x_[0].unicode_path, "w", encoding="utf-8") as fp:
                        fp.write(str(best_guess))
                except IOError as e:
                    print(str(e), file=sys.stderr)
                    if my_file.closed is False:
                        my_file.close()
                    return 2

        if my_file.closed is False:
            my_file.close()

    if args.minimal is False:
        # Full JSON report: a single object for one file, a list otherwise.
        print(
            dumps(
                [el.__dict__ for el in x_] if len(x_) > 1 else x_[0].__dict__,
                ensure_ascii=True,
                indent=4,
            )
        )
    else:
        # Minimal mode: one line of detected encoding(s) per input file.
        for my_file in args.files:
            print(
                ", ".join(
                    [
                        el.encoding or "undefined"
                        for el in x_
                        if el.path == abspath(my_file.name)
                    ]
                )
            )

    return 0
+
+
if __name__ == "__main__":
    # Allow running the CLI directly, e.g. `python normalizer.py <file>`.
    cli_detect()
diff --git a/Voltage Vikings/lib/python3.9/site-packages/charset_normalizer/constant.py b/Voltage Vikings/lib/python3.9/site-packages/charset_normalizer/constant.py
new file mode 100644
index 00000000..3188108d
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/charset_normalizer/constant.py
@@ -0,0 +1,495 @@
+from codecs import BOM_UTF8, BOM_UTF16_BE, BOM_UTF16_LE, BOM_UTF32_BE, BOM_UTF32_LE
+from encodings.aliases import aliases
+from re import IGNORECASE, compile as re_compile
+from typing import Dict, List, Set, Union
+
+from .assets import FREQUENCIES
+
+# Maps each eligible encoding to its byte signature (SIG/BOM): either a single
+# bytes marker or a list of alternative markers.
+ENCODING_MARKS: Dict[str, Union[bytes, List[bytes]]] = {
+    "utf_8": BOM_UTF8,
+    # UTF-7 has several alternative opening sequences.
+    "utf_7": [
+        b"\x2b\x2f\x76\x38",
+        b"\x2b\x2f\x76\x39",
+        b"\x2b\x2f\x76\x2b",
+        b"\x2b\x2f\x76\x2f",
+        b"\x2b\x2f\x76\x38\x2d",
+    ],
+    # GB18030 four-byte signature.
+    "gb18030": b"\x84\x31\x95\x33",
+    "utf_32": [BOM_UTF32_BE, BOM_UTF32_LE],
+    "utf_16": [BOM_UTF16_BE, BOM_UTF16_LE],
+}
+
+# Payloads shorter than this (in bytes) are considered too small for reliable detection.
+TOO_SMALL_SEQUENCE: int = 32
+# Payloads larger than this (10 000 000 bytes) are considered too big to scan whole.
+TOO_BIG_SEQUENCE: int = int(10e6)
+
+# Number of code points encodable by UTF-8 (all Unicode scalar values).
+UTF8_MAXIMAL_ALLOCATION: int = 1112064
+
+UNICODE_RANGES_COMBINED: Dict[str, range] = {
+ "Control character": range(31 + 1),
+ "Basic Latin": range(32, 127 + 1),
+ "Latin-1 Supplement": range(128, 255 + 1),
+ "Latin Extended-A": range(256, 383 + 1),
+ "Latin Extended-B": range(384, 591 + 1),
+ "IPA Extensions": range(592, 687 + 1),
+ "Spacing Modifier Letters": range(688, 767 + 1),
+ "Combining Diacritical Marks": range(768, 879 + 1),
+ "Greek and Coptic": range(880, 1023 + 1),
+ "Cyrillic": range(1024, 1279 + 1),
+ "Cyrillic Supplement": range(1280, 1327 + 1),
+ "Armenian": range(1328, 1423 + 1),
+ "Hebrew": range(1424, 1535 + 1),
+ "Arabic": range(1536, 1791 + 1),
+ "Syriac": range(1792, 1871 + 1),
+ "Arabic Supplement": range(1872, 1919 + 1),
+ "Thaana": range(1920, 1983 + 1),
+ "NKo": range(1984, 2047 + 1),
+ "Samaritan": range(2048, 2111 + 1),
+ "Mandaic": range(2112, 2143 + 1),
+ "Syriac Supplement": range(2144, 2159 + 1),
+ "Arabic Extended-A": range(2208, 2303 + 1),
+ "Devanagari": range(2304, 2431 + 1),
+ "Bengali": range(2432, 2559 + 1),
+ "Gurmukhi": range(2560, 2687 + 1),
+ "Gujarati": range(2688, 2815 + 1),
+ "Oriya": range(2816, 2943 + 1),
+ "Tamil": range(2944, 3071 + 1),
+ "Telugu": range(3072, 3199 + 1),
+ "Kannada": range(3200, 3327 + 1),
+ "Malayalam": range(3328, 3455 + 1),
+ "Sinhala": range(3456, 3583 + 1),
+ "Thai": range(3584, 3711 + 1),
+ "Lao": range(3712, 3839 + 1),
+ "Tibetan": range(3840, 4095 + 1),
+ "Myanmar": range(4096, 4255 + 1),
+ "Georgian": range(4256, 4351 + 1),
+ "Hangul Jamo": range(4352, 4607 + 1),
+ "Ethiopic": range(4608, 4991 + 1),
+ "Ethiopic Supplement": range(4992, 5023 + 1),
+ "Cherokee": range(5024, 5119 + 1),
+ "Unified Canadian Aboriginal Syllabics": range(5120, 5759 + 1),
+ "Ogham": range(5760, 5791 + 1),
+ "Runic": range(5792, 5887 + 1),
+ "Tagalog": range(5888, 5919 + 1),
+ "Hanunoo": range(5920, 5951 + 1),
+ "Buhid": range(5952, 5983 + 1),
+ "Tagbanwa": range(5984, 6015 + 1),
+ "Khmer": range(6016, 6143 + 1),
+ "Mongolian": range(6144, 6319 + 1),
+ "Unified Canadian Aboriginal Syllabics Extended": range(6320, 6399 + 1),
+ "Limbu": range(6400, 6479 + 1),
+ "Tai Le": range(6480, 6527 + 1),
+ "New Tai Lue": range(6528, 6623 + 1),
+ "Khmer Symbols": range(6624, 6655 + 1),
+ "Buginese": range(6656, 6687 + 1),
+ "Tai Tham": range(6688, 6831 + 1),
+ "Combining Diacritical Marks Extended": range(6832, 6911 + 1),
+ "Balinese": range(6912, 7039 + 1),
+ "Sundanese": range(7040, 7103 + 1),
+ "Batak": range(7104, 7167 + 1),
+ "Lepcha": range(7168, 7247 + 1),
+ "Ol Chiki": range(7248, 7295 + 1),
+ "Cyrillic Extended C": range(7296, 7311 + 1),
+ "Sundanese Supplement": range(7360, 7375 + 1),
+ "Vedic Extensions": range(7376, 7423 + 1),
+ "Phonetic Extensions": range(7424, 7551 + 1),
+ "Phonetic Extensions Supplement": range(7552, 7615 + 1),
+ "Combining Diacritical Marks Supplement": range(7616, 7679 + 1),
+ "Latin Extended Additional": range(7680, 7935 + 1),
+ "Greek Extended": range(7936, 8191 + 1),
+ "General Punctuation": range(8192, 8303 + 1),
+ "Superscripts and Subscripts": range(8304, 8351 + 1),
+ "Currency Symbols": range(8352, 8399 + 1),
+ "Combining Diacritical Marks for Symbols": range(8400, 8447 + 1),
+ "Letterlike Symbols": range(8448, 8527 + 1),
+ "Number Forms": range(8528, 8591 + 1),
+ "Arrows": range(8592, 8703 + 1),
+ "Mathematical Operators": range(8704, 8959 + 1),
+ "Miscellaneous Technical": range(8960, 9215 + 1),
+ "Control Pictures": range(9216, 9279 + 1),
+ "Optical Character Recognition": range(9280, 9311 + 1),
+ "Enclosed Alphanumerics": range(9312, 9471 + 1),
+ "Box Drawing": range(9472, 9599 + 1),
+ "Block Elements": range(9600, 9631 + 1),
+ "Geometric Shapes": range(9632, 9727 + 1),
+ "Miscellaneous Symbols": range(9728, 9983 + 1),
+ "Dingbats": range(9984, 10175 + 1),
+ "Miscellaneous Mathematical Symbols-A": range(10176, 10223 + 1),
+ "Supplemental Arrows-A": range(10224, 10239 + 1),
+ "Braille Patterns": range(10240, 10495 + 1),
+ "Supplemental Arrows-B": range(10496, 10623 + 1),
+ "Miscellaneous Mathematical Symbols-B": range(10624, 10751 + 1),
+ "Supplemental Mathematical Operators": range(10752, 11007 + 1),
+ "Miscellaneous Symbols and Arrows": range(11008, 11263 + 1),
+ "Glagolitic": range(11264, 11359 + 1),
+ "Latin Extended-C": range(11360, 11391 + 1),
+ "Coptic": range(11392, 11519 + 1),
+ "Georgian Supplement": range(11520, 11567 + 1),
+ "Tifinagh": range(11568, 11647 + 1),
+ "Ethiopic Extended": range(11648, 11743 + 1),
+ "Cyrillic Extended-A": range(11744, 11775 + 1),
+ "Supplemental Punctuation": range(11776, 11903 + 1),
+ "CJK Radicals Supplement": range(11904, 12031 + 1),
+ "Kangxi Radicals": range(12032, 12255 + 1),
+ "Ideographic Description Characters": range(12272, 12287 + 1),
+ "CJK Symbols and Punctuation": range(12288, 12351 + 1),
+ "Hiragana": range(12352, 12447 + 1),
+ "Katakana": range(12448, 12543 + 1),
+ "Bopomofo": range(12544, 12591 + 1),
+ "Hangul Compatibility Jamo": range(12592, 12687 + 1),
+ "Kanbun": range(12688, 12703 + 1),
+ "Bopomofo Extended": range(12704, 12735 + 1),
+ "CJK Strokes": range(12736, 12783 + 1),
+ "Katakana Phonetic Extensions": range(12784, 12799 + 1),
+ "Enclosed CJK Letters and Months": range(12800, 13055 + 1),
+ "CJK Compatibility": range(13056, 13311 + 1),
+ "CJK Unified Ideographs Extension A": range(13312, 19903 + 1),
+ "Yijing Hexagram Symbols": range(19904, 19967 + 1),
+ "CJK Unified Ideographs": range(19968, 40959 + 1),
+ "Yi Syllables": range(40960, 42127 + 1),
+ "Yi Radicals": range(42128, 42191 + 1),
+ "Lisu": range(42192, 42239 + 1),
+ "Vai": range(42240, 42559 + 1),
+ "Cyrillic Extended-B": range(42560, 42655 + 1),
+ "Bamum": range(42656, 42751 + 1),
+ "Modifier Tone Letters": range(42752, 42783 + 1),
+ "Latin Extended-D": range(42784, 43007 + 1),
+ "Syloti Nagri": range(43008, 43055 + 1),
+ "Common Indic Number Forms": range(43056, 43071 + 1),
+ "Phags-pa": range(43072, 43135 + 1),
+ "Saurashtra": range(43136, 43231 + 1),
+ "Devanagari Extended": range(43232, 43263 + 1),
+ "Kayah Li": range(43264, 43311 + 1),
+ "Rejang": range(43312, 43359 + 1),
+ "Hangul Jamo Extended-A": range(43360, 43391 + 1),
+ "Javanese": range(43392, 43487 + 1),
+ "Myanmar Extended-B": range(43488, 43519 + 1),
+ "Cham": range(43520, 43615 + 1),
+ "Myanmar Extended-A": range(43616, 43647 + 1),
+ "Tai Viet": range(43648, 43743 + 1),
+ "Meetei Mayek Extensions": range(43744, 43775 + 1),
+ "Ethiopic Extended-A": range(43776, 43823 + 1),
+ "Latin Extended-E": range(43824, 43887 + 1),
+ "Cherokee Supplement": range(43888, 43967 + 1),
+ "Meetei Mayek": range(43968, 44031 + 1),
+ "Hangul Syllables": range(44032, 55215 + 1),
+ "Hangul Jamo Extended-B": range(55216, 55295 + 1),
+ "High Surrogates": range(55296, 56191 + 1),
+ "High Private Use Surrogates": range(56192, 56319 + 1),
+ "Low Surrogates": range(56320, 57343 + 1),
+ "Private Use Area": range(57344, 63743 + 1),
+ "CJK Compatibility Ideographs": range(63744, 64255 + 1),
+ "Alphabetic Presentation Forms": range(64256, 64335 + 1),
+ "Arabic Presentation Forms-A": range(64336, 65023 + 1),
+ "Variation Selectors": range(65024, 65039 + 1),
+ "Vertical Forms": range(65040, 65055 + 1),
+ "Combining Half Marks": range(65056, 65071 + 1),
+ "CJK Compatibility Forms": range(65072, 65103 + 1),
+ "Small Form Variants": range(65104, 65135 + 1),
+ "Arabic Presentation Forms-B": range(65136, 65279 + 1),
+ "Halfwidth and Fullwidth Forms": range(65280, 65519 + 1),
+ "Specials": range(65520, 65535 + 1),
+ "Linear B Syllabary": range(65536, 65663 + 1),
+ "Linear B Ideograms": range(65664, 65791 + 1),
+ "Aegean Numbers": range(65792, 65855 + 1),
+ "Ancient Greek Numbers": range(65856, 65935 + 1),
+ "Ancient Symbols": range(65936, 65999 + 1),
+ "Phaistos Disc": range(66000, 66047 + 1),
+ "Lycian": range(66176, 66207 + 1),
+ "Carian": range(66208, 66271 + 1),
+ "Coptic Epact Numbers": range(66272, 66303 + 1),
+ "Old Italic": range(66304, 66351 + 1),
+ "Gothic": range(66352, 66383 + 1),
+ "Old Permic": range(66384, 66431 + 1),
+ "Ugaritic": range(66432, 66463 + 1),
+ "Old Persian": range(66464, 66527 + 1),
+ "Deseret": range(66560, 66639 + 1),
+ "Shavian": range(66640, 66687 + 1),
+ "Osmanya": range(66688, 66735 + 1),
+ "Osage": range(66736, 66815 + 1),
+ "Elbasan": range(66816, 66863 + 1),
+ "Caucasian Albanian": range(66864, 66927 + 1),
+ "Linear A": range(67072, 67455 + 1),
+ "Cypriot Syllabary": range(67584, 67647 + 1),
+ "Imperial Aramaic": range(67648, 67679 + 1),
+ "Palmyrene": range(67680, 67711 + 1),
+ "Nabataean": range(67712, 67759 + 1),
+ "Hatran": range(67808, 67839 + 1),
+ "Phoenician": range(67840, 67871 + 1),
+ "Lydian": range(67872, 67903 + 1),
+ "Meroitic Hieroglyphs": range(67968, 67999 + 1),
+ "Meroitic Cursive": range(68000, 68095 + 1),
+ "Kharoshthi": range(68096, 68191 + 1),
+ "Old South Arabian": range(68192, 68223 + 1),
+ "Old North Arabian": range(68224, 68255 + 1),
+ "Manichaean": range(68288, 68351 + 1),
+ "Avestan": range(68352, 68415 + 1),
+ "Inscriptional Parthian": range(68416, 68447 + 1),
+ "Inscriptional Pahlavi": range(68448, 68479 + 1),
+ "Psalter Pahlavi": range(68480, 68527 + 1),
+ "Old Turkic": range(68608, 68687 + 1),
+ "Old Hungarian": range(68736, 68863 + 1),
+ "Rumi Numeral Symbols": range(69216, 69247 + 1),
+ "Brahmi": range(69632, 69759 + 1),
+ "Kaithi": range(69760, 69839 + 1),
+ "Sora Sompeng": range(69840, 69887 + 1),
+ "Chakma": range(69888, 69967 + 1),
+ "Mahajani": range(69968, 70015 + 1),
+ "Sharada": range(70016, 70111 + 1),
+ "Sinhala Archaic Numbers": range(70112, 70143 + 1),
+ "Khojki": range(70144, 70223 + 1),
+ "Multani": range(70272, 70319 + 1),
+ "Khudawadi": range(70320, 70399 + 1),
+ "Grantha": range(70400, 70527 + 1),
+ "Newa": range(70656, 70783 + 1),
+ "Tirhuta": range(70784, 70879 + 1),
+ "Siddham": range(71040, 71167 + 1),
+ "Modi": range(71168, 71263 + 1),
+ "Mongolian Supplement": range(71264, 71295 + 1),
+ "Takri": range(71296, 71375 + 1),
+ "Ahom": range(71424, 71487 + 1),
+ "Warang Citi": range(71840, 71935 + 1),
+ "Zanabazar Square": range(72192, 72271 + 1),
+ "Soyombo": range(72272, 72367 + 1),
+ "Pau Cin Hau": range(72384, 72447 + 1),
+ "Bhaiksuki": range(72704, 72815 + 1),
+ "Marchen": range(72816, 72895 + 1),
+ "Masaram Gondi": range(72960, 73055 + 1),
+ "Cuneiform": range(73728, 74751 + 1),
+ "Cuneiform Numbers and Punctuation": range(74752, 74879 + 1),
+ "Early Dynastic Cuneiform": range(74880, 75087 + 1),
+ "Egyptian Hieroglyphs": range(77824, 78895 + 1),
+ "Anatolian Hieroglyphs": range(82944, 83583 + 1),
+ "Bamum Supplement": range(92160, 92735 + 1),
+ "Mro": range(92736, 92783 + 1),
+ "Bassa Vah": range(92880, 92927 + 1),
+ "Pahawh Hmong": range(92928, 93071 + 1),
+ "Miao": range(93952, 94111 + 1),
+ "Ideographic Symbols and Punctuation": range(94176, 94207 + 1),
+ "Tangut": range(94208, 100351 + 1),
+ "Tangut Components": range(100352, 101119 + 1),
+ "Kana Supplement": range(110592, 110847 + 1),
+ "Kana Extended-A": range(110848, 110895 + 1),
+ "Nushu": range(110960, 111359 + 1),
+ "Duployan": range(113664, 113823 + 1),
+ "Shorthand Format Controls": range(113824, 113839 + 1),
+ "Byzantine Musical Symbols": range(118784, 119039 + 1),
+ "Musical Symbols": range(119040, 119295 + 1),
+ "Ancient Greek Musical Notation": range(119296, 119375 + 1),
+ "Tai Xuan Jing Symbols": range(119552, 119647 + 1),
+ "Counting Rod Numerals": range(119648, 119679 + 1),
+ "Mathematical Alphanumeric Symbols": range(119808, 120831 + 1),
+ "Sutton SignWriting": range(120832, 121519 + 1),
+ "Glagolitic Supplement": range(122880, 122927 + 1),
+ "Mende Kikakui": range(124928, 125151 + 1),
+ "Adlam": range(125184, 125279 + 1),
+ "Arabic Mathematical Alphabetic Symbols": range(126464, 126719 + 1),
+ "Mahjong Tiles": range(126976, 127023 + 1),
+ "Domino Tiles": range(127024, 127135 + 1),
+ "Playing Cards": range(127136, 127231 + 1),
+ "Enclosed Alphanumeric Supplement": range(127232, 127487 + 1),
+ "Enclosed Ideographic Supplement": range(127488, 127743 + 1),
+ "Miscellaneous Symbols and Pictographs": range(127744, 128511 + 1),
+ "Emoticons range(Emoji)": range(128512, 128591 + 1),
+ "Ornamental Dingbats": range(128592, 128639 + 1),
+ "Transport and Map Symbols": range(128640, 128767 + 1),
+ "Alchemical Symbols": range(128768, 128895 + 1),
+ "Geometric Shapes Extended": range(128896, 129023 + 1),
+ "Supplemental Arrows-C": range(129024, 129279 + 1),
+ "Supplemental Symbols and Pictographs": range(129280, 129535 + 1),
+ "CJK Unified Ideographs Extension B": range(131072, 173791 + 1),
+ "CJK Unified Ideographs Extension C": range(173824, 177983 + 1),
+ "CJK Unified Ideographs Extension D": range(177984, 178207 + 1),
+ "CJK Unified Ideographs Extension E": range(178208, 183983 + 1),
+ "CJK Unified Ideographs Extension F": range(183984, 191471 + 1),
+ "CJK Compatibility Ideographs Supplement": range(194560, 195103 + 1),
+ "Tags": range(917504, 917631 + 1),
+ "Variation Selectors Supplement": range(917760, 917999 + 1),
+}
+
+
+# Keywords that mark a Unicode block name as "secondary" — an extension or
+# variant of a primary script block — when comparing successive character ranges.
+UNICODE_SECONDARY_RANGE_KEYWORD: List[str] = [
+    "Supplement",
+    "Extended",
+    "Extensions",
+    "Modifier",
+    "Marks",
+    "Punctuation",
+    "Symbols",
+    "Forms",
+    "Operators",
+    "Miscellaneous",
+    "Drawing",
+    "Block",
+    "Shapes",
+    "Supplemental",
+    "Tags",
+]
+
+# Loosely matches an in-document encoding declaration such as charset="utf-8"
+# or coding: latin-1; capture group 1 is the declared encoding name.
+RE_POSSIBLE_ENCODING_INDICATION = re_compile(
+    r"(?:(?:encoding)|(?:charset)|(?:coding))(?:[\:= ]{1,10})(?:[\"\']?)([a-zA-Z0-9\-_]+)(?:[\"\']?)",
+    IGNORECASE,
+)
+
+# Sorted list of codecs supported by the Python runtime, excluding transcoders
+# ("*_codec") and a few codecs never relevant for text detection.
+IANA_SUPPORTED: List[str] = sorted(
+    filter(
+        lambda x: x.endswith("_codec") is False
+        and x not in {"rot_13", "tactis", "mbcs"},
+        list(set(aliases.values())),
+    )
+)
+
+# Number of supported codecs.
+IANA_SUPPORTED_COUNT: int = len(IANA_SUPPORTED)
+
+# Pre-computed code pages that are similar to one another, as determined by the function cp_similarity.
+IANA_SUPPORTED_SIMILAR: Dict[str, List[str]] = {
+ "cp037": ["cp1026", "cp1140", "cp273", "cp500"],
+ "cp1026": ["cp037", "cp1140", "cp273", "cp500"],
+ "cp1125": ["cp866"],
+ "cp1140": ["cp037", "cp1026", "cp273", "cp500"],
+ "cp1250": ["iso8859_2"],
+ "cp1251": ["kz1048", "ptcp154"],
+ "cp1252": ["iso8859_15", "iso8859_9", "latin_1"],
+ "cp1253": ["iso8859_7"],
+ "cp1254": ["iso8859_15", "iso8859_9", "latin_1"],
+ "cp1257": ["iso8859_13"],
+ "cp273": ["cp037", "cp1026", "cp1140", "cp500"],
+ "cp437": ["cp850", "cp858", "cp860", "cp861", "cp862", "cp863", "cp865"],
+ "cp500": ["cp037", "cp1026", "cp1140", "cp273"],
+ "cp850": ["cp437", "cp857", "cp858", "cp865"],
+ "cp857": ["cp850", "cp858", "cp865"],
+ "cp858": ["cp437", "cp850", "cp857", "cp865"],
+ "cp860": ["cp437", "cp861", "cp862", "cp863", "cp865"],
+ "cp861": ["cp437", "cp860", "cp862", "cp863", "cp865"],
+ "cp862": ["cp437", "cp860", "cp861", "cp863", "cp865"],
+ "cp863": ["cp437", "cp860", "cp861", "cp862", "cp865"],
+ "cp865": ["cp437", "cp850", "cp857", "cp858", "cp860", "cp861", "cp862", "cp863"],
+ "cp866": ["cp1125"],
+ "iso8859_10": ["iso8859_14", "iso8859_15", "iso8859_4", "iso8859_9", "latin_1"],
+ "iso8859_11": ["tis_620"],
+ "iso8859_13": ["cp1257"],
+ "iso8859_14": [
+ "iso8859_10",
+ "iso8859_15",
+ "iso8859_16",
+ "iso8859_3",
+ "iso8859_9",
+ "latin_1",
+ ],
+ "iso8859_15": [
+ "cp1252",
+ "cp1254",
+ "iso8859_10",
+ "iso8859_14",
+ "iso8859_16",
+ "iso8859_3",
+ "iso8859_9",
+ "latin_1",
+ ],
+ "iso8859_16": [
+ "iso8859_14",
+ "iso8859_15",
+ "iso8859_2",
+ "iso8859_3",
+ "iso8859_9",
+ "latin_1",
+ ],
+ "iso8859_2": ["cp1250", "iso8859_16", "iso8859_4"],
+ "iso8859_3": ["iso8859_14", "iso8859_15", "iso8859_16", "iso8859_9", "latin_1"],
+ "iso8859_4": ["iso8859_10", "iso8859_2", "iso8859_9", "latin_1"],
+ "iso8859_7": ["cp1253"],
+ "iso8859_9": [
+ "cp1252",
+ "cp1254",
+ "cp1258",
+ "iso8859_10",
+ "iso8859_14",
+ "iso8859_15",
+ "iso8859_16",
+ "iso8859_3",
+ "iso8859_4",
+ "latin_1",
+ ],
+ "kz1048": ["cp1251", "ptcp154"],
+ "latin_1": [
+ "cp1252",
+ "cp1254",
+ "cp1258",
+ "iso8859_10",
+ "iso8859_14",
+ "iso8859_15",
+ "iso8859_16",
+ "iso8859_3",
+ "iso8859_4",
+ "iso8859_9",
+ ],
+ "mac_iceland": ["mac_roman", "mac_turkish"],
+ "mac_roman": ["mac_iceland", "mac_turkish"],
+ "mac_turkish": ["mac_iceland", "mac_roman"],
+ "ptcp154": ["cp1251", "kz1048"],
+ "tis_620": ["iso8859_11"],
+}
+
+
+# Maps Python codec names to the spelling chardet uses, so that the legacy
+# detect() wrapper can stay output-compatible with chardet.
+CHARDET_CORRESPONDENCE: Dict[str, str] = {
+    "iso2022_kr": "ISO-2022-KR",
+    "iso2022_jp": "ISO-2022-JP",
+    "euc_kr": "EUC-KR",
+    "tis_620": "TIS-620",
+    "utf_32": "UTF-32",
+    "euc_jp": "EUC-JP",
+    "koi8_r": "KOI8-R",
+    "iso8859_1": "ISO-8859-1",
+    "iso8859_2": "ISO-8859-2",
+    "iso8859_5": "ISO-8859-5",
+    "iso8859_6": "ISO-8859-6",
+    "iso8859_7": "ISO-8859-7",
+    "iso8859_8": "ISO-8859-8",
+    "utf_16": "UTF-16",
+    "cp855": "IBM855",
+    "mac_cyrillic": "MacCyrillic",
+    "gb2312": "GB2312",
+    "gb18030": "GB18030",
+    "cp932": "CP932",
+    "cp866": "IBM866",
+    "utf_8": "utf-8",
+    "utf_8_sig": "UTF-8-SIG",
+    "shift_jis": "SHIFT_JIS",
+    "big5": "Big5",
+    "cp1250": "windows-1250",
+    "cp1251": "windows-1251",
+    "cp1252": "Windows-1252",
+    "cp1253": "windows-1253",
+    "cp1255": "windows-1255",
+    "cp1256": "windows-1256",
+    "cp1254": "Windows-1254",
+    "cp949": "CP949",
+}
+
+
+# ASCII punctuation that commonly appears in legitimate text/markup; these
+# characters are never counted toward "mess" by the detectors.
+COMMON_SAFE_ASCII_CHARACTERS: Set[str] = {
+    "<",
+    ">",
+    "=",
+    ":",
+    "/",
+    "&",
+    ";",
+    "{",
+    "}",
+    "[",
+    "]",
+    ",",
+    "|",
+    '"',
+    "-",
+}
+
+
+KO_NAMES: Set[str] = {"johab", "cp949", "euc_kr"}
+ZH_NAMES: Set[str] = {"big5", "cp950", "big5hkscs", "hz"}
+
+LANGUAGE_SUPPORTED_COUNT: int = len(FREQUENCIES)
+
+# Logging LEVEL below DEBUG
+TRACE: int = 5
diff --git a/Voltage Vikings/lib/python3.9/site-packages/charset_normalizer/legacy.py b/Voltage Vikings/lib/python3.9/site-packages/charset_normalizer/legacy.py
new file mode 100644
index 00000000..43aad21a
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/charset_normalizer/legacy.py
@@ -0,0 +1,54 @@
+from typing import Any, Dict, Optional, Union
+from warnings import warn
+
+from .api import from_bytes
+from .constant import CHARDET_CORRESPONDENCE
+
+
+def detect(
+ byte_str: bytes, should_rename_legacy: bool = False, **kwargs: Any
+) -> Dict[str, Optional[Union[str, float]]]:
+ """
+ chardet legacy method
+ Detect the encoding of the given byte string. It should be mostly backward-compatible.
+ Encoding name will match Chardet own writing whenever possible. (Not on encoding name unsupported by it)
+ This function is deprecated and should be used to migrate your project easily, consult the documentation for
+ further information. Not planned for removal.
+
+ :param byte_str: The byte sequence to examine.
+ :param should_rename_legacy: Should we rename legacy encodings
+ to their more modern equivalents?
+ """
+ if len(kwargs):
+ warn(
+ f"charset-normalizer disregard arguments '{','.join(list(kwargs.keys()))}' in legacy function detect()"
+ )
+
+ if not isinstance(byte_str, (bytearray, bytes)):
+ raise TypeError( # pragma: nocover
+ "Expected object of type bytes or bytearray, got: "
+ "{0}".format(type(byte_str))
+ )
+
+ if isinstance(byte_str, bytearray):
+ byte_str = bytes(byte_str)
+
+ r = from_bytes(byte_str).best()
+
+ encoding = r.encoding if r is not None else None
+ language = r.language if r is not None and r.language != "Unknown" else ""
+ confidence = 1.0 - r.chaos if r is not None else None
+
+ # Note: CharsetNormalizer does not return 'UTF-8-SIG' as the sig get stripped in the detection/normalization process
+ # but chardet does return 'utf-8-sig' and it is a valid codec name.
+ if r is not None and encoding == "utf_8" and r.bom:
+ encoding += "_sig"
+
+ if should_rename_legacy is False and encoding in CHARDET_CORRESPONDENCE:
+ encoding = CHARDET_CORRESPONDENCE[encoding]
+
+ return {
+ "encoding": encoding,
+ "language": language,
+ "confidence": confidence,
+ }
diff --git a/Voltage Vikings/lib/python3.9/site-packages/charset_normalizer/md.cpython-39-darwin.so b/Voltage Vikings/lib/python3.9/site-packages/charset_normalizer/md.cpython-39-darwin.so
new file mode 100755
index 00000000..b17e9d5d
Binary files /dev/null and b/Voltage Vikings/lib/python3.9/site-packages/charset_normalizer/md.cpython-39-darwin.so differ
diff --git a/Voltage Vikings/lib/python3.9/site-packages/charset_normalizer/md.py b/Voltage Vikings/lib/python3.9/site-packages/charset_normalizer/md.py
new file mode 100644
index 00000000..56e9321a
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/charset_normalizer/md.py
@@ -0,0 +1,571 @@
+from functools import lru_cache
+from logging import getLogger
+from typing import List, Optional
+
+from .constant import (
+ COMMON_SAFE_ASCII_CHARACTERS,
+ TRACE,
+ UNICODE_SECONDARY_RANGE_KEYWORD,
+)
+from .utils import (
+ is_accentuated,
+ is_ascii,
+ is_case_variable,
+ is_cjk,
+ is_emoticon,
+ is_hangul,
+ is_hiragana,
+ is_katakana,
+ is_latin,
+ is_punctuation,
+ is_separator,
+ is_symbol,
+ is_thai,
+ is_unprintable,
+ remove_accent,
+ unicode_range,
+)
+
+
+class MessDetectorPlugin:
+    """
+    Base abstract class used for mess detection plugins.
+    All detectors MUST extend and implement given methods.
+    """
+
+    def eligible(self, character: str) -> bool:
+        """
+        Determine if given character should be fed in.
+        """
+        raise NotImplementedError  # pragma: nocover
+
+    def feed(self, character: str) -> None:
+        """
+        The main routine to be executed upon character.
+        Insert the logic in which the text would be considered chaotic.
+        """
+        raise NotImplementedError  # pragma: nocover
+
+    def reset(self) -> None:  # pragma: no cover
+        """
+        Permit to reset the plugin to the initial state.
+        """
+        raise NotImplementedError
+
+    @property
+    def ratio(self) -> float:
+        """
+        Compute the chaos ratio based on what your feed() has seen.
+        Must NOT be lower than 0.; No restriction gt 0.
+        """
+        raise NotImplementedError  # pragma: nocover
+
+
+class TooManySymbolOrPunctuationPlugin(MessDetectorPlugin):
+ def __init__(self) -> None:
+ self._punctuation_count: int = 0
+ self._symbol_count: int = 0
+ self._character_count: int = 0
+
+ self._last_printable_char: Optional[str] = None
+ self._frenzy_symbol_in_word: bool = False
+
+ def eligible(self, character: str) -> bool:
+ return character.isprintable()
+
+ def feed(self, character: str) -> None:
+ self._character_count += 1
+
+ if (
+ character != self._last_printable_char
+ and character not in COMMON_SAFE_ASCII_CHARACTERS
+ ):
+ if is_punctuation(character):
+ self._punctuation_count += 1
+ elif (
+ character.isdigit() is False
+ and is_symbol(character)
+ and is_emoticon(character) is False
+ ):
+ self._symbol_count += 2
+
+ self._last_printable_char = character
+
+ def reset(self) -> None: # pragma: no cover
+ self._punctuation_count = 0
+ self._character_count = 0
+ self._symbol_count = 0
+
+ @property
+ def ratio(self) -> float:
+ if self._character_count == 0:
+ return 0.0
+
+ ratio_of_punctuation: float = (
+ self._punctuation_count + self._symbol_count
+ ) / self._character_count
+
+ return ratio_of_punctuation if ratio_of_punctuation >= 0.3 else 0.0
+
+
+class TooManyAccentuatedPlugin(MessDetectorPlugin):
+ def __init__(self) -> None:
+ self._character_count: int = 0
+ self._accentuated_count: int = 0
+
+ def eligible(self, character: str) -> bool:
+ return character.isalpha()
+
+ def feed(self, character: str) -> None:
+ self._character_count += 1
+
+ if is_accentuated(character):
+ self._accentuated_count += 1
+
+ def reset(self) -> None: # pragma: no cover
+ self._character_count = 0
+ self._accentuated_count = 0
+
+ @property
+ def ratio(self) -> float:
+ if self._character_count == 0 or self._character_count < 8:
+ return 0.0
+ ratio_of_accentuation: float = self._accentuated_count / self._character_count
+ return ratio_of_accentuation if ratio_of_accentuation >= 0.35 else 0.0
+
+
+class UnprintablePlugin(MessDetectorPlugin):
+ def __init__(self) -> None:
+ self._unprintable_count: int = 0
+ self._character_count: int = 0
+
+ def eligible(self, character: str) -> bool:
+ return True
+
+ def feed(self, character: str) -> None:
+ if is_unprintable(character):
+ self._unprintable_count += 1
+ self._character_count += 1
+
+ def reset(self) -> None: # pragma: no cover
+ self._unprintable_count = 0
+
+ @property
+ def ratio(self) -> float:
+ if self._character_count == 0:
+ return 0.0
+
+ return (self._unprintable_count * 8) / self._character_count
+
+
+class SuspiciousDuplicateAccentPlugin(MessDetectorPlugin):
+    """
+    Flag successive pairs of accentuated Latin letters, which rarely occur in
+    legitimate text (especially the same base letter with different accents).
+    """
+
+    def __init__(self) -> None:
+        self._successive_count: int = 0
+        self._character_count: int = 0
+
+        # Previous Latin letter fed, or None before the first one.
+        self._last_latin_character: Optional[str] = None
+
+    def eligible(self, character: str) -> bool:
+        return character.isalpha() and is_latin(character)
+
+    def feed(self, character: str) -> None:
+        self._character_count += 1
+        if (
+            self._last_latin_character is not None
+            and is_accentuated(character)
+            and is_accentuated(self._last_latin_character)
+        ):
+            # Two upper-case accentuated letters in a row is suspicious.
+            if character.isupper() and self._last_latin_character.isupper():
+                self._successive_count += 1
+            # Worse if it's the same char duplicated with different accent.
+            if remove_accent(character) == remove_accent(self._last_latin_character):
+                self._successive_count += 1
+        self._last_latin_character = character
+
+    def reset(self) -> None:  # pragma: no cover
+        self._successive_count = 0
+        self._character_count = 0
+        self._last_latin_character = None
+
+    @property
+    def ratio(self) -> float:
+        if self._character_count == 0:
+            return 0.0
+
+        # Each suspicious pair weighs double.
+        return (self._successive_count * 2) / self._character_count
+
+
+class SuspiciousRange(MessDetectorPlugin):
+    """
+    Flag adjacent printable characters drawn from Unicode ranges that rarely
+    co-occur, as judged by is_suspiciously_successive_range().
+    """
+
+    def __init__(self) -> None:
+        self._suspicious_successive_range_count: int = 0
+        self._character_count: int = 0
+        # Previous printable character within the current run, or None.
+        self._last_printable_seen: Optional[str] = None
+
+    def eligible(self, character: str) -> bool:
+        return character.isprintable()
+
+    def feed(self, character: str) -> None:
+        self._character_count += 1
+
+        # Whitespace/punctuation resets the pairing: ranges are compared only
+        # within a run of word-like characters.
+        if (
+            character.isspace()
+            or is_punctuation(character)
+            or character in COMMON_SAFE_ASCII_CHARACTERS
+        ):
+            self._last_printable_seen = None
+            return
+
+        if self._last_printable_seen is None:
+            self._last_printable_seen = character
+            return
+
+        unicode_range_a: Optional[str] = unicode_range(self._last_printable_seen)
+        unicode_range_b: Optional[str] = unicode_range(character)
+
+        if is_suspiciously_successive_range(unicode_range_a, unicode_range_b):
+            self._suspicious_successive_range_count += 1
+
+        self._last_printable_seen = character
+
+    def reset(self) -> None:  # pragma: no cover
+        self._character_count = 0
+        self._suspicious_successive_range_count = 0
+        self._last_printable_seen = None
+
+    @property
+    def ratio(self) -> float:
+        if self._character_count == 0:
+            return 0.0
+
+        # Each suspicious transition weighs double.
+        ratio_of_suspicious_range_usage: float = (
+            self._suspicious_successive_range_count * 2
+        ) / self._character_count
+
+        # Below 10% the signal is treated as noise.
+        if ratio_of_suspicious_range_usage < 0.1:
+            return 0.0
+
+        return ratio_of_suspicious_range_usage
+
+
+class SuperWeirdWordPlugin(MessDetectorPlugin):
+    """
+    Accumulate alphabetic characters into "words" and flag words that look
+    implausible: too accent-heavy, unusually long foreign runs, or words with
+    embedded symbols.
+    """
+
+    def __init__(self) -> None:
+        self._word_count: int = 0
+        self._bad_word_count: int = 0
+        self._foreign_long_count: int = 0
+
+        self._is_current_word_bad: bool = False
+        # Set once the current word contains a non-Latin or accented character
+        # outside the CJK/Hangul/Katakana/Hiragana/Thai scripts.
+        self._foreign_long_watch: bool = False
+
+        self._character_count: int = 0
+        self._bad_character_count: int = 0
+
+        # Characters of the word currently being accumulated.
+        self._buffer: str = ""
+        self._buffer_accent_count: int = 0
+
+    def eligible(self, character: str) -> bool:
+        return True
+
+    def feed(self, character: str) -> None:
+        if character.isalpha():
+            self._buffer += character
+            if is_accentuated(character):
+                self._buffer_accent_count += 1
+            if (
+                self._foreign_long_watch is False
+                and (is_latin(character) is False or is_accentuated(character))
+                and is_cjk(character) is False
+                and is_hangul(character) is False
+                and is_katakana(character) is False
+                and is_hiragana(character) is False
+                and is_thai(character) is False
+            ):
+                self._foreign_long_watch = True
+            return
+        if not self._buffer:
+            return
+        if (
+            character.isspace() or is_punctuation(character) or is_separator(character)
+        ) and self._buffer:
+            # End of a word: evaluate the accumulated buffer.
+            self._word_count += 1
+            buffer_length: int = len(self._buffer)
+
+            self._character_count += buffer_length
+
+            if buffer_length >= 4:
+                # More than ~1/3 accentuated letters is implausible.
+                if self._buffer_accent_count / buffer_length > 0.34:
+                    self._is_current_word_bad = True
+                # Word/Buffer ending with an upper case accentuated letter are so rare,
+                # that we will consider them all as suspicious. Same weight as foreign_long suspicious.
+                if is_accentuated(self._buffer[-1]) and self._buffer[-1].isupper():
+                    self._foreign_long_count += 1
+                    self._is_current_word_bad = True
+                if buffer_length >= 24 and self._foreign_long_watch:
+                    self._foreign_long_count += 1
+                    self._is_current_word_bad = True
+
+            if self._is_current_word_bad:
+                self._bad_word_count += 1
+                self._bad_character_count += len(self._buffer)
+                self._is_current_word_bad = False
+
+            self._foreign_long_watch = False
+            self._buffer = ""
+            self._buffer_accent_count = 0
+        elif (
+            character not in {"<", ">", "-", "=", "~", "|", "_"}
+            and character.isdigit() is False
+            and is_symbol(character)
+        ):
+            # A symbol embedded in a word taints the whole word.
+            self._is_current_word_bad = True
+            self._buffer += character
+
+    def reset(self) -> None:  # pragma: no cover
+        self._buffer = ""
+        self._is_current_word_bad = False
+        self._foreign_long_watch = False
+        self._bad_word_count = 0
+        self._word_count = 0
+        self._character_count = 0
+        self._bad_character_count = 0
+        self._foreign_long_count = 0
+
+    @property
+    def ratio(self) -> float:
+        # Too few words and no long foreign runs: not enough evidence.
+        if self._word_count <= 10 and self._foreign_long_count == 0:
+            return 0.0
+
+        return self._bad_character_count / self._character_count
+
+
+class CjkInvalidStopPlugin(MessDetectorPlugin):
+ """
+ GB(Chinese) based encoding often render the stop incorrectly when the content does not fit and
+ can be easily detected. Searching for the overuse of '丅' and '丄'.
+ """
+
+ def __init__(self) -> None:
+ self._wrong_stop_count: int = 0
+ self._cjk_character_count: int = 0
+
+ def eligible(self, character: str) -> bool:
+ return True
+
+ def feed(self, character: str) -> None:
+ if character in {"丅", "丄"}:
+ self._wrong_stop_count += 1
+ return
+ if is_cjk(character):
+ self._cjk_character_count += 1
+
+ def reset(self) -> None: # pragma: no cover
+ self._wrong_stop_count = 0
+ self._cjk_character_count = 0
+
+ @property
+ def ratio(self) -> float:
+ if self._cjk_character_count < 16:
+ return 0.0
+ return self._wrong_stop_count / self._cjk_character_count
+
+
+class ArchaicUpperLowerPlugin(MessDetectorPlugin):
+    """
+    Flag frequent upper/lower case alternation inside words (e.g. "aBcDeF"),
+    a pattern typical of mojibake; pure-ASCII chunks are ignored.
+    """
+
+    def __init__(self) -> None:
+        # True when the previous adjacent pair already alternated case once.
+        self._buf: bool = False
+
+        self._character_count_since_last_sep: int = 0
+
+        self._successive_upper_lower_count: int = 0
+        self._successive_upper_lower_count_final: int = 0
+
+        self._character_count: int = 0
+
+        self._last_alpha_seen: Optional[str] = None
+        # Whether the current chunk has been ASCII-only so far.
+        self._current_ascii_only: bool = True
+
+    def eligible(self, character: str) -> bool:
+        return True
+
+    def feed(self, character: str) -> None:
+        is_concerned = character.isalpha() and is_case_variable(character)
+        chunk_sep = is_concerned is False
+
+        if chunk_sep and self._character_count_since_last_sep > 0:
+            # End of a chunk: only short, non-ASCII chunks contribute their
+            # alternation count to the final tally.
+            if (
+                self._character_count_since_last_sep <= 64
+                and character.isdigit() is False
+                and self._current_ascii_only is False
+            ):
+                self._successive_upper_lower_count_final += (
+                    self._successive_upper_lower_count
+                )
+
+            self._successive_upper_lower_count = 0
+            self._character_count_since_last_sep = 0
+            self._last_alpha_seen = None
+            self._buf = False
+            self._character_count += 1
+            self._current_ascii_only = True
+
+            return
+
+        if self._current_ascii_only is True and is_ascii(character) is False:
+            self._current_ascii_only = False
+
+        if self._last_alpha_seen is not None:
+            if (character.isupper() and self._last_alpha_seen.islower()) or (
+                character.islower() and self._last_alpha_seen.isupper()
+            ):
+                if self._buf is True:
+                    # Second alternation in a row: count the pair (weight 2).
+                    self._successive_upper_lower_count += 2
+                    self._buf = False
+                else:
+                    self._buf = True
+            else:
+                self._buf = False
+
+        self._character_count += 1
+        self._character_count_since_last_sep += 1
+        self._last_alpha_seen = character
+
+    def reset(self) -> None:  # pragma: no cover
+        self._character_count = 0
+        self._character_count_since_last_sep = 0
+        self._successive_upper_lower_count = 0
+        self._successive_upper_lower_count_final = 0
+        self._last_alpha_seen = None
+        self._buf = False
+        self._current_ascii_only = True
+
+    @property
+    def ratio(self) -> float:
+        if self._character_count == 0:
+            return 0.0
+
+        return self._successive_upper_lower_count_final / self._character_count
+
+
@lru_cache(maxsize=1024)
def is_suspiciously_successive_range(
    unicode_range_a: Optional[str], unicode_range_b: Optional[str]
) -> bool:
    """
    Determine if two Unicode range seen next to each other can be considered as suspicious.

    Returns False (not suspicious) for known-legitimate adjacencies; anything
    not covered by an exception below is treated as suspicious.
    """
    # Unknown ranges are suspicious by default.
    if unicode_range_a is None or unicode_range_b is None:
        return True

    if unicode_range_a == unicode_range_b:
        return False

    if "Latin" in unicode_range_a and "Latin" in unicode_range_b:
        return False

    if "Emoticons" in unicode_range_a or "Emoticons" in unicode_range_b:
        return False

    # Latin characters can be accompanied with a combining diacritical mark
    # eg. Vietnamese.
    if ("Latin" in unicode_range_a or "Latin" in unicode_range_b) and (
        "Combining" in unicode_range_a or "Combining" in unicode_range_b
    ):
        return False

    keywords_range_a, keywords_range_b = unicode_range_a.split(
        " "
    ), unicode_range_b.split(" ")

    # Ranges sharing a word not listed as secondary are considered related,
    # hence not suspicious.
    for el in keywords_range_a:
        if el in UNICODE_SECONDARY_RANGE_KEYWORD:
            continue
        if el in keywords_range_b:
            return False

    # Japanese Exception
    range_a_jp_chars, range_b_jp_chars = (
        unicode_range_a
        in (
            "Hiragana",
            "Katakana",
        ),
        unicode_range_b in ("Hiragana", "Katakana"),
    )
    # Kana next to CJK ideographs, or kana next to kana, is ordinary Japanese.
    if (range_a_jp_chars or range_b_jp_chars) and (
        "CJK" in unicode_range_a or "CJK" in unicode_range_b
    ):
        return False
    if range_a_jp_chars and range_b_jp_chars:
        return False

    # Korean: Hangul alongside CJK or Basic Latin is ordinary.
    if "Hangul" in unicode_range_a or "Hangul" in unicode_range_b:
        if "CJK" in unicode_range_a or "CJK" in unicode_range_b:
            return False
        if unicode_range_a == "Basic Latin" or unicode_range_b == "Basic Latin":
            return False

    # Chinese/Japanese use dedicated range for punctuation and/or separators.
    if ("CJK" in unicode_range_a or "CJK" in unicode_range_b) or (
        unicode_range_a in ["Katakana", "Hiragana"]
        and unicode_range_b in ["Katakana", "Hiragana"]
    ):
        if "Punctuation" in unicode_range_a or "Punctuation" in unicode_range_b:
            return False
        if "Forms" in unicode_range_a or "Forms" in unicode_range_b:
            return False

    return True
+
+
@lru_cache(maxsize=2048)
def mess_ratio(
    decoded_sequence: str, maximum_threshold: float = 0.2, debug: bool = False
) -> float:
    """
    Compute a mess ratio given a decoded bytes sequence. The maximum threshold does stop the computation earlier.
    """

    # One fresh instance of every registered detector (every imported subclass
    # of MessDetectorPlugin).
    detectors: List[MessDetectorPlugin] = [
        md_class() for md_class in MessDetectorPlugin.__subclasses__()
    ]

    # +1 accounts for the artificial trailing "\n" appended below.
    length: int = len(decoded_sequence) + 1

    mean_mess_ratio: float = 0.0

    # Re-evaluate the ratio more frequently on short inputs.
    if length < 512:
        intermediary_mean_mess_ratio_calc: int = 32
    elif length <= 1024:
        intermediary_mean_mess_ratio_calc = 64
    else:
        intermediary_mean_mess_ratio_calc = 128

    for character, index in zip(decoded_sequence + "\n", range(length)):
        for detector in detectors:
            if detector.eligible(character):
                detector.feed(character)

        # Periodically (and on the final character) sum the detector ratios;
        # abort early once the threshold is exceeded.
        if (
            index > 0 and index % intermediary_mean_mess_ratio_calc == 0
        ) or index == length - 1:
            mean_mess_ratio = sum(dt.ratio for dt in detectors)

            if mean_mess_ratio >= maximum_threshold:
                break

    if debug:
        logger = getLogger("charset_normalizer")

        logger.log(
            TRACE,
            "Mess-detector extended-analysis start. "
            f"intermediary_mean_mess_ratio_calc={intermediary_mean_mess_ratio_calc} mean_mess_ratio={mean_mess_ratio} "
            f"maximum_threshold={maximum_threshold}",
        )

        if len(decoded_sequence) > 16:
            logger.log(TRACE, f"Starting with: {decoded_sequence[:16]}")
            logger.log(TRACE, f"Ending with: {decoded_sequence[-16::]}")

        for dt in detectors:  # pragma: nocover
            logger.log(TRACE, f"{dt.__class__}: {dt.ratio}")

    return round(mean_mess_ratio, 3)
diff --git a/Voltage Vikings/lib/python3.9/site-packages/charset_normalizer/md__mypyc.cpython-39-darwin.so b/Voltage Vikings/lib/python3.9/site-packages/charset_normalizer/md__mypyc.cpython-39-darwin.so
new file mode 100755
index 00000000..951accb2
Binary files /dev/null and b/Voltage Vikings/lib/python3.9/site-packages/charset_normalizer/md__mypyc.cpython-39-darwin.so differ
diff --git a/Voltage Vikings/lib/python3.9/site-packages/charset_normalizer/models.py b/Voltage Vikings/lib/python3.9/site-packages/charset_normalizer/models.py
new file mode 100644
index 00000000..7f8ca389
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/charset_normalizer/models.py
@@ -0,0 +1,337 @@
+from encodings.aliases import aliases
+from hashlib import sha256
+from json import dumps
+from typing import Any, Dict, Iterator, List, Optional, Tuple, Union
+
+from .constant import TOO_BIG_SEQUENCE
+from .utils import iana_name, is_multi_byte_encoding, unicode_range
+
+
class CharsetMatch:
    """
    A single plausible interpretation of a byte payload: the guessed encoding,
    its mess ("chaos") ratio, its language coherence, and any alternative
    encodings that decode to the exact same text (submatches).
    """

    def __init__(
        self,
        payload: bytes,
        guessed_encoding: str,
        mean_mess_ratio: float,
        has_sig_or_bom: bool,
        languages: "CoherenceMatches",
        decoded_payload: Optional[str] = None,
    ):
        self._payload: bytes = payload

        self._encoding: str = guessed_encoding
        self._mean_mess_ratio: float = mean_mess_ratio
        self._languages: CoherenceMatches = languages
        self._has_sig_or_bom: bool = has_sig_or_bom
        # Computed lazily by the 'alphabets' property.
        self._unicode_ranges: Optional[List[str]] = None

        # Alternative matches decoding to the same text (see add_submatch).
        self._leaves: List[CharsetMatch] = []
        self._mean_coherence_ratio: float = 0.0

        # Cache for output(): last requested encoding and its re-encoded bytes.
        self._output_payload: Optional[bytes] = None
        self._output_encoding: Optional[str] = None

        # Decoded text; produced lazily by __str__() when not supplied upfront.
        self._string: Optional[str] = decoded_payload

    def __eq__(self, other: object) -> bool:
        """Two matches are equal when encoding and re-encoded payload agree."""
        if not isinstance(other, CharsetMatch):
            raise TypeError(
                "__eq__ cannot be invoked on {} and {}.".format(
                    str(other.__class__), str(self.__class__)
                )
            )
        return self.encoding == other.encoding and self.fingerprint == other.fingerprint

    def __lt__(self, other: object) -> bool:
        """
        Implemented to make sorted available upon CharsetMatches items.
        """
        if not isinstance(other, CharsetMatch):
            raise ValueError

        chaos_difference: float = abs(self.chaos - other.chaos)
        coherence_difference: float = abs(self.coherence - other.coherence)

        # Below 1% difference --> Use Coherence
        if chaos_difference < 0.01 and coherence_difference > 0.02:
            # When having a tough decision, use the result that decoded as many multi-byte as possible.
            if chaos_difference == 0.0 and self.coherence == other.coherence:
                return self.multi_byte_usage > other.multi_byte_usage
            return self.coherence > other.coherence

        return self.chaos < other.chaos

    @property
    def multi_byte_usage(self) -> float:
        # Fraction of the raw bytes that belong to multi-byte sequences
        # (0.0 for pure single-byte content).
        return 1.0 - len(str(self)) / len(self.raw)

    def __str__(self) -> str:
        # Lazy Str Loading
        if self._string is None:
            self._string = str(self._payload, self._encoding, "strict")
        return self._string

    def __repr__(self) -> str:
        # BUGFIX: the format string was empty (""), so repr() always returned
        # an empty string no matter the arguments passed to format().
        return "<CharsetMatch '{}' bytes({})>".format(self.encoding, self.fingerprint)

    def add_submatch(self, other: "CharsetMatch") -> None:
        """Register another match that decodes to the same text as this one."""
        if not isinstance(other, CharsetMatch) or other == self:
            raise ValueError(
                "Unable to add instance <{}> as a submatch of a CharsetMatch".format(
                    other.__class__
                )
            )

        other._string = None  # Unload RAM usage; dirty trick.
        self._leaves.append(other)

    @property
    def encoding(self) -> str:
        return self._encoding

    @property
    def encoding_aliases(self) -> List[str]:
        """
        Encoding name are known by many name, using this could help when searching for IBM855 when it's listed as CP855.
        """
        also_known_as: List[str] = []
        for u, p in aliases.items():
            if self.encoding == u:
                also_known_as.append(p)
            elif self.encoding == p:
                also_known_as.append(u)
        return also_known_as

    @property
    def bom(self) -> bool:
        return self._has_sig_or_bom

    @property
    def byte_order_mark(self) -> bool:
        # Alias of 'bom'.
        return self._has_sig_or_bom

    @property
    def languages(self) -> List[str]:
        """
        Return the complete list of possible languages found in decoded sequence.
        Usually not really useful. Returned list may be empty even if 'language' property return something != 'Unknown'.
        """
        return [e[0] for e in self._languages]

    @property
    def language(self) -> str:
        """
        Most probable language found in decoded sequence. If none were detected or inferred, the property will return
        "Unknown".
        """
        if not self._languages:
            # Trying to infer the language based on the given encoding
            # Its either English or we should not pronounce ourselves in certain cases.
            if "ascii" in self.could_be_from_charset:
                return "English"

            # doing it there to avoid circular import
            from charset_normalizer.cd import encoding_languages, mb_encoding_languages

            languages = (
                mb_encoding_languages(self.encoding)
                if is_multi_byte_encoding(self.encoding)
                else encoding_languages(self.encoding)
            )

            if len(languages) == 0 or "Latin Based" in languages:
                return "Unknown"

            return languages[0]

        return self._languages[0][0]

    @property
    def chaos(self) -> float:
        # Mess ratio: lower is better.
        return self._mean_mess_ratio

    @property
    def coherence(self) -> float:
        # Confidence ratio of the most probable language; higher is better.
        if not self._languages:
            return 0.0
        return self._languages[0][1]

    @property
    def percent_chaos(self) -> float:
        return round(self.chaos * 100, ndigits=3)

    @property
    def percent_coherence(self) -> float:
        return round(self.coherence * 100, ndigits=3)

    @property
    def raw(self) -> bytes:
        """
        Original untouched bytes.
        """
        return self._payload

    @property
    def submatch(self) -> List["CharsetMatch"]:
        return self._leaves

    @property
    def has_submatch(self) -> bool:
        return len(self._leaves) > 0

    @property
    def alphabets(self) -> List[str]:
        if self._unicode_ranges is not None:
            return self._unicode_ranges
        # list detected ranges
        detected_ranges: List[Optional[str]] = [
            unicode_range(char) for char in str(self)
        ]
        # filter and sort
        self._unicode_ranges = sorted(list({r for r in detected_ranges if r}))
        return self._unicode_ranges

    @property
    def could_be_from_charset(self) -> List[str]:
        """
        The complete list of encoding that output the exact SAME str result and therefore could be the originating
        encoding.
        This list does include the encoding available in property 'encoding'.
        """
        return [self._encoding] + [m.encoding for m in self._leaves]

    def output(self, encoding: str = "utf_8") -> bytes:
        """
        Method to get re-encoded bytes payload using given target encoding. Default to UTF-8.
        Any errors will be replaced (not ignored) by the encoder.
        """
        if self._output_encoding is None or self._output_encoding != encoding:
            self._output_encoding = encoding
            self._output_payload = str(self).encode(encoding, "replace")

        return self._output_payload  # type: ignore

    @property
    def fingerprint(self) -> str:
        """
        Retrieve the unique SHA256 computed using the transformed (re-encoded) payload. Not the original one.
        """
        return sha256(self.output()).hexdigest()
+
+
class CharsetMatches:
    """
    Container with every CharsetMatch items ordered by default from most probable to the less one.
    Act like a list(iterable) but does not implements all related methods.
    """

    def __init__(self, results: Optional[List[CharsetMatch]] = None):
        # Always kept sorted: most probable match first.
        self._results: List[CharsetMatch] = sorted(results) if results else []

    def __iter__(self) -> Iterator[CharsetMatch]:
        return iter(self._results)

    def __getitem__(self, item: Union[int, str]) -> CharsetMatch:
        """
        Retrieve a single item either by its position or encoding name (alias may be used here).
        Raise KeyError upon invalid index or encoding not present in results.
        """
        if isinstance(item, int):
            return self._results[item]
        if isinstance(item, str):
            wanted = iana_name(item, False)
            for candidate in self._results:
                if wanted in candidate.could_be_from_charset:
                    return candidate
        raise KeyError

    def __len__(self) -> int:
        return len(self._results)

    def __bool__(self) -> bool:
        return bool(self._results)

    def append(self, item: CharsetMatch) -> None:
        """
        Insert a single match. Will be inserted accordingly to preserve sort.
        Can be inserted as a submatch.
        """
        if not isinstance(item, CharsetMatch):
            raise ValueError(
                "Cannot append instance '{}' to CharsetMatches".format(
                    str(item.__class__)
                )
            )
        # We should disable the submatch factoring when the input file is too heavy (conserve RAM usage)
        if len(item.raw) <= TOO_BIG_SEQUENCE:
            for existing in self._results:
                if (
                    existing.fingerprint == item.fingerprint
                    and existing.chaos == item.chaos
                ):
                    existing.add_submatch(item)
                    return
        self._results.append(item)
        self._results = sorted(self._results)

    def best(self) -> Optional["CharsetMatch"]:
        """
        Simply return the first match. Strict equivalent to matches[0].
        """
        return self._results[0] if self._results else None

    def first(self) -> Optional["CharsetMatch"]:
        """
        Redundant method, call the method best(). Kept for BC reasons.
        """
        return self.best()
+
+
# A coherence match pairs a language name with its confidence ratio.
CoherenceMatch = Tuple[str, float]
CoherenceMatches = List[CoherenceMatch]
+
+
class CliDetectionResult:
    """
    Plain record of one file's detection outcome as reported by the CLI,
    serializable to JSON through to_json().
    """

    def __init__(
        self,
        path: str,
        encoding: Optional[str],
        encoding_aliases: List[str],
        alternative_encodings: List[str],
        language: str,
        alphabets: List[str],
        has_sig_or_bom: bool,
        chaos: float,
        coherence: float,
        unicode_path: Optional[str],
        is_preferred: bool,
    ):
        # File location (raw and unicode-safe variants).
        self.path: str = path
        self.unicode_path: Optional[str] = unicode_path
        # Detection outcome.
        self.encoding: Optional[str] = encoding
        self.encoding_aliases: List[str] = encoding_aliases
        self.alternative_encodings: List[str] = alternative_encodings
        self.language: str = language
        self.alphabets: List[str] = alphabets
        self.has_sig_or_bom: bool = has_sig_or_bom
        # Quality scores.
        self.chaos: float = chaos
        self.coherence: float = coherence
        self.is_preferred: bool = is_preferred

    @property
    def __dict__(self) -> Dict[str, Any]:  # type: ignore
        # Overridden so instances expose exactly these keys to to_json().
        keys_and_values = (
            ("path", self.path),
            ("encoding", self.encoding),
            ("encoding_aliases", self.encoding_aliases),
            ("alternative_encodings", self.alternative_encodings),
            ("language", self.language),
            ("alphabets", self.alphabets),
            ("has_sig_or_bom", self.has_sig_or_bom),
            ("chaos", self.chaos),
            ("coherence", self.coherence),
            ("unicode_path", self.unicode_path),
            ("is_preferred", self.is_preferred),
        )
        return dict(keys_and_values)

    def to_json(self) -> str:
        """Return this result as an ASCII-escaped, 4-space-indented JSON document."""
        return dumps(self.__dict__, ensure_ascii=True, indent=4)
diff --git a/Voltage Vikings/lib/python3.9/site-packages/charset_normalizer/py.typed b/Voltage Vikings/lib/python3.9/site-packages/charset_normalizer/py.typed
new file mode 100644
index 00000000..e69de29b
diff --git a/Voltage Vikings/lib/python3.9/site-packages/charset_normalizer/utils.py b/Voltage Vikings/lib/python3.9/site-packages/charset_normalizer/utils.py
new file mode 100644
index 00000000..76eafc64
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/charset_normalizer/utils.py
@@ -0,0 +1,414 @@
+import importlib
+import logging
+import unicodedata
+from codecs import IncrementalDecoder
+from encodings.aliases import aliases
+from functools import lru_cache
+from re import findall
+from typing import Generator, List, Optional, Set, Tuple, Union
+
+from _multibytecodec import MultibyteIncrementalDecoder
+
+from .constant import (
+ ENCODING_MARKS,
+ IANA_SUPPORTED_SIMILAR,
+ RE_POSSIBLE_ENCODING_INDICATION,
+ UNICODE_RANGES_COMBINED,
+ UNICODE_SECONDARY_RANGE_KEYWORD,
+ UTF8_MAXIMAL_ALLOCATION,
+)
+
+
@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
def is_accentuated(character: str) -> bool:
    """True when the Unicode name of *character* marks it as an accented letter."""
    try:
        name: str = unicodedata.name(character)
    except ValueError:
        # Code point has no name in the Unicode database.
        return False
    accent_marks = (
        "WITH GRAVE",
        "WITH ACUTE",
        "WITH CEDILLA",
        "WITH DIAERESIS",
        "WITH CIRCUMFLEX",
        "WITH TILDE",
    )
    return any(mark in name for mark in accent_marks)
+
+
@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
def remove_accent(character: str) -> str:
    """
    Return the base character of *character* with its combining mark removed,
    or the character itself when it has no decomposition.
    """
    decomposed: str = unicodedata.decomposition(character)
    if not decomposed:
        return character

    codes: List[str] = decomposed.split(" ")

    # Compatibility decompositions begin with a tag such as "<compat>" that is
    # not a hexadecimal code point; previously this raised ValueError. Fall
    # back to returning the character unchanged instead of crashing.
    try:
        return chr(int(codes[0], 16))
    except ValueError:
        return character
+
+
@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
def unicode_range(character: str) -> Optional[str]:
    """
    Retrieve the Unicode range official name from a single character.
    """
    code_point: int = ord(character)

    return next(
        (
            range_name
            for range_name, code_points in UNICODE_RANGES_COMBINED.items()
            if code_point in code_points
        ),
        None,
    )
+
+
@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
def is_latin(character: str) -> bool:
    """True when the character's Unicode name mentions LATIN."""
    try:
        return "LATIN" in unicodedata.name(character)
    except ValueError:
        # Unnamed code point.
        return False
+
+
@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
def is_ascii(character: str) -> bool:
    """True when the character can be encoded in plain ASCII."""
    # Equivalent to attempting character.encode("ascii") and catching
    # UnicodeEncodeError, without the exception machinery.
    return character.isascii()
+
+
@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
def is_punctuation(character: str) -> bool:
    """True for punctuation: a 'P*' Unicode category or a 'Punctuation' range."""
    if "P" in unicodedata.category(character):
        return True

    containing_range: Optional[str] = unicode_range(character)
    return containing_range is not None and "Punctuation" in containing_range
+
+
@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
def is_symbol(character: str) -> bool:
    """True for symbols/numerics ('S*'/'N*' category) or a 'Forms' range."""
    category: str = unicodedata.category(character)
    if "S" in category or "N" in category:
        return True

    containing_range: Optional[str] = unicode_range(character)
    return containing_range is not None and "Forms" in containing_range
+
+
@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
def is_emoticon(character: str) -> bool:
    """True when the character lives in an 'Emoticons' Unicode range."""
    containing_range: Optional[str] = unicode_range(character)
    return containing_range is not None and "Emoticons" in containing_range
+
+
@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
def is_separator(character: str) -> bool:
    """True for whitespace, common delimiters, or Unicode separators ('Z*')."""
    if character.isspace():
        return True
    if character in {"|", "+", ",", ";", "<", ">"}:
        return True
    return "Z" in unicodedata.category(character)
+
+
@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
def is_case_variable(character: str) -> bool:
    """True when the character is cased (has distinct lower/upper forms)."""
    # For a cased character exactly one of the two predicates holds.
    return character.islower() is not character.isupper()
+
+
def is_private_use_only(character: str) -> bool:
    """True when the character belongs to a Private Use Area (category 'Co')."""
    return unicodedata.category(character) == "Co"
+
+
@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
def is_cjk(character: str) -> bool:
    """True when the character's Unicode name mentions CJK."""
    try:
        return "CJK" in unicodedata.name(character)
    except ValueError:
        # Unnamed code point.
        return False
+
+
@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
def is_hiragana(character: str) -> bool:
    """True when the character's Unicode name mentions HIRAGANA."""
    try:
        return "HIRAGANA" in unicodedata.name(character)
    except ValueError:
        # Unnamed code point.
        return False
+
+
@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
def is_katakana(character: str) -> bool:
    """True when the character's Unicode name mentions KATAKANA."""
    try:
        return "KATAKANA" in unicodedata.name(character)
    except ValueError:
        # Unnamed code point.
        return False
+
+
@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
def is_hangul(character: str) -> bool:
    """True when the character's Unicode name mentions HANGUL."""
    try:
        return "HANGUL" in unicodedata.name(character)
    except ValueError:
        # Unnamed code point.
        return False
+
+
@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
def is_thai(character: str) -> bool:
    """True when the character's Unicode name mentions THAI."""
    try:
        return "THAI" in unicodedata.name(character)
    except ValueError:
        # Unnamed code point.
        return False
+
+
@lru_cache(maxsize=len(UNICODE_RANGES_COMBINED))
def is_unicode_range_secondary(range_name: str) -> bool:
    """True when the range name carries one of the 'secondary' keywords."""
    for keyword in UNICODE_SECONDARY_RANGE_KEYWORD:
        if keyword in range_name:
            return True
    return False
+
+
@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
def is_unprintable(character: str) -> bool:
    """
    True for characters that are neither printable nor whitespace, with two
    deliberate exceptions tolerated below.
    """
    if character.isspace() or character.isprintable():
        return False
    # "\x1A" is the ASCII substitute character; tolerated on purpose.
    # "\ufeff": Zero Width No-Break Space, not acknowledged as space by Python.
    return character not in ("\x1A", "\ufeff")
+
+
def any_specified_encoding(sequence: bytes, search_zone: int = 4096) -> Optional[str]:
    """
    Extract using ASCII-only decoder any specified encoding in the first n-bytes.
    """
    if not isinstance(sequence, bytes):
        raise TypeError

    scoped: str = sequence[: min(len(sequence), search_zone)].decode(
        "ascii", errors="ignore"
    )
    candidates: List[str] = findall(RE_POSSIBLE_ENCODING_INDICATION, scoped)

    for candidate in candidates:
        candidate = candidate.lower().replace("-", "_")

        # Resolve the label against Python's alias table; return the IANA name.
        for encoding_alias, encoding_iana in aliases.items():
            if candidate in (encoding_alias, encoding_iana):
                return encoding_iana

    return None
+
+
@lru_cache(maxsize=128)
def is_multi_byte_encoding(name: str) -> bool:
    """
    Verify is a specific encoding is a multi byte one based on it IANA name
    """
    known_multi_byte = {
        "utf_8",
        "utf_8_sig",
        "utf_16",
        "utf_16_be",
        "utf_16_le",
        "utf_32",
        "utf_32_le",
        "utf_32_be",
        "utf_7",
    }
    if name in known_multi_byte:
        return True

    # Otherwise multi-byte only when backed by CPython's multibyte codec machinery.
    decoder = importlib.import_module("encodings.{}".format(name)).IncrementalDecoder
    return issubclass(decoder, MultibyteIncrementalDecoder)
+
+
def identify_sig_or_bom(sequence: bytes) -> Tuple[Optional[str], bytes]:
    """
    Identify and extract SIG/BOM in given sequence.
    """
    for iana_encoding, marks in ENCODING_MARKS.items():
        # An entry is either a single mark or a list of candidate marks.
        candidates: List[bytes] = [marks] if isinstance(marks, bytes) else list(marks)

        for mark in candidates:
            if sequence.startswith(mark):
                return iana_encoding, mark

    return None, b""
+
+
def should_strip_sig_or_bom(iana_encoding: str) -> bool:
    """True unless the IANA name is utf_16 or utf_32, whose mark is kept."""
    return iana_encoding != "utf_16" and iana_encoding != "utf_32"
+
+
def iana_name(cp_name: str, strict: bool = True) -> str:
    """
    Normalize an encoding label to the IANA-style name known by Python's alias table.

    Raise ValueError when *strict* and the label is unknown; otherwise return
    the normalized label unchanged.
    """
    normalized: str = cp_name.lower().replace("-", "_")

    for encoding_alias, encoding_iana in aliases.items():
        if normalized in (encoding_alias, encoding_iana):
            return encoding_iana

    if strict:
        raise ValueError("Unable to retrieve IANA for '{}'".format(normalized))

    return normalized
+
+
def range_scan(decoded_sequence: str) -> List[str]:
    """Return the distinct Unicode range names covered by the given string."""
    found: Set[str] = {
        r for r in map(unicode_range, decoded_sequence) if r is not None
    }
    return list(found)
+
+
def cp_similarity(iana_name_a: str, iana_name_b: str) -> float:
    """
    Ratio of single byte values that two code pages decode identically.
    Multi-byte encodings are never considered similar.
    """
    if is_multi_byte_encoding(iana_name_a) or is_multi_byte_encoding(iana_name_b):
        return 0.0

    id_a: IncrementalDecoder = importlib.import_module(
        "encodings.{}".format(iana_name_a)
    ).IncrementalDecoder(errors="ignore")
    id_b: IncrementalDecoder = importlib.import_module(
        "encodings.{}".format(iana_name_b)
    ).IncrementalDecoder(errors="ignore")

    character_match_count: int = sum(
        id_a.decode(bytes([code])) == id_b.decode(bytes([code]))
        for code in range(255)
    )

    # NOTE(review): 255 byte values are compared but the divisor is 254 —
    # kept as-is because is_cp_similar states IANA_SUPPORTED_SIMILAR was
    # generated with this exact function.
    return character_match_count / 254
+
+
def is_cp_similar(iana_name_a: str, iana_name_b: str) -> bool:
    """
    Determine if two code page are at least 80% similar. IANA_SUPPORTED_SIMILAR dict was generated using
    the function cp_similarity.
    """
    if iana_name_a not in IANA_SUPPORTED_SIMILAR:
        return False
    return iana_name_b in IANA_SUPPORTED_SIMILAR[iana_name_a]
+
+
def set_logging_handler(
    name: str = "charset_normalizer",
    level: int = logging.INFO,
    format_string: str = "%(asctime)s | %(levelname)s | %(message)s",
) -> None:
    """Attach a StreamHandler with the given format to the named logger."""
    target_logger = logging.getLogger(name)
    target_logger.setLevel(level)

    stream_handler = logging.StreamHandler()
    stream_handler.setFormatter(logging.Formatter(format_string))
    target_logger.addHandler(stream_handler)
+
+
def cut_sequence_chunks(
    sequences: bytes,
    encoding_iana: str,
    offsets: range,
    chunk_size: int,
    bom_or_sig_available: bool,
    strip_sig_or_bom: bool,
    sig_payload: bytes,
    is_multi_byte_decoder: bool,
    decoded_payload: Optional[str] = None,
) -> Generator[str, None, None]:
    """
    Yield successive decoded chunks of *sequences* at the given offsets.

    When a fully decoded payload is available and the codec is single-byte,
    chunks are sliced directly from it; otherwise each chunk is decoded
    independently, re-prepending the BOM/SIG when it must be kept.
    """
    if decoded_payload and is_multi_byte_decoder is False:
        # Fast path: slice the pre-decoded text directly.
        for i in offsets:
            chunk = decoded_payload[i : i + chunk_size]
            if not chunk:
                break
            yield chunk
    else:
        for i in offsets:
            chunk_end = i + chunk_size
            # Skip offsets whose chunk would extend well past the end of the data.
            if chunk_end > len(sequences) + 8:
                continue

            cut_sequence = sequences[i : i + chunk_size]

            if bom_or_sig_available and strip_sig_or_bom is False:
                cut_sequence = sig_payload + cut_sequence

            chunk = cut_sequence.decode(
                encoding_iana,
                errors="ignore" if is_multi_byte_decoder else "strict",
            )

            # multi-byte bad cutting detector and adjustment
            # not the cleanest way to perform that fix but clever enough for now.
            if is_multi_byte_decoder and i > 0:
                chunk_partial_size_chk: int = min(chunk_size, 16)

                if (
                    decoded_payload
                    and chunk[:chunk_partial_size_chk] not in decoded_payload
                ):
                    # Nudge the start backwards (up to 3 bytes) until the chunk's
                    # prefix realigns with the known decoded payload.
                    for j in range(i, i - 4, -1):
                        cut_sequence = sequences[j:chunk_end]

                        if bom_or_sig_available and strip_sig_or_bom is False:
                            cut_sequence = sig_payload + cut_sequence

                        chunk = cut_sequence.decode(encoding_iana, errors="ignore")

                        if chunk[:chunk_partial_size_chk] in decoded_payload:
                            break

            yield chunk
diff --git a/Voltage Vikings/lib/python3.9/site-packages/charset_normalizer/version.py b/Voltage Vikings/lib/python3.9/site-packages/charset_normalizer/version.py
new file mode 100644
index 00000000..b74c2643
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/charset_normalizer/version.py
@@ -0,0 +1,6 @@
+"""
+Expose version
+"""
+
+__version__ = "3.1.0"
+VERSION = __version__.split(".")
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography-40.0.2.dist-info/INSTALLER b/Voltage Vikings/lib/python3.9/site-packages/cryptography-40.0.2.dist-info/INSTALLER
new file mode 100644
index 00000000..a1b589e3
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography-40.0.2.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography-40.0.2.dist-info/LICENSE b/Voltage Vikings/lib/python3.9/site-packages/cryptography-40.0.2.dist-info/LICENSE
new file mode 100644
index 00000000..07074259
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography-40.0.2.dist-info/LICENSE
@@ -0,0 +1,6 @@
+This software is made available under the terms of *either* of the licenses
+found in LICENSE.APACHE or LICENSE.BSD. Contributions to cryptography are made
+under the terms of *both* these licenses.
+
+The code used in the OS random engine is derived from CPython, and is licensed
+under the terms of the PSF License Agreement.
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography-40.0.2.dist-info/LICENSE.APACHE b/Voltage Vikings/lib/python3.9/site-packages/cryptography-40.0.2.dist-info/LICENSE.APACHE
new file mode 100644
index 00000000..62589edd
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography-40.0.2.dist-info/LICENSE.APACHE
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ https://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ https://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography-40.0.2.dist-info/LICENSE.BSD b/Voltage Vikings/lib/python3.9/site-packages/cryptography-40.0.2.dist-info/LICENSE.BSD
new file mode 100644
index 00000000..ec1a29d3
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography-40.0.2.dist-info/LICENSE.BSD
@@ -0,0 +1,27 @@
+Copyright (c) Individual contributors.
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+ 1. Redistributions of source code must retain the above copyright notice,
+ this list of conditions and the following disclaimer.
+
+ 2. Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+
+ 3. Neither the name of PyCA Cryptography nor the names of its contributors
+ may be used to endorse or promote products derived from this software
+ without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography-40.0.2.dist-info/LICENSE.PSF b/Voltage Vikings/lib/python3.9/site-packages/cryptography-40.0.2.dist-info/LICENSE.PSF
new file mode 100644
index 00000000..4d3a4f57
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography-40.0.2.dist-info/LICENSE.PSF
@@ -0,0 +1,41 @@
+1. This LICENSE AGREEMENT is between the Python Software Foundation ("PSF"), and
+ the Individual or Organization ("Licensee") accessing and otherwise using Python
+ 2.7.12 software in source or binary form and its associated documentation.
+
+2. Subject to the terms and conditions of this License Agreement, PSF hereby
+ grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
+ analyze, test, perform and/or display publicly, prepare derivative works,
+ distribute, and otherwise use Python 2.7.12 alone or in any derivative
+ version, provided, however, that PSF's License Agreement and PSF's notice of
+ copyright, i.e., "Copyright © 2001-2016 Python Software Foundation; All Rights
+ Reserved" are retained in Python 2.7.12 alone or in any derivative version
+ prepared by Licensee.
+
+3. In the event Licensee prepares a derivative work that is based on or
+ incorporates Python 2.7.12 or any part thereof, and wants to make the
+ derivative work available to others as provided herein, then Licensee hereby
+ agrees to include in any such work a brief summary of the changes made to Python
+ 2.7.12.
+
+4. PSF is making Python 2.7.12 available to Licensee on an "AS IS" basis.
+ PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR IMPLIED. BY WAY OF
+ EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND DISCLAIMS ANY REPRESENTATION OR
+ WARRANTY OF MERCHANTABILITY OR FITNESS FOR ANY PARTICULAR PURPOSE OR THAT THE
+ USE OF PYTHON 2.7.12 WILL NOT INFRINGE ANY THIRD PARTY RIGHTS.
+
+5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON 2.7.12
+ FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS A RESULT OF
+ MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 2.7.12, OR ANY DERIVATIVE
+ THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+6. This License Agreement will automatically terminate upon a material breach of
+ its terms and conditions.
+
+7. Nothing in this License Agreement shall be deemed to create any relationship
+ of agency, partnership, or joint venture between PSF and Licensee. This License
+ Agreement does not grant permission to use PSF trademarks or trade name in a
+ trademark sense to endorse or promote products or services of Licensee, or any
+ third party.
+
+8. By copying, installing or otherwise using Python 2.7.12, Licensee agrees
+ to be bound by the terms and conditions of this License Agreement.
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography-40.0.2.dist-info/METADATA b/Voltage Vikings/lib/python3.9/site-packages/cryptography-40.0.2.dist-info/METADATA
new file mode 100644
index 00000000..9ec3e34f
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography-40.0.2.dist-info/METADATA
@@ -0,0 +1,139 @@
+Metadata-Version: 2.1
+Name: cryptography
+Version: 40.0.2
+Summary: cryptography is a package which provides cryptographic recipes and primitives to Python developers.
+Home-page: https://github.com/pyca/cryptography
+Author: The Python Cryptographic Authority and individual contributors
+Author-email: cryptography-dev@python.org
+License: (Apache-2.0 OR BSD-3-Clause) AND PSF-2.0
+Project-URL: Documentation, https://cryptography.io/
+Project-URL: Source, https://github.com/pyca/cryptography/
+Project-URL: Issues, https://github.com/pyca/cryptography/issues
+Project-URL: Changelog, https://cryptography.io/en/latest/changelog/
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Natural Language :: English
+Classifier: Operating System :: MacOS :: MacOS X
+Classifier: Operating System :: POSIX
+Classifier: Operating System :: POSIX :: BSD
+Classifier: Operating System :: POSIX :: Linux
+Classifier: Operating System :: Microsoft :: Windows
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Topic :: Security :: Cryptography
+Requires-Python: >=3.6
+Description-Content-Type: text/x-rst
+License-File: LICENSE
+License-File: LICENSE.APACHE
+License-File: LICENSE.BSD
+License-File: LICENSE.PSF
+Requires-Dist: cffi (>=1.12)
+Provides-Extra: docs
+Requires-Dist: sphinx (>=5.3.0) ; extra == 'docs'
+Requires-Dist: sphinx-rtd-theme (>=1.1.1) ; extra == 'docs'
+Provides-Extra: docstest
+Requires-Dist: pyenchant (>=1.6.11) ; extra == 'docstest'
+Requires-Dist: twine (>=1.12.0) ; extra == 'docstest'
+Requires-Dist: sphinxcontrib-spelling (>=4.0.1) ; extra == 'docstest'
+Provides-Extra: pep8test
+Requires-Dist: black ; extra == 'pep8test'
+Requires-Dist: ruff ; extra == 'pep8test'
+Requires-Dist: mypy ; extra == 'pep8test'
+Requires-Dist: check-manifest ; extra == 'pep8test'
+Provides-Extra: sdist
+Requires-Dist: setuptools-rust (>=0.11.4) ; extra == 'sdist'
+Provides-Extra: ssh
+Requires-Dist: bcrypt (>=3.1.5) ; extra == 'ssh'
+Provides-Extra: test
+Requires-Dist: pytest (>=6.2.0) ; extra == 'test'
+Requires-Dist: pytest-shard (>=0.1.2) ; extra == 'test'
+Requires-Dist: pytest-benchmark ; extra == 'test'
+Requires-Dist: pytest-cov ; extra == 'test'
+Requires-Dist: pytest-subtests ; extra == 'test'
+Requires-Dist: pytest-xdist ; extra == 'test'
+Requires-Dist: pretend ; extra == 'test'
+Requires-Dist: iso8601 ; extra == 'test'
+Provides-Extra: test-randomorder
+Requires-Dist: pytest-randomly ; extra == 'test-randomorder'
+Provides-Extra: tox
+Requires-Dist: tox ; extra == 'tox'
+
+pyca/cryptography
+=================
+
+.. image:: https://img.shields.io/pypi/v/cryptography.svg
+ :target: https://pypi.org/project/cryptography/
+ :alt: Latest Version
+
+.. image:: https://readthedocs.org/projects/cryptography/badge/?version=latest
+ :target: https://cryptography.io
+ :alt: Latest Docs
+
+.. image:: https://github.com/pyca/cryptography/workflows/CI/badge.svg?branch=main
+ :target: https://github.com/pyca/cryptography/actions?query=workflow%3ACI+branch%3Amain
+
+
+``cryptography`` is a package which provides cryptographic recipes and
+primitives to Python developers. Our goal is for it to be your "cryptographic
+standard library". It supports Python 3.6+ and PyPy3 7.3.10+.
+
+``cryptography`` includes both high level recipes and low level interfaces to
+common cryptographic algorithms such as symmetric ciphers, message digests, and
+key derivation functions. For example, to encrypt something with
+``cryptography``'s high level symmetric encryption recipe:
+
+.. code-block:: pycon
+
+ >>> from cryptography.fernet import Fernet
+ >>> # Put this somewhere safe!
+ >>> key = Fernet.generate_key()
+ >>> f = Fernet(key)
+ >>> token = f.encrypt(b"A really secret message. Not for prying eyes.")
+ >>> token
+ b'...'
+ >>> f.decrypt(token)
+ b'A really secret message. Not for prying eyes.'
+
+You can find more information in the `documentation`_.
+
+You can install ``cryptography`` with:
+
+.. code-block:: console
+
+ $ pip install cryptography
+
+For full details see `the installation documentation`_.
+
+Discussion
+~~~~~~~~~~
+
+If you run into bugs, you can file them in our `issue tracker`_.
+
+We maintain a `cryptography-dev`_ mailing list for development discussion.
+
+You can also join ``#pyca`` on ``irc.libera.chat`` to ask questions or get
+involved.
+
+Security
+~~~~~~~~
+
+Need to report a security issue? Please consult our `security reporting`_
+documentation.
+
+
+.. _`documentation`: https://cryptography.io/
+.. _`the installation documentation`: https://cryptography.io/en/latest/installation/
+.. _`issue tracker`: https://github.com/pyca/cryptography/issues
+.. _`cryptography-dev`: https://mail.python.org/mailman/listinfo/cryptography-dev
+.. _`security reporting`: https://cryptography.io/en/latest/security/
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography-40.0.2.dist-info/RECORD b/Voltage Vikings/lib/python3.9/site-packages/cryptography-40.0.2.dist-info/RECORD
new file mode 100644
index 00000000..f7b8cdc3
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography-40.0.2.dist-info/RECORD
@@ -0,0 +1,179 @@
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/cryptography/__about__.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/cryptography/__init__.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/cryptography/exceptions.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/cryptography/fernet.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/cryptography/hazmat/__init__.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/cryptography/hazmat/_oid.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/cryptography/hazmat/backends/__init__.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/cryptography/hazmat/backends/openssl/__init__.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/cryptography/hazmat/backends/openssl/aead.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/cryptography/hazmat/backends/openssl/backend.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/cryptography/hazmat/backends/openssl/ciphers.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/cryptography/hazmat/backends/openssl/cmac.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/cryptography/hazmat/backends/openssl/decode_asn1.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/cryptography/hazmat/backends/openssl/dh.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/cryptography/hazmat/backends/openssl/dsa.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/cryptography/hazmat/backends/openssl/ec.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/cryptography/hazmat/backends/openssl/ed25519.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/cryptography/hazmat/backends/openssl/ed448.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/cryptography/hazmat/backends/openssl/hashes.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/cryptography/hazmat/backends/openssl/hmac.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/cryptography/hazmat/backends/openssl/poly1305.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/cryptography/hazmat/backends/openssl/rsa.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/cryptography/hazmat/backends/openssl/utils.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/cryptography/hazmat/backends/openssl/x448.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/cryptography/hazmat/bindings/__init__.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/cryptography/hazmat/bindings/openssl/__init__.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/cryptography/hazmat/bindings/openssl/_conditional.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/cryptography/hazmat/bindings/openssl/binding.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/cryptography/hazmat/primitives/__init__.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/cryptography/hazmat/primitives/_asymmetric.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/cryptography/hazmat/primitives/_cipheralgorithm.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/cryptography/hazmat/primitives/_serialization.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/__init__.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/dh.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/dsa.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/ec.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/ed25519.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/ed448.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/padding.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/rsa.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/types.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/utils.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/x25519.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/x448.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/cryptography/hazmat/primitives/ciphers/__init__.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/cryptography/hazmat/primitives/ciphers/aead.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/cryptography/hazmat/primitives/ciphers/algorithms.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/cryptography/hazmat/primitives/ciphers/base.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/cryptography/hazmat/primitives/ciphers/modes.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/cryptography/hazmat/primitives/cmac.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/cryptography/hazmat/primitives/constant_time.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/cryptography/hazmat/primitives/hashes.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/cryptography/hazmat/primitives/hmac.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/cryptography/hazmat/primitives/kdf/__init__.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/cryptography/hazmat/primitives/kdf/concatkdf.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/cryptography/hazmat/primitives/kdf/hkdf.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/cryptography/hazmat/primitives/kdf/kbkdf.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/cryptography/hazmat/primitives/kdf/pbkdf2.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/cryptography/hazmat/primitives/kdf/scrypt.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/cryptography/hazmat/primitives/kdf/x963kdf.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/cryptography/hazmat/primitives/keywrap.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/cryptography/hazmat/primitives/padding.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/cryptography/hazmat/primitives/poly1305.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/cryptography/hazmat/primitives/serialization/__init__.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/cryptography/hazmat/primitives/serialization/base.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/cryptography/hazmat/primitives/serialization/pkcs12.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/cryptography/hazmat/primitives/serialization/pkcs7.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/cryptography/hazmat/primitives/serialization/ssh.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/cryptography/hazmat/primitives/twofactor/__init__.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/cryptography/hazmat/primitives/twofactor/hotp.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/cryptography/hazmat/primitives/twofactor/totp.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/cryptography/utils.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/cryptography/x509/__init__.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/cryptography/x509/base.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/cryptography/x509/certificate_transparency.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/cryptography/x509/extensions.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/cryptography/x509/general_name.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/cryptography/x509/name.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/cryptography/x509/ocsp.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/cryptography/x509/oid.cpython-39.pyc,,
+cryptography-40.0.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+cryptography-40.0.2.dist-info/LICENSE,sha256=Q9rSzHUqtyHNmp827OcPtTq3cTVR8tPYaU2OjFoG1uI,323
+cryptography-40.0.2.dist-info/LICENSE.APACHE,sha256=qsc7MUj20dcRHbyjIJn2jSbGRMaBOuHk8F9leaomY_4,11360
+cryptography-40.0.2.dist-info/LICENSE.BSD,sha256=YCxMdILeZHndLpeTzaJ15eY9dz2s0eymiSMqtwCPtPs,1532
+cryptography-40.0.2.dist-info/LICENSE.PSF,sha256=aT7ApmKzn5laTyUrA6YiKUVHDBtvEsoCkY5O_g32S58,2415
+cryptography-40.0.2.dist-info/METADATA,sha256=4vr_Ul75nFZ67oS_cZnlG8AlL_7Vf22dXzFkq5RyGI4,5429
+cryptography-40.0.2.dist-info/RECORD,,
+cryptography-40.0.2.dist-info/WHEEL,sha256=z0IicLszzw49wRGUWpQ-tg9c2xoB_vTQrgq52nbV4V0,114
+cryptography-40.0.2.dist-info/top_level.txt,sha256=KNaT-Sn2K4uxNaEbe6mYdDn3qWDMlp4y-MtWfB73nJc,13
+cryptography/__about__.py,sha256=JBoadGQdL7H57fU7gVI4SwOFyL2HeX-BG9g5NeifhL0,409
+cryptography/__init__.py,sha256=v4pF_XcZ6sp_b7YlfP2eJA4lNCckeH2NCzN6WYXNnEc,759
+cryptography/exceptions.py,sha256=GNQJUZ9hpKs2ISYLQfA3FlwGjuWRTdhpsSXLmOex6j4,1405
+cryptography/fernet.py,sha256=qO4sQurx79k-5yOh4UnUZGm51zod0wRXJchz0l063To,6851
+cryptography/hazmat/__init__.py,sha256=OYlvgprzULzZlsf3yYTsd6VUVyQmpsbHjgJdNnsyRwE,418
+cryptography/hazmat/_oid.py,sha256=rCvnwb0z0VCKn7Y92IEQAoPErrANWREydYflZSNRrao,14155
+cryptography/hazmat/backends/__init__.py,sha256=bgrjB1SX2vXX-rmfG7A4PqGkq-isqQVXGaZtjWHAgj0,324
+cryptography/hazmat/backends/openssl/__init__.py,sha256=oCa7eZbqvHsQ1pBeD_OOfnGxVaZbCfWnAKnHqOyPf1c,270
+cryptography/hazmat/backends/openssl/aead.py,sha256=wzIsASMPfFuTEoCPSbfbuN67BglLdvENTft8j-fQDOM,10025
+cryptography/hazmat/backends/openssl/backend.py,sha256=ENYxxus4QtRhQFJBN5ezgKgris87MGj8nzVhcuE3Eyg,91919
+cryptography/hazmat/backends/openssl/ciphers.py,sha256=uCd2tiwF_-wdjgr2GSMHB6o6EKutHFh053feOjJkKWg,10346
+cryptography/hazmat/backends/openssl/cmac.py,sha256=cFZtDpqN5PNzo1X9tm8N8WDV5X81GRFXuXRUsjyFtF4,3005
+cryptography/hazmat/backends/openssl/decode_asn1.py,sha256=nSqtgO5MJVf_UUkvw9tez10zhGnsGHq24OP1X2GKOe4,1113
+cryptography/hazmat/backends/openssl/dh.py,sha256=1CNiNiqxAhzwfzaJcFT0M1vta_iEKPkiyO36ercd1dw,12186
+cryptography/hazmat/backends/openssl/dsa.py,sha256=SQwoCTiNHrWjDQOFag3GznWG5K9CWM1AizqJ4usTRbY,8927
+cryptography/hazmat/backends/openssl/ec.py,sha256=Wh83LtvxRfPHC-ZIxSUuCOFqIx1KT_jeC9gwCtLr1fY,11197
+cryptography/hazmat/backends/openssl/ed25519.py,sha256=adWaawleloe9T0BctejcclybE51dwb-CmL_b0f6zBiU,5921
+cryptography/hazmat/backends/openssl/ed448.py,sha256=Ja_GMzDBcs_8N2PpmU2dd6sszbJh3xP-TrN88MkQLBI,5875
+cryptography/hazmat/backends/openssl/hashes.py,sha256=yFuHeO8qDPRbH2B9JJtW51wEVfhu11SFs3lhHBHGyPA,3240
+cryptography/hazmat/backends/openssl/hmac.py,sha256=mN7irlzO6Rbc3UIDqlySwaW5KoCn28N8gKS3lh9WEUg,3094
+cryptography/hazmat/backends/openssl/poly1305.py,sha256=Oivx5k9DcAU_BSySxEQiw5tE1pcz-ljmFpmXAPZqJrI,2513
+cryptography/hazmat/backends/openssl/rsa.py,sha256=zrFVhttn-pc8HHmRZjR42z-XinFRvBZTftGLrPjqMMA,21580
+cryptography/hazmat/backends/openssl/utils.py,sha256=VZHD8U8p3G00LyeS0ImY36iu7TC0RW7nx9f2BCOAyQs,2156
+cryptography/hazmat/backends/openssl/x448.py,sha256=6tZgh44ipS_UWJ6amueXxc8xIXdIfFtdpvnhri-oxXs,4339
+cryptography/hazmat/bindings/__init__.py,sha256=s9oKCQ2ycFdXoERdS1imafueSkBsL9kvbyfghaauZ9Y,180
+cryptography/hazmat/bindings/_rust.abi3.so,sha256=Gt2626Ns8ejVh0VX-WYqD4asaaKKbvDvsrdiwH0ftf8,15340458
+cryptography/hazmat/bindings/_rust/__init__.pyi,sha256=IumK7zP9Ko3HjLLb5hwZiY2rbfmfsuyTZLLcHOMvSdk,981
+cryptography/hazmat/bindings/_rust/_openssl.pyi,sha256=mpNJLuYLbCVrd5i33FBTmWwL_55Dw7JPkSLlSX9Q7oI,230
+cryptography/hazmat/bindings/_rust/asn1.pyi,sha256=9CyI-grOsLQB_hfnhJPoG9dNOdJ7Zg6B0iUpzCowh44,592
+cryptography/hazmat/bindings/_rust/ocsp.pyi,sha256=RzVaLkY0y9L8W8opAL_uVD8bySKxP23pSQtEbLOStXI,905
+cryptography/hazmat/bindings/_rust/openssl/__init__.pyi,sha256=sSz-RQXVQZ5EDbmEr0e5Km4OqrBKxHXUQwUQmRRkfdw,701
+cryptography/hazmat/bindings/_rust/openssl/x25519.pyi,sha256=-1F5QDZfrdhmDLKTeSERuuDUHBTV-EhxIYk9mjpwcG4,616
+cryptography/hazmat/bindings/_rust/pkcs7.pyi,sha256=VkTC78wjJgb_qrboOYIFPuFZ3W46zsr6zsxnlrOMwao,460
+cryptography/hazmat/bindings/_rust/x509.pyi,sha256=RaSbjBtObgnM66n1IudB34cFXrXamNpk_b2agiT99qE,1743
+cryptography/hazmat/bindings/openssl/__init__.py,sha256=s9oKCQ2ycFdXoERdS1imafueSkBsL9kvbyfghaauZ9Y,180
+cryptography/hazmat/bindings/openssl/_conditional.py,sha256=uruUpaLLjgG5z2sckNFxS5TyJRhLcQ4zacklfdiEo8A,9165
+cryptography/hazmat/bindings/openssl/binding.py,sha256=2tiCAZziG2bMsa9Ke05hYY8EAiyczxOrvoCMFS_Ly38,7893
+cryptography/hazmat/primitives/__init__.py,sha256=s9oKCQ2ycFdXoERdS1imafueSkBsL9kvbyfghaauZ9Y,180
+cryptography/hazmat/primitives/_asymmetric.py,sha256=QacvnyA1fcXWbSAASCiodHVcTYwkaMdzq6KUIlaO7H0,496
+cryptography/hazmat/primitives/_cipheralgorithm.py,sha256=TAlnDCAdYaa23-mb0TTbFLFhWwfdBF1DtXQdY9Koqf0,1057
+cryptography/hazmat/primitives/_serialization.py,sha256=r2ECtWEJ3JEgSpGkpaZrMfGhoQWdTHIn4gyLCB71fMg,5188
+cryptography/hazmat/primitives/asymmetric/__init__.py,sha256=s9oKCQ2ycFdXoERdS1imafueSkBsL9kvbyfghaauZ9Y,180
+cryptography/hazmat/primitives/asymmetric/dh.py,sha256=qm9VWIMPIaWUlCxlscb5bAM2aCe11fu-y85Y6nYjC7I,6619
+cryptography/hazmat/primitives/asymmetric/dsa.py,sha256=JufsxrrxeJQlsiWMmx_44l90FNRw19o9kcKtk4rO8TU,7885
+cryptography/hazmat/primitives/asymmetric/ec.py,sha256=CdxppDV1lV2QlrQ0EhniqvFi8wp8PDYsvFWdpzyyVIY,12725
+cryptography/hazmat/primitives/asymmetric/ed25519.py,sha256=MqgOJFdMOXcMBJ-b84tJYOOkORL9xmEUHwCpVpa1k2o,3344
+cryptography/hazmat/primitives/asymmetric/ed448.py,sha256=6XjKKEvLQUzZgOFyLZCRkK4Tl0sKMrxfL8CYx8_omxM,3264
+cryptography/hazmat/primitives/asymmetric/padding.py,sha256=EkKuY9e6UFqSuQ0LvyKYKl_L19tOfNCTlHWEiKgHeUc,2690
+cryptography/hazmat/primitives/asymmetric/rsa.py,sha256=njFky5AkSrsBh47PeVLjj81SOLOiZaxAUSzGWD2Znxw,11479
+cryptography/hazmat/primitives/asymmetric/types.py,sha256=_etLWzFIYf01_NHTi3lg5q593wckK2LXxAK_SF94Dpk,2960
+cryptography/hazmat/primitives/asymmetric/utils.py,sha256=p6nF7EzF0sp5GYFTw1HEhPYYjuTik53WTUkvuPIfDRk,755
+cryptography/hazmat/primitives/asymmetric/x25519.py,sha256=H9gXtrvoO8qJutrDJ-rQNW1kjdbydkp6MD3PWxDWDiQ,3289
+cryptography/hazmat/primitives/asymmetric/x448.py,sha256=u9Ma5viyGMVjil4tv9GKsBxcT0rikom9MigjoJ3OgQ4,3189
+cryptography/hazmat/primitives/ciphers/__init__.py,sha256=2K5I_haxK0BLNqSZcQUqcjf8FmHY8xV1U-XjfgUmkM8,645
+cryptography/hazmat/primitives/ciphers/aead.py,sha256=c1wfOJ5alX-pDkWxz9cSq5M7ug2CtNc6gCovyMoy_bY,12032
+cryptography/hazmat/primitives/ciphers/algorithms.py,sha256=vR1tcKRpaYbQhv3dLIiQNmaZpq7x9kLCdUvOgrWIG0I,4965
+cryptography/hazmat/primitives/ciphers/base.py,sha256=RYTyXd8gXgBggdEl0cWcEnx6mkd1YsdRT_3tLEV4iGU,8269
+cryptography/hazmat/primitives/ciphers/modes.py,sha256=U_flGFnHih452GO9X66BSN8U9xUmYh12wB6FFEIk6Xc,8326
+cryptography/hazmat/primitives/cmac.py,sha256=ZbpwI87EhO3maiwqzttN1z0ObsAO1ufnl2Px5b9uJ1c,2036
+cryptography/hazmat/primitives/constant_time.py,sha256=6bkW00QjhKusdgsQbexXhMlGX0XRN59XNmxWS2W38NA,387
+cryptography/hazmat/primitives/hashes.py,sha256=cwMQYC0An0UOVTFWqeDIXiokSBorSh4BwHKSWxz8HB0,6041
+cryptography/hazmat/primitives/hmac.py,sha256=pKiyxmJVcixW7Xk7w4ofde6Z7F8UohqGZa01PoxRotc,2122
+cryptography/hazmat/primitives/kdf/__init__.py,sha256=DcZhzfLG8d8IYBH771lGTVU5S87OQDpu3nrfOwZnsmA,715
+cryptography/hazmat/primitives/kdf/concatkdf.py,sha256=giEvBnD5eAB6ymUcYKSgd_2D_qRznOUSaplS1CQoE_A,3691
+cryptography/hazmat/primitives/kdf/hkdf.py,sha256=cKBjtCVaKVRhUzbNK99HDEyWUxvPwSfOIygKAMyyb3g,3010
+cryptography/hazmat/primitives/kdf/kbkdf.py,sha256=Ys2ITSbEw49V1v_DagQBd17owQr2A2iyPue4mot4Z_g,9196
+cryptography/hazmat/primitives/kdf/pbkdf2.py,sha256=wEMH4CJfPccCg9apQLXyWUWBrZLTpYLLnoZEnzvaHQo,2032
+cryptography/hazmat/primitives/kdf/scrypt.py,sha256=Wt7jj51vsedNtQX-LZI41geqUZnBFYnrhOXpoheLsOM,2227
+cryptography/hazmat/primitives/kdf/x963kdf.py,sha256=bDhxg0gllboQ--a9gdDaiTbO6XMemZPlu3TW91dRp8o,1967
+cryptography/hazmat/primitives/keywrap.py,sha256=TWqyG9K7k-Ymq4kcIw7u3NIKUPVDtv6bimwxIJYTe20,5643
+cryptography/hazmat/primitives/padding.py,sha256=xruasOE5Cd8KEQ-yp9W6v9WKPvKH-GudHCPKQ7A8HfI,6207
+cryptography/hazmat/primitives/poly1305.py,sha256=QvxPMrqjgKJt0mOZSeZKk4NcxsNCd2kgfI-X1CmyUW4,1837
+cryptography/hazmat/primitives/serialization/__init__.py,sha256=G-BRfGpQzYrRf5r9QS9BN7QdgpF1k5cLCfdlMH_Z0yw,1618
+cryptography/hazmat/primitives/serialization/base.py,sha256=5ebkgRqVHpVOST3OkcWkdoqo-0sVfdIuZUoyL8tj0es,1955
+cryptography/hazmat/primitives/serialization/pkcs12.py,sha256=p8ZWAxNEat-MRov2JaRR3TUAtcp8MAE6v4DDodNQURA,6731
+cryptography/hazmat/primitives/serialization/pkcs7.py,sha256=AeyFKpvoll-AUHkLKMGh5lZg7xGwVWl9Y6fXyxdhXFs,7362
+cryptography/hazmat/primitives/serialization/ssh.py,sha256=Tgt8fK1F4OyZngYPINW2zor8iUosv2yrQ7qLlJrztlo,48438
+cryptography/hazmat/primitives/twofactor/__init__.py,sha256=ZHo4zwWidFP2RWFl8luiNuYkVMZPghzx54izPNSCtD4,222
+cryptography/hazmat/primitives/twofactor/hotp.py,sha256=LLMBoHczZen-hfUd6NmECPjGIc5kyys7pvbHiZOpkKE,2977
+cryptography/hazmat/primitives/twofactor/totp.py,sha256=hEsH9rd8AdTEjVjswPrB9HzRiZZSV9qfpJDmyEPppg4,1437
+cryptography/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+cryptography/utils.py,sha256=y3KjdVAv2vW_42r3TJs4YGq9IMPWAjQhamV9UzPU31k,3981
+cryptography/x509/__init__.py,sha256=4WL6dxMLEWM9Wa9f_SpNbwcQNg76dut5zigRcHpRoTA,7719
+cryptography/x509/base.py,sha256=N24nEkrizgtWi5Acd1M9gCbyp8mGdw5srTq_wgTkEzo,34966
+cryptography/x509/certificate_transparency.py,sha256=jkjOvVu8bS5ljHov2AWdWScENQxylmDgESk01koC0Rs,2226
+cryptography/x509/extensions.py,sha256=XWx_WnFCu4s_GA_aalW3OZwhjkyttW6OGJsBaz3IDbs,65516
+cryptography/x509/general_name.py,sha256=EExe3dR0lBj6V8i4R_nEhj-Vj1B0UIRmRil3wqMBaDA,7853
+cryptography/x509/name.py,sha256=krFYM8XyZrEHMDFChMwVDd3D-5cq40VmSWrZF2lqXZc,14821
+cryptography/x509/ocsp.py,sha256=gfVQzFPPmUh8SYzVX000GeWNSLka6EYq3AUBvANTk8c,18513
+cryptography/x509/oid.py,sha256=dAllMplMi_Kc_lEiQKnSM-rTN5w--a1UZucV-HvQOb0,793
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography-40.0.2.dist-info/WHEEL b/Voltage Vikings/lib/python3.9/site-packages/cryptography-40.0.2.dist-info/WHEEL
new file mode 100644
index 00000000..66164400
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography-40.0.2.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.40.0)
+Root-Is-Purelib: false
+Tag: cp36-abi3-macosx_10_12_universal2
+
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography-40.0.2.dist-info/top_level.txt b/Voltage Vikings/lib/python3.9/site-packages/cryptography-40.0.2.dist-info/top_level.txt
new file mode 100644
index 00000000..0d38bc5e
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography-40.0.2.dist-info/top_level.txt
@@ -0,0 +1 @@
+cryptography
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/__about__.py b/Voltage Vikings/lib/python3.9/site-packages/cryptography/__about__.py
new file mode 100644
index 00000000..ce522d5e
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/__about__.py
@@ -0,0 +1,15 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+
+# Public metadata for the installed package; re-exported by
+# cryptography/__init__.py.
+__all__ = [
+    "__version__",
+    "__author__",
+    "__copyright__",
+]
+
+__version__ = "40.0.2"
+
+__author__ = "The Python Cryptographic Authority and individual contributors"
+__copyright__ = f"Copyright 2013-2023 {__author__}"
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/__init__.py b/Voltage Vikings/lib/python3.9/site-packages/cryptography/__init__.py
new file mode 100644
index 00000000..7f8a25c6
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/__init__.py
@@ -0,0 +1,24 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import sys
+import warnings
+
+from cryptography.__about__ import __author__, __copyright__, __version__
+from cryptography.utils import CryptographyDeprecationWarning
+
+# Only the package metadata is part of the public API of this module.
+__all__ = [
+    "__version__",
+    "__author__",
+    "__copyright__",
+]
+
+# Warn once at import time on Python 3.6: the next release drops 3.6
+# support.  stacklevel=2 attributes the warning to the importing module
+# rather than to this file.
+if sys.version_info[:2] == (3, 6):
+    warnings.warn(
+        "Python 3.6 is no longer supported by the Python core team. "
+        "Therefore, support for it is deprecated in cryptography. The next "
+        "release of cryptography will remove support for Python 3.6.",
+        CryptographyDeprecationWarning,
+        stacklevel=2,
+    )
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/exceptions.py b/Voltage Vikings/lib/python3.9/site-packages/cryptography/exceptions.py
new file mode 100644
index 00000000..5e69c119
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/exceptions.py
@@ -0,0 +1,66 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+
+import typing
+
+from cryptography import utils
+
+if typing.TYPE_CHECKING:
+ from cryptography.hazmat.bindings._rust import openssl as rust_openssl
+
+
+class _Reasons(utils.Enum):
+    """Internal enum categorizing why an ``UnsupportedAlgorithm`` was raised.
+
+    Private (leading underscore): values are stored on
+    ``UnsupportedAlgorithm._reason`` and are not part of the public API.
+    """
+
+    BACKEND_MISSING_INTERFACE = 0
+    UNSUPPORTED_HASH = 1
+    UNSUPPORTED_CIPHER = 2
+    UNSUPPORTED_PADDING = 3
+    UNSUPPORTED_MGF = 4
+    UNSUPPORTED_PUBLIC_KEY_ALGORITHM = 5
+    UNSUPPORTED_ELLIPTIC_CURVE = 6
+    UNSUPPORTED_SERIALIZATION = 7
+    UNSUPPORTED_X509 = 8
+    UNSUPPORTED_EXCHANGE_ALGORITHM = 9
+    UNSUPPORTED_DIFFIE_HELLMAN = 10
+    UNSUPPORTED_MAC = 11
+
+
+class UnsupportedAlgorithm(Exception):
+    """Raised when a requested algorithm or feature is unavailable.
+
+    ``reason`` optionally tags the failure with a ``_Reasons`` category;
+    it is kept on the private ``_reason`` attribute.
+    """
+
+    def __init__(
+        self, message: str, reason: typing.Optional[_Reasons] = None
+    ) -> None:
+        super().__init__(message)
+        # May be None when no specific category applies.
+        self._reason = reason
+
+
+class AlreadyFinalized(Exception):
+    """Raised on use of a context after ``finalize()`` has been called."""
+
+    pass
+
+
+class AlreadyUpdated(Exception):
+    """Raised when an operation is attempted after an update has occurred."""
+
+    pass
+
+
+class NotYetFinalized(Exception):
+    """Raised when a result is requested before ``finalize()`` was called."""
+
+    pass
+
+
+class InvalidTag(Exception):
+    """Raised when an authentication tag fails to validate."""
+
+    pass
+
+
+class InvalidSignature(Exception):
+    """Raised when signature (or MAC) verification fails."""
+
+    pass
+
+
+class InternalError(Exception):
+    """Raised for unexpected internal failures.
+
+    ``err_code`` carries the underlying OpenSSL error queue entries
+    (a list of ``rust_openssl.OpenSSLError``, per the annotation) and is
+    exposed publicly on the instance for diagnostics.
+    """
+
+    def __init__(
+        self, msg: str, err_code: typing.List["rust_openssl.OpenSSLError"]
+    ) -> None:
+        super().__init__(msg)
+        self.err_code = err_code
+
+
+class InvalidKey(Exception):
+    """Raised when a key fails verification or is otherwise invalid."""
+
+    pass
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/fernet.py b/Voltage Vikings/lib/python3.9/site-packages/cryptography/fernet.py
new file mode 100644
index 00000000..a2601f80
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/fernet.py
@@ -0,0 +1,220 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+
+import base64
+import binascii
+import os
+import time
+import typing
+
+from cryptography import utils
+from cryptography.exceptions import InvalidSignature
+from cryptography.hazmat.primitives import hashes, padding
+from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
+from cryptography.hazmat.primitives.hmac import HMAC
+
+
+class InvalidToken(Exception):
+    """Raised when a Fernet token is malformed, expired, or fails its MAC."""
+
+    pass
+
+
+_MAX_CLOCK_SKEW = 60
+
+
+class Fernet:
+    """Symmetric authenticated encryption using the Fernet token format.
+
+    A token, before url-safe base64 encoding, is laid out as::
+
+        0x80 version byte
+        || 8-byte big-endian creation timestamp
+        || 16-byte random IV
+        || AES-128-CBC ciphertext (PKCS7-padded)
+        || 32-byte HMAC-SHA256 tag over everything before it
+
+    The 32-byte key is split in half: first 16 bytes sign (HMAC),
+    last 16 bytes encrypt (AES).
+    """
+
+    def __init__(
+        self,
+        key: typing.Union[bytes, str],
+        backend: typing.Any = None,  # accepted but unused; kept for API compat
+    ) -> None:
+        # Decode and validate the key eagerly so misuse fails at
+        # construction time, not at first encrypt/decrypt.
+        try:
+            key = base64.urlsafe_b64decode(key)
+        except binascii.Error as exc:
+            raise ValueError(
+                "Fernet key must be 32 url-safe base64-encoded bytes."
+            ) from exc
+        if len(key) != 32:
+            raise ValueError(
+                "Fernet key must be 32 url-safe base64-encoded bytes."
+            )
+
+        # Independent keys for authentication and encryption.
+        self._signing_key = key[:16]
+        self._encryption_key = key[16:]
+
+    @classmethod
+    def generate_key(cls) -> bytes:
+        """Return a fresh random key, url-safe base64 encoded."""
+        return base64.urlsafe_b64encode(os.urandom(32))
+
+    def encrypt(self, data: bytes) -> bytes:
+        """Encrypt *data*, stamping the token with the current time."""
+        return self.encrypt_at_time(data, int(time.time()))
+
+    def encrypt_at_time(self, data: bytes, current_time: int) -> bytes:
+        """Encrypt *data* with an explicit timestamp (useful for testing)."""
+        iv = os.urandom(16)
+        return self._encrypt_from_parts(data, current_time, iv)
+
+    def _encrypt_from_parts(
+        self, data: bytes, current_time: int, iv: bytes
+    ) -> bytes:
+        # Assemble a token from explicit parts: pad, encrypt, frame, MAC.
+        utils._check_bytes("data", data)
+
+        padder = padding.PKCS7(algorithms.AES.block_size).padder()
+        padded_data = padder.update(data) + padder.finalize()
+        encryptor = Cipher(
+            algorithms.AES(self._encryption_key),
+            modes.CBC(iv),
+        ).encryptor()
+        ciphertext = encryptor.update(padded_data) + encryptor.finalize()
+
+        # Version byte 0x80 || timestamp || IV || ciphertext.
+        basic_parts = (
+            b"\x80"
+            + current_time.to_bytes(length=8, byteorder="big")
+            + iv
+            + ciphertext
+        )
+
+        # The HMAC covers the entire framed payload, so any bit flip
+        # (including in the timestamp) invalidates the token.
+        h = HMAC(self._signing_key, hashes.SHA256())
+        h.update(basic_parts)
+        hmac = h.finalize()
+        return base64.urlsafe_b64encode(basic_parts + hmac)
+
+    def decrypt(
+        self, token: typing.Union[bytes, str], ttl: typing.Optional[int] = None
+    ) -> bytes:
+        """Decrypt *token*; with a *ttl*, also reject tokens older than ttl seconds."""
+        timestamp, data = Fernet._get_unverified_token_data(token)
+        if ttl is None:
+            time_info = None
+        else:
+            time_info = (ttl, int(time.time()))
+        return self._decrypt_data(data, timestamp, time_info)
+
+    def decrypt_at_time(
+        self, token: typing.Union[bytes, str], ttl: int, current_time: int
+    ) -> bytes:
+        """Like :meth:`decrypt` but against an explicit clock; ttl is required."""
+        if ttl is None:
+            raise ValueError(
+                "decrypt_at_time() can only be used with a non-None ttl"
+            )
+        timestamp, data = Fernet._get_unverified_token_data(token)
+        return self._decrypt_data(data, timestamp, (ttl, current_time))
+
+    def extract_timestamp(self, token: typing.Union[bytes, str]) -> int:
+        """Return the creation timestamp of an authentic token."""
+        timestamp, data = Fernet._get_unverified_token_data(token)
+        # Verify the token was not tampered with.
+        self._verify_signature(data)
+        return timestamp
+
+    @staticmethod
+    def _get_unverified_token_data(
+        token: typing.Union[bytes, str]
+    ) -> typing.Tuple[int, bytes]:
+        # Parse the outer framing WITHOUT checking the MAC; callers must
+        # call _verify_signature before trusting anything beyond shape.
+        if not isinstance(token, (str, bytes)):
+            raise TypeError("token must be bytes or str")
+
+        try:
+            data = base64.urlsafe_b64decode(token)
+        except (TypeError, binascii.Error):
+            raise InvalidToken
+
+        # First byte must be the 0x80 version marker.
+        if not data or data[0] != 0x80:
+            raise InvalidToken
+
+        # Need at least version byte + 8-byte timestamp.
+        if len(data) < 9:
+            raise InvalidToken
+
+        timestamp = int.from_bytes(data[1:9], byteorder="big")
+        return timestamp, data
+
+    def _verify_signature(self, data: bytes) -> None:
+        # Recompute HMAC-SHA256 over everything except the trailing
+        # 32-byte tag; HMAC.verify performs the comparison.
+        h = HMAC(self._signing_key, hashes.SHA256())
+        h.update(data[:-32])
+        try:
+            h.verify(data[-32:])
+        except InvalidSignature:
+            raise InvalidToken
+
+    def _decrypt_data(
+        self,
+        data: bytes,
+        timestamp: int,
+        time_info: typing.Optional[typing.Tuple[int, int]],
+    ) -> bytes:
+        # time_info is (ttl, current_time) or None to skip expiry checks.
+        if time_info is not None:
+            ttl, current_time = time_info
+            # Token older than ttl seconds: expired.
+            if timestamp + ttl < current_time:
+                raise InvalidToken
+
+            # Token timestamped further in the future than the allowed
+            # clock skew: reject as well.
+            if current_time + _MAX_CLOCK_SKEW < timestamp:
+                raise InvalidToken
+
+        # Authenticate before decrypting.
+        self._verify_signature(data)
+
+        # Layout: [0]=version, [1:9]=timestamp, [9:25]=IV,
+        # [25:-32]=ciphertext, [-32:]=HMAC tag.
+        iv = data[9:25]
+        ciphertext = data[25:-32]
+        decryptor = Cipher(
+            algorithms.AES(self._encryption_key), modes.CBC(iv)
+        ).decryptor()
+        plaintext_padded = decryptor.update(ciphertext)
+        # A ciphertext that is not a whole number of blocks raises
+        # ValueError here; surface it uniformly as InvalidToken.
+        try:
+            plaintext_padded += decryptor.finalize()
+        except ValueError:
+            raise InvalidToken
+        unpadder = padding.PKCS7(algorithms.AES.block_size).unpadder()
+
+        unpadded = unpadder.update(plaintext_padded)
+        # Bad PKCS7 padding also becomes InvalidToken.
+        try:
+            unpadded += unpadder.finalize()
+        except ValueError:
+            raise InvalidToken
+        return unpadded
+
+
+class MultiFernet:
+    """A Fernet wrapper supporting key rotation.
+
+    Encrypts with the first (primary) Fernet instance; decrypts by
+    trying each instance in order.  :meth:`rotate` re-encrypts an
+    existing token under the primary key while preserving its original
+    timestamp.
+    """
+
+    def __init__(self, fernets: typing.Iterable[Fernet]):
+        # Materialize the iterable so it can be indexed and re-iterated.
+        fernets = list(fernets)
+        if not fernets:
+            raise ValueError(
+                "MultiFernet requires at least one Fernet instance"
+            )
+        self._fernets = fernets
+
+    def encrypt(self, msg: bytes) -> bytes:
+        """Encrypt *msg* under the primary key with the current time."""
+        return self.encrypt_at_time(msg, int(time.time()))
+
+    def encrypt_at_time(self, msg: bytes, current_time: int) -> bytes:
+        # Always encrypt with the first (primary) key.
+        return self._fernets[0].encrypt_at_time(msg, current_time)
+
+    def rotate(self, msg: typing.Union[bytes, str]) -> bytes:
+        """Re-encrypt *msg* under the primary key, keeping its timestamp."""
+        timestamp, data = Fernet._get_unverified_token_data(msg)
+        # Find any key that authenticates and decrypts the token.
+        for f in self._fernets:
+            try:
+                p = f._decrypt_data(data, timestamp, None)
+                break
+            except InvalidToken:
+                pass
+        else:
+            # No key matched.
+            raise InvalidToken
+
+        # Fresh IV, original timestamp, primary key.
+        iv = os.urandom(16)
+        return self._fernets[0]._encrypt_from_parts(p, timestamp, iv)
+
+    def decrypt(
+        self, msg: typing.Union[bytes, str], ttl: typing.Optional[int] = None
+    ) -> bytes:
+        """Try each key in order; raise InvalidToken if none succeeds."""
+        for f in self._fernets:
+            try:
+                return f.decrypt(msg, ttl)
+            except InvalidToken:
+                pass
+        raise InvalidToken
+
+    def decrypt_at_time(
+        self, msg: typing.Union[bytes, str], ttl: int, current_time: int
+    ) -> bytes:
+        """Like :meth:`decrypt` but against an explicit clock."""
+        for f in self._fernets:
+            try:
+                return f.decrypt_at_time(msg, ttl, current_time)
+            except InvalidToken:
+                pass
+        raise InvalidToken
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/__init__.py b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/__init__.py
new file mode 100644
index 00000000..007694bc
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/__init__.py
@@ -0,0 +1,10 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+"""
+Hazardous Materials
+
+This is a "Hazardous Materials" module. You should ONLY use it if you're
+100% absolutely sure that you know what you're doing because this module
+is full of land mines, dragons, and dinosaurs with laser guns.
+"""
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/_oid.py b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/_oid.py
new file mode 100644
index 00000000..927ffc4c
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/_oid.py
@@ -0,0 +1,293 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import typing
+
+from cryptography.hazmat.bindings._rust import (
+ ObjectIdentifier as ObjectIdentifier,
+)
+from cryptography.hazmat.primitives import hashes
+
+
+class ExtensionOID:
+    """OIDs for X.509 certificate and CRL extensions."""
+
+    SUBJECT_DIRECTORY_ATTRIBUTES = ObjectIdentifier("2.5.29.9")
+    SUBJECT_KEY_IDENTIFIER = ObjectIdentifier("2.5.29.14")
+    KEY_USAGE = ObjectIdentifier("2.5.29.15")
+    SUBJECT_ALTERNATIVE_NAME = ObjectIdentifier("2.5.29.17")
+    ISSUER_ALTERNATIVE_NAME = ObjectIdentifier("2.5.29.18")
+    BASIC_CONSTRAINTS = ObjectIdentifier("2.5.29.19")
+    NAME_CONSTRAINTS = ObjectIdentifier("2.5.29.30")
+    CRL_DISTRIBUTION_POINTS = ObjectIdentifier("2.5.29.31")
+    CERTIFICATE_POLICIES = ObjectIdentifier("2.5.29.32")
+    POLICY_MAPPINGS = ObjectIdentifier("2.5.29.33")
+    AUTHORITY_KEY_IDENTIFIER = ObjectIdentifier("2.5.29.35")
+    POLICY_CONSTRAINTS = ObjectIdentifier("2.5.29.36")
+    EXTENDED_KEY_USAGE = ObjectIdentifier("2.5.29.37")
+    FRESHEST_CRL = ObjectIdentifier("2.5.29.46")
+    INHIBIT_ANY_POLICY = ObjectIdentifier("2.5.29.54")
+    ISSUING_DISTRIBUTION_POINT = ObjectIdentifier("2.5.29.28")
+    AUTHORITY_INFORMATION_ACCESS = ObjectIdentifier("1.3.6.1.5.5.7.1.1")
+    SUBJECT_INFORMATION_ACCESS = ObjectIdentifier("1.3.6.1.5.5.7.1.11")
+    OCSP_NO_CHECK = ObjectIdentifier("1.3.6.1.5.5.7.48.1.5")
+    TLS_FEATURE = ObjectIdentifier("1.3.6.1.5.5.7.1.24")
+    CRL_NUMBER = ObjectIdentifier("2.5.29.20")
+    DELTA_CRL_INDICATOR = ObjectIdentifier("2.5.29.27")
+    PRECERT_SIGNED_CERTIFICATE_TIMESTAMPS = ObjectIdentifier(
+        "1.3.6.1.4.1.11129.2.4.2"
+    )
+    PRECERT_POISON = ObjectIdentifier("1.3.6.1.4.1.11129.2.4.3")
+    SIGNED_CERTIFICATE_TIMESTAMPS = ObjectIdentifier("1.3.6.1.4.1.11129.2.4.5")
+
+
+class OCSPExtensionOID:
+    """OIDs for OCSP request/response extensions."""
+
+    NONCE = ObjectIdentifier("1.3.6.1.5.5.7.48.1.2")
+
+
+class CRLEntryExtensionOID:
+    """OIDs for per-entry CRL extensions."""
+
+    CERTIFICATE_ISSUER = ObjectIdentifier("2.5.29.29")
+    CRL_REASON = ObjectIdentifier("2.5.29.21")
+    INVALIDITY_DATE = ObjectIdentifier("2.5.29.24")
+
+
+class NameOID:
+    """OIDs for X.500 distinguished-name attribute types."""
+
+    COMMON_NAME = ObjectIdentifier("2.5.4.3")
+    COUNTRY_NAME = ObjectIdentifier("2.5.4.6")
+    LOCALITY_NAME = ObjectIdentifier("2.5.4.7")
+    STATE_OR_PROVINCE_NAME = ObjectIdentifier("2.5.4.8")
+    STREET_ADDRESS = ObjectIdentifier("2.5.4.9")
+    ORGANIZATION_NAME = ObjectIdentifier("2.5.4.10")
+    ORGANIZATIONAL_UNIT_NAME = ObjectIdentifier("2.5.4.11")
+    SERIAL_NUMBER = ObjectIdentifier("2.5.4.5")
+    SURNAME = ObjectIdentifier("2.5.4.4")
+    GIVEN_NAME = ObjectIdentifier("2.5.4.42")
+    TITLE = ObjectIdentifier("2.5.4.12")
+    GENERATION_QUALIFIER = ObjectIdentifier("2.5.4.44")
+    X500_UNIQUE_IDENTIFIER = ObjectIdentifier("2.5.4.45")
+    DN_QUALIFIER = ObjectIdentifier("2.5.4.46")
+    PSEUDONYM = ObjectIdentifier("2.5.4.65")
+    USER_ID = ObjectIdentifier("0.9.2342.19200300.100.1.1")
+    DOMAIN_COMPONENT = ObjectIdentifier("0.9.2342.19200300.100.1.25")
+    EMAIL_ADDRESS = ObjectIdentifier("1.2.840.113549.1.9.1")
+    JURISDICTION_COUNTRY_NAME = ObjectIdentifier("1.3.6.1.4.1.311.60.2.1.3")
+    JURISDICTION_LOCALITY_NAME = ObjectIdentifier("1.3.6.1.4.1.311.60.2.1.1")
+    JURISDICTION_STATE_OR_PROVINCE_NAME = ObjectIdentifier(
+        "1.3.6.1.4.1.311.60.2.1.2"
+    )
+    BUSINESS_CATEGORY = ObjectIdentifier("2.5.4.15")
+    POSTAL_ADDRESS = ObjectIdentifier("2.5.4.16")
+    POSTAL_CODE = ObjectIdentifier("2.5.4.17")
+    INN = ObjectIdentifier("1.2.643.3.131.1.1")
+    OGRN = ObjectIdentifier("1.2.643.100.1")
+    SNILS = ObjectIdentifier("1.2.643.100.3")
+    UNSTRUCTURED_NAME = ObjectIdentifier("1.2.840.113549.1.9.2")
+
+
+class SignatureAlgorithmOID:
+    """OIDs identifying certificate signature algorithms."""
+
+    RSA_WITH_MD5 = ObjectIdentifier("1.2.840.113549.1.1.4")
+    RSA_WITH_SHA1 = ObjectIdentifier("1.2.840.113549.1.1.5")
+    # This is an alternate OID for RSA with SHA1 that is occasionally seen
+    _RSA_WITH_SHA1 = ObjectIdentifier("1.3.14.3.2.29")
+    RSA_WITH_SHA224 = ObjectIdentifier("1.2.840.113549.1.1.14")
+    RSA_WITH_SHA256 = ObjectIdentifier("1.2.840.113549.1.1.11")
+    RSA_WITH_SHA384 = ObjectIdentifier("1.2.840.113549.1.1.12")
+    RSA_WITH_SHA512 = ObjectIdentifier("1.2.840.113549.1.1.13")
+    RSA_WITH_SHA3_224 = ObjectIdentifier("2.16.840.1.101.3.4.3.13")
+    RSA_WITH_SHA3_256 = ObjectIdentifier("2.16.840.1.101.3.4.3.14")
+    RSA_WITH_SHA3_384 = ObjectIdentifier("2.16.840.1.101.3.4.3.15")
+    RSA_WITH_SHA3_512 = ObjectIdentifier("2.16.840.1.101.3.4.3.16")
+    RSASSA_PSS = ObjectIdentifier("1.2.840.113549.1.1.10")
+    ECDSA_WITH_SHA1 = ObjectIdentifier("1.2.840.10045.4.1")
+    ECDSA_WITH_SHA224 = ObjectIdentifier("1.2.840.10045.4.3.1")
+    ECDSA_WITH_SHA256 = ObjectIdentifier("1.2.840.10045.4.3.2")
+    ECDSA_WITH_SHA384 = ObjectIdentifier("1.2.840.10045.4.3.3")
+    ECDSA_WITH_SHA512 = ObjectIdentifier("1.2.840.10045.4.3.4")
+    ECDSA_WITH_SHA3_224 = ObjectIdentifier("2.16.840.1.101.3.4.3.9")
+    ECDSA_WITH_SHA3_256 = ObjectIdentifier("2.16.840.1.101.3.4.3.10")
+    ECDSA_WITH_SHA3_384 = ObjectIdentifier("2.16.840.1.101.3.4.3.11")
+    ECDSA_WITH_SHA3_512 = ObjectIdentifier("2.16.840.1.101.3.4.3.12")
+    DSA_WITH_SHA1 = ObjectIdentifier("1.2.840.10040.4.3")
+    DSA_WITH_SHA224 = ObjectIdentifier("2.16.840.1.101.3.4.3.1")
+    DSA_WITH_SHA256 = ObjectIdentifier("2.16.840.1.101.3.4.3.2")
+    DSA_WITH_SHA384 = ObjectIdentifier("2.16.840.1.101.3.4.3.3")
+    DSA_WITH_SHA512 = ObjectIdentifier("2.16.840.1.101.3.4.3.4")
+    ED25519 = ObjectIdentifier("1.3.101.112")
+    ED448 = ObjectIdentifier("1.3.101.113")
+    GOSTR3411_94_WITH_3410_2001 = ObjectIdentifier("1.2.643.2.2.3")
+    GOSTR3410_2012_WITH_3411_2012_256 = ObjectIdentifier("1.2.643.7.1.1.3.2")
+    GOSTR3410_2012_WITH_3411_2012_512 = ObjectIdentifier("1.2.643.7.1.1.3.3")
+
+
+# Maps a signature-algorithm OID to an instance of the hash it uses, or
+# None for algorithms where no separate hash choice applies (EdDSA, GOST).
+_SIG_OIDS_TO_HASH: typing.Dict[
+    ObjectIdentifier, typing.Optional[hashes.HashAlgorithm]
+] = {
+    SignatureAlgorithmOID.RSA_WITH_MD5: hashes.MD5(),
+    SignatureAlgorithmOID.RSA_WITH_SHA1: hashes.SHA1(),
+    SignatureAlgorithmOID._RSA_WITH_SHA1: hashes.SHA1(),
+    SignatureAlgorithmOID.RSA_WITH_SHA224: hashes.SHA224(),
+    SignatureAlgorithmOID.RSA_WITH_SHA256: hashes.SHA256(),
+    SignatureAlgorithmOID.RSA_WITH_SHA384: hashes.SHA384(),
+    SignatureAlgorithmOID.RSA_WITH_SHA512: hashes.SHA512(),
+    SignatureAlgorithmOID.RSA_WITH_SHA3_224: hashes.SHA3_224(),
+    SignatureAlgorithmOID.RSA_WITH_SHA3_256: hashes.SHA3_256(),
+    SignatureAlgorithmOID.RSA_WITH_SHA3_384: hashes.SHA3_384(),
+    SignatureAlgorithmOID.RSA_WITH_SHA3_512: hashes.SHA3_512(),
+    SignatureAlgorithmOID.ECDSA_WITH_SHA1: hashes.SHA1(),
+    SignatureAlgorithmOID.ECDSA_WITH_SHA224: hashes.SHA224(),
+    SignatureAlgorithmOID.ECDSA_WITH_SHA256: hashes.SHA256(),
+    SignatureAlgorithmOID.ECDSA_WITH_SHA384: hashes.SHA384(),
+    SignatureAlgorithmOID.ECDSA_WITH_SHA512: hashes.SHA512(),
+    SignatureAlgorithmOID.ECDSA_WITH_SHA3_224: hashes.SHA3_224(),
+    SignatureAlgorithmOID.ECDSA_WITH_SHA3_256: hashes.SHA3_256(),
+    SignatureAlgorithmOID.ECDSA_WITH_SHA3_384: hashes.SHA3_384(),
+    SignatureAlgorithmOID.ECDSA_WITH_SHA3_512: hashes.SHA3_512(),
+    SignatureAlgorithmOID.DSA_WITH_SHA1: hashes.SHA1(),
+    SignatureAlgorithmOID.DSA_WITH_SHA224: hashes.SHA224(),
+    SignatureAlgorithmOID.DSA_WITH_SHA256: hashes.SHA256(),
+    SignatureAlgorithmOID.ED25519: None,
+    SignatureAlgorithmOID.ED448: None,
+    SignatureAlgorithmOID.GOSTR3411_94_WITH_3410_2001: None,
+    SignatureAlgorithmOID.GOSTR3410_2012_WITH_3411_2012_256: None,
+    SignatureAlgorithmOID.GOSTR3410_2012_WITH_3411_2012_512: None,
+}
+
+
+class ExtendedKeyUsageOID:
+    """OIDs for the extendedKeyUsage certificate extension."""
+
+    SERVER_AUTH = ObjectIdentifier("1.3.6.1.5.5.7.3.1")
+    CLIENT_AUTH = ObjectIdentifier("1.3.6.1.5.5.7.3.2")
+    CODE_SIGNING = ObjectIdentifier("1.3.6.1.5.5.7.3.3")
+    EMAIL_PROTECTION = ObjectIdentifier("1.3.6.1.5.5.7.3.4")
+    TIME_STAMPING = ObjectIdentifier("1.3.6.1.5.5.7.3.8")
+    OCSP_SIGNING = ObjectIdentifier("1.3.6.1.5.5.7.3.9")
+    ANY_EXTENDED_KEY_USAGE = ObjectIdentifier("2.5.29.37.0")
+    SMARTCARD_LOGON = ObjectIdentifier("1.3.6.1.4.1.311.20.2.2")
+    KERBEROS_PKINIT_KDC = ObjectIdentifier("1.3.6.1.5.2.3.5")
+    IPSEC_IKE = ObjectIdentifier("1.3.6.1.5.5.7.3.17")
+    CERTIFICATE_TRANSPARENCY = ObjectIdentifier("1.3.6.1.4.1.11129.2.4.4")
+
+
+class AuthorityInformationAccessOID:
+    """Access-method OIDs for the authorityInfoAccess extension."""
+
+    CA_ISSUERS = ObjectIdentifier("1.3.6.1.5.5.7.48.2")
+    OCSP = ObjectIdentifier("1.3.6.1.5.5.7.48.1")
+
+
+class SubjectInformationAccessOID:
+    """Access-method OIDs for the subjectInfoAccess extension."""
+
+    CA_REPOSITORY = ObjectIdentifier("1.3.6.1.5.5.7.48.5")
+
+
+class CertificatePoliciesOID:
+    """OIDs used inside the certificatePolicies extension."""
+
+    CPS_QUALIFIER = ObjectIdentifier("1.3.6.1.5.5.7.2.1")
+    CPS_USER_NOTICE = ObjectIdentifier("1.3.6.1.5.5.7.2.2")
+    ANY_POLICY = ObjectIdentifier("2.5.29.32.0")
+ ANY_POLICY = ObjectIdentifier("2.5.29.32.0")
+
+
+class AttributeOID:
+    """OIDs for PKCS#9 attributes (e.g. in CSRs)."""
+
+    CHALLENGE_PASSWORD = ObjectIdentifier("1.2.840.113549.1.9.7")
+    UNSTRUCTURED_NAME = ObjectIdentifier("1.2.840.113549.1.9.2")
+
+
+# Human-readable short names for known OIDs (used for pretty-printing).
+_OID_NAMES = {
+    NameOID.COMMON_NAME: "commonName",
+    NameOID.COUNTRY_NAME: "countryName",
+    NameOID.LOCALITY_NAME: "localityName",
+    NameOID.STATE_OR_PROVINCE_NAME: "stateOrProvinceName",
+    NameOID.STREET_ADDRESS: "streetAddress",
+    NameOID.ORGANIZATION_NAME: "organizationName",
+    NameOID.ORGANIZATIONAL_UNIT_NAME: "organizationalUnitName",
+    NameOID.SERIAL_NUMBER: "serialNumber",
+    NameOID.SURNAME: "surname",
+    NameOID.GIVEN_NAME: "givenName",
+    NameOID.TITLE: "title",
+    NameOID.GENERATION_QUALIFIER: "generationQualifier",
+    NameOID.X500_UNIQUE_IDENTIFIER: "x500UniqueIdentifier",
+    NameOID.DN_QUALIFIER: "dnQualifier",
+    NameOID.PSEUDONYM: "pseudonym",
+    NameOID.USER_ID: "userID",
+    NameOID.DOMAIN_COMPONENT: "domainComponent",
+    NameOID.EMAIL_ADDRESS: "emailAddress",
+    NameOID.JURISDICTION_COUNTRY_NAME: "jurisdictionCountryName",
+    NameOID.JURISDICTION_LOCALITY_NAME: "jurisdictionLocalityName",
+    NameOID.JURISDICTION_STATE_OR_PROVINCE_NAME: (
+        "jurisdictionStateOrProvinceName"
+    ),
+    NameOID.BUSINESS_CATEGORY: "businessCategory",
+    NameOID.POSTAL_ADDRESS: "postalAddress",
+    NameOID.POSTAL_CODE: "postalCode",
+    NameOID.INN: "INN",
+    NameOID.OGRN: "OGRN",
+    NameOID.SNILS: "SNILS",
+    NameOID.UNSTRUCTURED_NAME: "unstructuredName",
+    SignatureAlgorithmOID.RSA_WITH_MD5: "md5WithRSAEncryption",
+    SignatureAlgorithmOID.RSA_WITH_SHA1: "sha1WithRSAEncryption",
+    SignatureAlgorithmOID.RSA_WITH_SHA224: "sha224WithRSAEncryption",
+    SignatureAlgorithmOID.RSA_WITH_SHA256: "sha256WithRSAEncryption",
+    SignatureAlgorithmOID.RSA_WITH_SHA384: "sha384WithRSAEncryption",
+    SignatureAlgorithmOID.RSA_WITH_SHA512: "sha512WithRSAEncryption",
+    SignatureAlgorithmOID.RSASSA_PSS: "RSASSA-PSS",
+    SignatureAlgorithmOID.ECDSA_WITH_SHA1: "ecdsa-with-SHA1",
+    SignatureAlgorithmOID.ECDSA_WITH_SHA224: "ecdsa-with-SHA224",
+    SignatureAlgorithmOID.ECDSA_WITH_SHA256: "ecdsa-with-SHA256",
+    SignatureAlgorithmOID.ECDSA_WITH_SHA384: "ecdsa-with-SHA384",
+    SignatureAlgorithmOID.ECDSA_WITH_SHA512: "ecdsa-with-SHA512",
+    SignatureAlgorithmOID.DSA_WITH_SHA1: "dsa-with-sha1",
+    SignatureAlgorithmOID.DSA_WITH_SHA224: "dsa-with-sha224",
+    SignatureAlgorithmOID.DSA_WITH_SHA256: "dsa-with-sha256",
+    SignatureAlgorithmOID.ED25519: "ed25519",
+    SignatureAlgorithmOID.ED448: "ed448",
+    SignatureAlgorithmOID.GOSTR3411_94_WITH_3410_2001: (
+        "GOST R 34.11-94 with GOST R 34.10-2001"
+    ),
+    SignatureAlgorithmOID.GOSTR3410_2012_WITH_3411_2012_256: (
+        "GOST R 34.10-2012 with GOST R 34.11-2012 (256 bit)"
+    ),
+    SignatureAlgorithmOID.GOSTR3410_2012_WITH_3411_2012_512: (
+        "GOST R 34.10-2012 with GOST R 34.11-2012 (512 bit)"
+    ),
+    ExtendedKeyUsageOID.SERVER_AUTH: "serverAuth",
+    ExtendedKeyUsageOID.CLIENT_AUTH: "clientAuth",
+    ExtendedKeyUsageOID.CODE_SIGNING: "codeSigning",
+    ExtendedKeyUsageOID.EMAIL_PROTECTION: "emailProtection",
+    ExtendedKeyUsageOID.TIME_STAMPING: "timeStamping",
+    ExtendedKeyUsageOID.OCSP_SIGNING: "OCSPSigning",
+    ExtendedKeyUsageOID.SMARTCARD_LOGON: "msSmartcardLogin",
+    ExtendedKeyUsageOID.KERBEROS_PKINIT_KDC: "pkInitKDC",
+    ExtensionOID.SUBJECT_DIRECTORY_ATTRIBUTES: "subjectDirectoryAttributes",
+    ExtensionOID.SUBJECT_KEY_IDENTIFIER: "subjectKeyIdentifier",
+    ExtensionOID.KEY_USAGE: "keyUsage",
+    ExtensionOID.SUBJECT_ALTERNATIVE_NAME: "subjectAltName",
+    ExtensionOID.ISSUER_ALTERNATIVE_NAME: "issuerAltName",
+    ExtensionOID.BASIC_CONSTRAINTS: "basicConstraints",
+    # Both precert and regular SCT extensions share the same display name.
+    ExtensionOID.PRECERT_SIGNED_CERTIFICATE_TIMESTAMPS: (
+        "signedCertificateTimestampList"
+    ),
+    ExtensionOID.SIGNED_CERTIFICATE_TIMESTAMPS: (
+        "signedCertificateTimestampList"
+    ),
+    ExtensionOID.PRECERT_POISON: "ctPoison",
+    CRLEntryExtensionOID.CRL_REASON: "cRLReason",
+    CRLEntryExtensionOID.INVALIDITY_DATE: "invalidityDate",
+    CRLEntryExtensionOID.CERTIFICATE_ISSUER: "certificateIssuer",
+    ExtensionOID.NAME_CONSTRAINTS: "nameConstraints",
+    ExtensionOID.CRL_DISTRIBUTION_POINTS: "cRLDistributionPoints",
+    ExtensionOID.CERTIFICATE_POLICIES: "certificatePolicies",
+    ExtensionOID.POLICY_MAPPINGS: "policyMappings",
+    ExtensionOID.AUTHORITY_KEY_IDENTIFIER: "authorityKeyIdentifier",
+    ExtensionOID.POLICY_CONSTRAINTS: "policyConstraints",
+    ExtensionOID.EXTENDED_KEY_USAGE: "extendedKeyUsage",
+    ExtensionOID.FRESHEST_CRL: "freshestCRL",
+    ExtensionOID.INHIBIT_ANY_POLICY: "inhibitAnyPolicy",
+    ExtensionOID.ISSUING_DISTRIBUTION_POINT: ("issuingDistributionPoint"),
+    ExtensionOID.AUTHORITY_INFORMATION_ACCESS: "authorityInfoAccess",
+    ExtensionOID.SUBJECT_INFORMATION_ACCESS: "subjectInfoAccess",
+    ExtensionOID.OCSP_NO_CHECK: "OCSPNoCheck",
+    ExtensionOID.CRL_NUMBER: "cRLNumber",
+    ExtensionOID.DELTA_CRL_INDICATOR: "deltaCRLIndicator",
+    ExtensionOID.TLS_FEATURE: "TLSFeature",
+    AuthorityInformationAccessOID.OCSP: "OCSP",
+    AuthorityInformationAccessOID.CA_ISSUERS: "caIssuers",
+    SubjectInformationAccessOID.CA_REPOSITORY: "caRepository",
+    CertificatePoliciesOID.CPS_QUALIFIER: "id-qt-cps",
+    CertificatePoliciesOID.CPS_USER_NOTICE: "id-qt-unotice",
+    OCSPExtensionOID.NONCE: "OCSPNonce",
+    AttributeOID.CHALLENGE_PASSWORD: "challengePassword",
+}
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/backends/__init__.py b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/backends/__init__.py
new file mode 100644
index 00000000..3926f85f
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/backends/__init__.py
@@ -0,0 +1,10 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+from typing import Any
+
+
+def default_backend() -> Any:
+    """Return the process-wide OpenSSL backend object.
+
+    Imported lazily inside the function so that importing this package
+    does not immediately pull in the OpenSSL binding.
+    """
+    from cryptography.hazmat.backends.openssl.backend import backend
+
+    return backend
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/backends/openssl/__init__.py b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/backends/openssl/__init__.py
new file mode 100644
index 00000000..42c4539d
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/backends/openssl/__init__.py
@@ -0,0 +1,8 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+
+from cryptography.hazmat.backends.openssl.backend import backend
+
+__all__ = ["backend"]
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/backends/openssl/aead.py b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/backends/openssl/aead.py
new file mode 100644
index 00000000..d43deb43
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/backends/openssl/aead.py
@@ -0,0 +1,310 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import typing
+
+from cryptography.exceptions import InvalidTag
+
+if typing.TYPE_CHECKING:
+ from cryptography.hazmat.backends.openssl.backend import Backend
+ from cryptography.hazmat.primitives.ciphers.aead import (
+ AESCCM,
+ AESGCM,
+ AESOCB3,
+ AESSIV,
+ ChaCha20Poly1305,
+ )
+
+ _AEADTypes = typing.Union[
+ AESCCM, AESGCM, AESOCB3, AESSIV, ChaCha20Poly1305
+ ]
+
+_ENCRYPT = 1
+_DECRYPT = 0
+
+
+def _aead_cipher_name(cipher: "_AEADTypes") -> bytes:
+ from cryptography.hazmat.primitives.ciphers.aead import (
+ AESCCM,
+ AESGCM,
+ AESOCB3,
+ AESSIV,
+ ChaCha20Poly1305,
+ )
+
+ if isinstance(cipher, ChaCha20Poly1305):
+ return b"chacha20-poly1305"
+ elif isinstance(cipher, AESCCM):
+ return f"aes-{len(cipher._key) * 8}-ccm".encode("ascii")
+ elif isinstance(cipher, AESOCB3):
+ return f"aes-{len(cipher._key) * 8}-ocb".encode("ascii")
+ elif isinstance(cipher, AESSIV):
+ return f"aes-{len(cipher._key) * 8 // 2}-siv".encode("ascii")
+ else:
+ assert isinstance(cipher, AESGCM)
+ return f"aes-{len(cipher._key) * 8}-gcm".encode("ascii")
+
+
+def _evp_cipher(cipher_name: bytes, backend: "Backend"):
+    """Return the EVP_CIPHER handle for *cipher_name*.
+
+    SIV ciphers are obtained with EVP_CIPHER_fetch and registered with a
+    gc destructor so the fetched cipher is freed; every other name goes
+    through the legacy EVP_get_cipherbyname lookup, which needs no
+    cleanup.
+    """
+    if cipher_name.endswith(b"-siv"):
+        evp_cipher = backend._lib.EVP_CIPHER_fetch(
+            backend._ffi.NULL,
+            cipher_name,
+            backend._ffi.NULL,
+        )
+        backend.openssl_assert(evp_cipher != backend._ffi.NULL)
+        evp_cipher = backend._ffi.gc(evp_cipher, backend._lib.EVP_CIPHER_free)
+    else:
+        evp_cipher = backend._lib.EVP_get_cipherbyname(cipher_name)
+        backend.openssl_assert(evp_cipher != backend._ffi.NULL)
+
+    return evp_cipher
+
+
+def _aead_create_ctx(
+    backend: "Backend",
+    cipher: "_AEADTypes",
+    key: bytes,
+):
+    """Create a reusable EVP_CIPHER_CTX with the cipher and *key* bound.
+
+    No nonce is set here; the nonce and the encrypt/decrypt direction are
+    applied later (see _set_nonce_operation) so the context can be reused
+    across operations.
+    """
+    ctx = backend._lib.EVP_CIPHER_CTX_new()
+    backend.openssl_assert(ctx != backend._ffi.NULL)
+    # gc ensures the ctx is freed when the Python wrapper is collected.
+    ctx = backend._ffi.gc(ctx, backend._lib.EVP_CIPHER_CTX_free)
+    cipher_name = _aead_cipher_name(cipher)
+    evp_cipher = _evp_cipher(cipher_name, backend)
+    key_ptr = backend._ffi.from_buffer(key)
+    res = backend._lib.EVP_CipherInit_ex(
+        ctx,
+        evp_cipher,
+        backend._ffi.NULL,
+        key_ptr,
+        backend._ffi.NULL,
+        0,
+    )
+    backend.openssl_assert(res != 0)
+    return ctx
+
+
+def _aead_setup(
+    backend: "Backend",
+    cipher_name: bytes,
+    key: bytes,
+    nonce: bytes,
+    tag: typing.Optional[bytes],
+    tag_len: int,
+    operation: int,
+):
+    """Build a fully-initialized EVP_CIPHER_CTX for one AEAD operation.
+
+    Initialization is deliberately split into two EVP_CipherInit_ex
+    calls: the cipher is selected first so that the IV length (and, for
+    decryption, the expected tag) can be configured before the key and
+    nonce are bound.
+    """
+    evp_cipher = _evp_cipher(cipher_name, backend)
+    ctx = backend._lib.EVP_CIPHER_CTX_new()
+    ctx = backend._ffi.gc(ctx, backend._lib.EVP_CIPHER_CTX_free)
+    res = backend._lib.EVP_CipherInit_ex(
+        ctx,
+        evp_cipher,
+        backend._ffi.NULL,
+        backend._ffi.NULL,
+        backend._ffi.NULL,
+        int(operation == _ENCRYPT),
+    )
+    backend.openssl_assert(res != 0)
+    # CCM requires the IVLEN to be set before calling SET_TAG on decrypt
+    res = backend._lib.EVP_CIPHER_CTX_ctrl(
+        ctx,
+        backend._lib.EVP_CTRL_AEAD_SET_IVLEN,
+        len(nonce),
+        backend._ffi.NULL,
+    )
+    backend.openssl_assert(res != 0)
+    if operation == _DECRYPT:
+        # Decryption must know the tag up front so finalization can verify.
+        assert tag is not None
+        _set_tag(backend, ctx, tag)
+    elif cipher_name.endswith(b"-ccm"):
+        # CCM encryption needs the tag length declared before processing.
+        res = backend._lib.EVP_CIPHER_CTX_ctrl(
+            ctx, backend._lib.EVP_CTRL_AEAD_SET_TAG, tag_len, backend._ffi.NULL
+        )
+        backend.openssl_assert(res != 0)
+
+    nonce_ptr = backend._ffi.from_buffer(nonce)
+    key_ptr = backend._ffi.from_buffer(key)
+    res = backend._lib.EVP_CipherInit_ex(
+        ctx,
+        backend._ffi.NULL,
+        backend._ffi.NULL,
+        key_ptr,
+        nonce_ptr,
+        int(operation == _ENCRYPT),
+    )
+    backend.openssl_assert(res != 0)
+    return ctx
+
+
+def _set_tag(backend, ctx, tag: bytes) -> None:
+    """Install the expected authentication tag on *ctx* (decrypt path)."""
+    tag_ptr = backend._ffi.from_buffer(tag)
+    res = backend._lib.EVP_CIPHER_CTX_ctrl(
+        ctx, backend._lib.EVP_CTRL_AEAD_SET_TAG, len(tag), tag_ptr
+    )
+    backend.openssl_assert(res != 0)
+
+
+def _set_nonce_operation(backend, ctx, nonce: bytes, operation: int) -> None:
+    """Bind *nonce* and the encrypt/decrypt direction to a reusable ctx.
+
+    Cipher and key were already set by _aead_create_ctx, so both are
+    passed as NULL here and remain unchanged.
+    """
+    nonce_ptr = backend._ffi.from_buffer(nonce)
+    res = backend._lib.EVP_CipherInit_ex(
+        ctx,
+        backend._ffi.NULL,
+        backend._ffi.NULL,
+        backend._ffi.NULL,
+        nonce_ptr,
+        int(operation == _ENCRYPT),
+    )
+    backend.openssl_assert(res != 0)
+
+
+def _set_length(backend: "Backend", ctx, data_len: int) -> None:
+    """Declare the total plaintext length to the cipher (CCM only).
+
+    CCM requires the length up front; it is communicated by an
+    EVP_CipherUpdate call with NULL output and input buffers.
+    """
+    intptr = backend._ffi.new("int *")
+    res = backend._lib.EVP_CipherUpdate(
+        ctx, backend._ffi.NULL, intptr, backend._ffi.NULL, data_len
+    )
+    backend.openssl_assert(res != 0)
+
+
+def _process_aad(backend: "Backend", ctx, associated_data: bytes) -> None:
+    """Feed additional authenticated data (AAD) into the AEAD context.
+
+    A NULL output buffer tells OpenSSL this data is authenticated but not
+    encrypted.
+    """
+    outlen = backend._ffi.new("int *")
+    a_data_ptr = backend._ffi.from_buffer(associated_data)
+    res = backend._lib.EVP_CipherUpdate(
+        ctx, backend._ffi.NULL, outlen, a_data_ptr, len(associated_data)
+    )
+    backend.openssl_assert(res != 0)
+
+
+def _process_data(backend: "Backend", ctx, data: bytes) -> bytes:
+    """Run *data* through the cipher and return the produced bytes.
+
+    The output buffer is sized to the input; the actual number of bytes
+    written is reported through *outlen*.
+    """
+    outlen = backend._ffi.new("int *")
+    buf = backend._ffi.new("unsigned char[]", len(data))
+    data_ptr = backend._ffi.from_buffer(data)
+    res = backend._lib.EVP_CipherUpdate(ctx, buf, outlen, data_ptr, len(data))
+    if res == 0:
+        # AES SIV can error here if the data is invalid on decrypt
+        backend._consume_errors()
+        raise InvalidTag
+    return backend._ffi.buffer(buf, outlen[0])[:]
+
+
+def _encrypt(
+    backend: "Backend",
+    cipher: "_AEADTypes",
+    nonce: bytes,
+    data: bytes,
+    associated_data: typing.List[bytes],
+    tag_length: int,
+    ctx: typing.Any = None,
+) -> bytes:
+    """AEAD-encrypt *data*, returning ciphertext with the tag attached.
+
+    If *ctx* is given it is a pre-keyed reusable context and only the
+    nonce/direction are rebound; otherwise a fresh context is built.
+    """
+    from cryptography.hazmat.primitives.ciphers.aead import AESCCM, AESSIV
+
+    if ctx is None:
+        cipher_name = _aead_cipher_name(cipher)
+        ctx = _aead_setup(
+            backend,
+            cipher_name,
+            cipher._key,
+            nonce,
+            None,
+            tag_length,
+            _ENCRYPT,
+        )
+    else:
+        _set_nonce_operation(backend, ctx, nonce, _ENCRYPT)
+
+    # CCM requires us to pass the length of the data before processing anything
+    # However calling this with any other AEAD results in an error
+    if isinstance(cipher, AESCCM):
+        _set_length(backend, ctx, len(data))
+
+    for ad in associated_data:
+        _process_aad(backend, ctx, ad)
+    processed_data = _process_data(backend, ctx, data)
+    outlen = backend._ffi.new("int *")
+    # All AEADs we support besides OCB are streaming so they return nothing
+    # in finalization. OCB can return up to (16 byte block - 1) bytes so
+    # we need a buffer here too.
+    buf = backend._ffi.new("unsigned char[]", 16)
+    res = backend._lib.EVP_CipherFinal_ex(ctx, buf, outlen)
+    backend.openssl_assert(res != 0)
+    processed_data += backend._ffi.buffer(buf, outlen[0])[:]
+    # Retrieve the authentication tag computed over AAD + ciphertext.
+    tag_buf = backend._ffi.new("unsigned char[]", tag_length)
+    res = backend._lib.EVP_CIPHER_CTX_ctrl(
+        ctx, backend._lib.EVP_CTRL_AEAD_GET_TAG, tag_length, tag_buf
+    )
+    backend.openssl_assert(res != 0)
+    tag = backend._ffi.buffer(tag_buf)[:]
+
+    if isinstance(cipher, AESSIV):
+        # RFC 5297 defines the output as IV || C, where the tag we generate is
+        # the "IV" and C is the ciphertext. This is the opposite of our
+        # other AEADs, which are Ciphertext || Tag
+        backend.openssl_assert(len(tag) == 16)
+        return tag + processed_data
+    else:
+        return processed_data + tag
+
+
+def _decrypt(
+    backend: "Backend",
+    cipher: "_AEADTypes",
+    nonce: bytes,
+    data: bytes,
+    associated_data: typing.List[bytes],
+    tag_length: int,
+    ctx: typing.Any = None,
+) -> bytes:
+    """AEAD-decrypt *data* (tag attached) and return the plaintext.
+
+    Raises InvalidTag on any authentication failure. If *ctx* is given it
+    is a pre-keyed reusable context; otherwise one is built here.
+    """
+    from cryptography.hazmat.primitives.ciphers.aead import AESCCM, AESSIV
+
+    # A token shorter than the tag cannot possibly authenticate.
+    if len(data) < tag_length:
+        raise InvalidTag
+
+    if isinstance(cipher, AESSIV):
+        # RFC 5297 defines the output as IV || C, where the tag we generate is
+        # the "IV" and C is the ciphertext. This is the opposite of our
+        # other AEADs, which are Ciphertext || Tag
+        tag = data[:tag_length]
+        data = data[tag_length:]
+    else:
+        tag = data[-tag_length:]
+        data = data[:-tag_length]
+    if ctx is None:
+        cipher_name = _aead_cipher_name(cipher)
+        ctx = _aead_setup(
+            backend, cipher_name, cipher._key, nonce, tag, tag_length, _DECRYPT
+        )
+    else:
+        _set_nonce_operation(backend, ctx, nonce, _DECRYPT)
+        _set_tag(backend, ctx, tag)
+
+    # CCM requires us to pass the length of the data before processing anything
+    # However calling this with any other AEAD results in an error
+    if isinstance(cipher, AESCCM):
+        _set_length(backend, ctx, len(data))
+
+    for ad in associated_data:
+        _process_aad(backend, ctx, ad)
+    # CCM has a different error path if the tag doesn't match. Errors are
+    # raised in Update and Final is irrelevant.
+    if isinstance(cipher, AESCCM):
+        outlen = backend._ffi.new("int *")
+        buf = backend._ffi.new("unsigned char[]", len(data))
+        d_ptr = backend._ffi.from_buffer(data)
+        res = backend._lib.EVP_CipherUpdate(ctx, buf, outlen, d_ptr, len(data))
+        if res != 1:
+            backend._consume_errors()
+            raise InvalidTag
+
+        processed_data = backend._ffi.buffer(buf, outlen[0])[:]
+    else:
+        processed_data = _process_data(backend, ctx, data)
+        outlen = backend._ffi.new("int *")
+        # OCB can return up to 15 bytes (16 byte block - 1) in finalization
+        buf = backend._ffi.new("unsigned char[]", 16)
+        # Finalization verifies the tag for non-CCM AEADs.
+        res = backend._lib.EVP_CipherFinal_ex(ctx, buf, outlen)
+        processed_data += backend._ffi.buffer(buf, outlen[0])[:]
+        if res == 0:
+            backend._consume_errors()
+            raise InvalidTag
+
+    return processed_data
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/backends/openssl/backend.py b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/backends/openssl/backend.py
new file mode 100644
index 00000000..a3fe1bce
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/backends/openssl/backend.py
@@ -0,0 +1,2428 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+
+import collections
+import contextlib
+import itertools
+import typing
+import warnings
+from contextlib import contextmanager
+
+from cryptography import utils, x509
+from cryptography.exceptions import UnsupportedAlgorithm, _Reasons
+from cryptography.hazmat.backends.openssl import aead
+from cryptography.hazmat.backends.openssl.ciphers import _CipherContext
+from cryptography.hazmat.backends.openssl.cmac import _CMACContext
+from cryptography.hazmat.backends.openssl.dh import (
+ _dh_params_dup,
+ _DHParameters,
+ _DHPrivateKey,
+ _DHPublicKey,
+)
+from cryptography.hazmat.backends.openssl.dsa import (
+ _DSAParameters,
+ _DSAPrivateKey,
+ _DSAPublicKey,
+)
+from cryptography.hazmat.backends.openssl.ec import (
+ _EllipticCurvePrivateKey,
+ _EllipticCurvePublicKey,
+)
+from cryptography.hazmat.backends.openssl.ed448 import (
+ _ED448_KEY_SIZE,
+ _Ed448PrivateKey,
+ _Ed448PublicKey,
+)
+from cryptography.hazmat.backends.openssl.ed25519 import (
+ _Ed25519PrivateKey,
+ _Ed25519PublicKey,
+)
+from cryptography.hazmat.backends.openssl.hashes import _HashContext
+from cryptography.hazmat.backends.openssl.hmac import _HMACContext
+from cryptography.hazmat.backends.openssl.poly1305 import (
+ _POLY1305_KEY_SIZE,
+ _Poly1305Context,
+)
+from cryptography.hazmat.backends.openssl.rsa import (
+ _RSAPrivateKey,
+ _RSAPublicKey,
+)
+from cryptography.hazmat.backends.openssl.x448 import (
+ _X448PrivateKey,
+ _X448PublicKey,
+)
+from cryptography.hazmat.bindings._rust import openssl as rust_openssl
+from cryptography.hazmat.bindings.openssl import binding
+from cryptography.hazmat.primitives import hashes, serialization
+from cryptography.hazmat.primitives._asymmetric import AsymmetricPadding
+from cryptography.hazmat.primitives.asymmetric import (
+ dh,
+ dsa,
+ ec,
+ ed448,
+ ed25519,
+ rsa,
+ x448,
+ x25519,
+)
+from cryptography.hazmat.primitives.asymmetric.padding import (
+ MGF1,
+ OAEP,
+ PSS,
+ PKCS1v15,
+)
+from cryptography.hazmat.primitives.asymmetric.types import (
+ PrivateKeyTypes,
+ PublicKeyTypes,
+)
+from cryptography.hazmat.primitives.ciphers import (
+ BlockCipherAlgorithm,
+ CipherAlgorithm,
+)
+from cryptography.hazmat.primitives.ciphers.algorithms import (
+ AES,
+ AES128,
+ AES256,
+ ARC4,
+ SM4,
+ Camellia,
+ ChaCha20,
+ TripleDES,
+ _BlowfishInternal,
+ _CAST5Internal,
+ _IDEAInternal,
+ _SEEDInternal,
+)
+from cryptography.hazmat.primitives.ciphers.modes import (
+ CBC,
+ CFB,
+ CFB8,
+ CTR,
+ ECB,
+ GCM,
+ OFB,
+ XTS,
+ Mode,
+)
+from cryptography.hazmat.primitives.kdf import scrypt
+from cryptography.hazmat.primitives.serialization import ssh
+from cryptography.hazmat.primitives.serialization.pkcs12 import (
+ PBES,
+ PKCS12Certificate,
+ PKCS12KeyAndCertificates,
+ PKCS12PrivateKeyTypes,
+ _PKCS12CATypes,
+)
+
+# (bio, char_ptr) pair used by memory-BIO helpers; presumably char_ptr keeps
+# the backing buffer alive for the BIO's lifetime — TODO confirm at call site.
+_MemoryBIO = collections.namedtuple("_MemoryBIO", ["bio", "char_ptr"])
+
+
+# Not actually supported, just used as a marker for some serialization tests.
+class _RC2:
+    """Marker type for RC2; not an implementation (see comment above)."""
+
+    pass
+
+
+class Backend:
+    """
+    OpenSSL API binding interfaces.
+    """
+
+    name = "openssl"
+
+    # FIPS has opinions about acceptable algorithms and key sizes, but the
+    # disallowed algorithms are still present in OpenSSL. They just error if
+    # you try to use them. To avoid that we allowlist the algorithms in
+    # FIPS 140-3. This isn't ideal, but FIPS 140-3 is trash so here we are.
+    _fips_aead = {
+        b"aes-128-ccm",
+        b"aes-192-ccm",
+        b"aes-256-ccm",
+        b"aes-128-gcm",
+        b"aes-192-gcm",
+        b"aes-256-gcm",
+    }
+    # TripleDES encryption is disallowed/deprecated throughout 2023 in
+    # FIPS 140-3. To keep it simple we denylist any use of TripleDES (TDEA).
+    _fips_ciphers = (AES,)
+    # Sometimes SHA1 is still permissible. That logic is contained
+    # within the various *_supported methods.
+    _fips_hashes = (
+        hashes.SHA224,
+        hashes.SHA256,
+        hashes.SHA384,
+        hashes.SHA512,
+        hashes.SHA512_224,
+        hashes.SHA512_256,
+        hashes.SHA3_224,
+        hashes.SHA3_256,
+        hashes.SHA3_384,
+        hashes.SHA3_512,
+        hashes.SHAKE128,
+        hashes.SHAKE256,
+    )
+    # Curves permitted for ECDH when FIPS mode is enabled.
+    _fips_ecdh_curves = (
+        ec.SECP224R1,
+        ec.SECP256R1,
+        ec.SECP384R1,
+        ec.SECP521R1,
+    )
+    # Minimum key-size / exponent thresholds enforced under FIPS.
+    _fips_rsa_min_key_size = 2048
+    _fips_rsa_min_public_exponent = 65537
+    _fips_dsa_min_modulus = 1 << 2048
+    _fips_dh_min_key_size = 2048
+    _fips_dh_min_modulus = 1 << _fips_dh_min_key_size
+
+    def __init__(self) -> None:
+        # Low-level cffi binding plus shortcuts to its ffi/lib handles.
+        self._binding = binding.Binding()
+        self._ffi = self._binding.ffi
+        self._lib = self._binding.lib
+        self._fips_enabled = self._is_fips_enabled()
+
+        # Maps (algorithm class, mode class) -> factory for a cipher
+        # context; populated by _register_default_ciphers().
+        self._cipher_registry: typing.Dict[
+            typing.Tuple[typing.Type[CipherAlgorithm], typing.Type[Mode]],
+            typing.Callable,
+        ] = {}
+        self._register_default_ciphers()
+        if self._fips_enabled and self._lib.CRYPTOGRAPHY_NEEDS_OSRANDOM_ENGINE:
+            warnings.warn(
+                "OpenSSL FIPS mode is enabled. Can't enable DRBG fork safety.",
+                UserWarning,
+            )
+        else:
+            self.activate_osrandom_engine()
+        # EVP_PKEY type ids recognized as Diffie-Hellman keys; DHX is only
+        # added when the linked OpenSSL provides it.
+        self._dh_types = [self._lib.EVP_PKEY_DH]
+        if self._lib.Cryptography_HAS_EVP_PKEY_DHX:
+            self._dh_types.append(self._lib.EVP_PKEY_DHX)
+
+ def __repr__(self) -> str:
+ return "".format(
+ self.openssl_version_text(),
+ self._fips_enabled,
+ self._binding._legacy_provider_loaded,
+ )
+
+    def openssl_assert(
+        self,
+        ok: bool,
+        errors: typing.Optional[typing.List[rust_openssl.OpenSSLError]] = None,
+    ) -> None:
+        """Assert an OpenSSL call succeeded; delegates to the binding's
+        _openssl_assert helper, passing along any collected errors."""
+        return binding._openssl_assert(self._lib, ok, errors=errors)
+
+    def _is_fips_enabled(self) -> bool:
+        """Return whether the linked OpenSSL is running in FIPS mode.
+
+        Uses the 3.0 properties API when available, else legacy FIPS_mode().
+        """
+        if self._lib.Cryptography_HAS_300_FIPS:
+            mode = self._lib.EVP_default_properties_is_fips_enabled(
+                self._ffi.NULL
+            )
+        else:
+            mode = self._lib.FIPS_mode()
+
+        if mode == 0:
+            # OpenSSL without FIPS pushes an error on the error stack
+            self._lib.ERR_clear_error()
+        return bool(mode)
+
+    def _enable_fips(self) -> None:
+        # This function enables FIPS mode for OpenSSL 3.0.0 on installs that
+        # have the FIPS provider installed properly.
+        self._binding._enable_fips()
+        # Sanity-check that enabling actually took effect before caching it.
+        assert self._is_fips_enabled()
+        self._fips_enabled = self._is_fips_enabled()
+
+    def activate_builtin_random(self) -> None:
+        """Restore OpenSSL's built-in RNG, unregistering the osrandom engine."""
+        if self._lib.CRYPTOGRAPHY_NEEDS_OSRANDOM_ENGINE:
+            # Obtain a new structural reference.
+            e = self._lib.ENGINE_get_default_RAND()
+            if e != self._ffi.NULL:
+                self._lib.ENGINE_unregister_RAND(e)
+                # Reset the RNG to use the built-in.
+                res = self._lib.RAND_set_rand_method(self._ffi.NULL)
+                self.openssl_assert(res == 1)
+                # decrement the structural reference from get_default_RAND
+                res = self._lib.ENGINE_finish(e)
+                self.openssl_assert(res == 1)
+
    @contextlib.contextmanager
    def _get_osurandom_engine(self):
        """Context manager yielding an initialized osrandom ENGINE*.

        Both references taken here (structural via ENGINE_by_id, functional
        via ENGINE_init) are released on exit, even if the body raises.
        """
        # Fetches an engine by id and returns it. This creates a structural
        # reference.
        e = self._lib.ENGINE_by_id(self._lib.Cryptography_osrandom_engine_id)
        self.openssl_assert(e != self._ffi.NULL)
        # Initialize the engine for use. This adds a functional reference.
        res = self._lib.ENGINE_init(e)
        self.openssl_assert(res == 1)

        try:
            yield e
        finally:
            # Decrement the structural ref incremented by ENGINE_by_id.
            res = self._lib.ENGINE_free(e)
            self.openssl_assert(res == 1)
            # Decrement the functional ref incremented by ENGINE_init.
            res = self._lib.ENGINE_finish(e)
            self.openssl_assert(res == 1)
+
    def activate_osrandom_engine(self) -> None:
        """Install the osrandom engine as OpenSSL's default RAND source."""
        if self._lib.CRYPTOGRAPHY_NEEDS_OSRANDOM_ENGINE:
            # Unregister and free the current engine.
            self.activate_builtin_random()
            with self._get_osurandom_engine() as e:
                # Set the engine as the default RAND provider.
                res = self._lib.ENGINE_set_default_RAND(e)
                self.openssl_assert(res == 1)
            # Reset the RNG to use the engine
            res = self._lib.RAND_set_rand_method(self._ffi.NULL)
            self.openssl_assert(res == 1)
+
    def osrandom_engine_implementation(self) -> str:
        """Ask the osrandom engine which OS entropy source it is using."""
        # 64 bytes is ample for the implementation name written by the
        # engine's custom ctrl command.
        buf = self._ffi.new("char[]", 64)
        with self._get_osurandom_engine() as e:
            res = self._lib.ENGINE_ctrl_cmd(
                e, b"get_implementation", len(buf), buf, self._ffi.NULL, 0
            )
            self.openssl_assert(res > 0)
        return self._ffi.string(buf).decode("ascii")
+
+ def openssl_version_text(self) -> str:
+ """
+ Friendly string name of the loaded OpenSSL library. This is not
+ necessarily the same version as it was compiled against.
+
+ Example: OpenSSL 1.1.1d 10 Sep 2019
+ """
+ return self._ffi.string(
+ self._lib.OpenSSL_version(self._lib.OPENSSL_VERSION)
+ ).decode("ascii")
+
    def openssl_version_number(self) -> int:
        """Return the runtime OpenSSL version as its numeric encoding."""
        return self._lib.OpenSSL_version_num()
+
    def create_hmac_ctx(
        self, key: bytes, algorithm: hashes.HashAlgorithm
    ) -> _HMACContext:
        """Create an HMAC context keyed with *key* over *algorithm*."""
        return _HMACContext(self, key, algorithm)
+
+ def _evp_md_from_algorithm(self, algorithm: hashes.HashAlgorithm):
+ if algorithm.name == "blake2b" or algorithm.name == "blake2s":
+ alg = "{}{}".format(
+ algorithm.name, algorithm.digest_size * 8
+ ).encode("ascii")
+ else:
+ alg = algorithm.name.encode("ascii")
+
+ evp_md = self._lib.EVP_get_digestbyname(alg)
+ return evp_md
+
    def _evp_md_non_null_from_algorithm(self, algorithm: hashes.HashAlgorithm):
        """Like _evp_md_from_algorithm, but asserts the digest exists."""
        evp_md = self._evp_md_from_algorithm(algorithm)
        self.openssl_assert(evp_md != self._ffi.NULL)
        return evp_md
+
+ def hash_supported(self, algorithm: hashes.HashAlgorithm) -> bool:
+ if self._fips_enabled and not isinstance(algorithm, self._fips_hashes):
+ return False
+
+ evp_md = self._evp_md_from_algorithm(algorithm)
+ return evp_md != self._ffi.NULL
+
+ def signature_hash_supported(
+ self, algorithm: hashes.HashAlgorithm
+ ) -> bool:
+ # Dedicated check for hashing algorithm use in message digest for
+ # signatures, e.g. RSA PKCS#1 v1.5 SHA1 (sha1WithRSAEncryption).
+ if self._fips_enabled and isinstance(algorithm, hashes.SHA1):
+ return False
+ return self.hash_supported(algorithm)
+
+ def scrypt_supported(self) -> bool:
+ if self._fips_enabled:
+ return False
+ else:
+ return self._lib.Cryptography_HAS_SCRYPT == 1
+
    def hmac_supported(self, algorithm: hashes.HashAlgorithm) -> bool:
        """Check digest support for HMAC use."""
        # FIPS mode still allows SHA1 for HMAC
        if self._fips_enabled and isinstance(algorithm, hashes.SHA1):
            return True

        return self.hash_supported(algorithm)
+
    def create_hash_ctx(
        self, algorithm: hashes.HashAlgorithm
    ) -> hashes.HashContext:
        """Create a streaming hash context for *algorithm*."""
        return _HashContext(self, algorithm)
+
+ def cipher_supported(self, cipher: CipherAlgorithm, mode: Mode) -> bool:
+ if self._fips_enabled:
+ # FIPS mode requires AES. TripleDES is disallowed/deprecated in
+ # FIPS 140-3.
+ if not isinstance(cipher, self._fips_ciphers):
+ return False
+
+ try:
+ adapter = self._cipher_registry[type(cipher), type(mode)]
+ except KeyError:
+ return False
+ evp_cipher = adapter(self, cipher, mode)
+ return self._ffi.NULL != evp_cipher
+
+ def register_cipher_adapter(self, cipher_cls, mode_cls, adapter) -> None:
+ if (cipher_cls, mode_cls) in self._cipher_registry:
+ raise ValueError(
+ "Duplicate registration for: {} {}.".format(
+ cipher_cls, mode_cls
+ )
+ )
+ self._cipher_registry[cipher_cls, mode_cls] = adapter
+
    def _register_default_ciphers(self) -> None:
        """Populate the cipher registry with every built-in (cipher, mode)
        combination this backend knows how to resolve to an OpenSSL name.
        """
        for cipher_cls in [AES, AES128, AES256]:
            for mode_cls in [CBC, CTR, ECB, OFB, CFB, CFB8, GCM]:
                self.register_cipher_adapter(
                    cipher_cls,
                    mode_cls,
                    GetCipherByName(
                        "{cipher.name}-{cipher.key_size}-{mode.name}"
                    ),
                )
        for mode_cls in [CBC, CTR, ECB, OFB, CFB]:
            self.register_cipher_adapter(
                Camellia,
                mode_cls,
                GetCipherByName("{cipher.name}-{cipher.key_size}-{mode.name}"),
            )
        for mode_cls in [CBC, CFB, CFB8, OFB]:
            self.register_cipher_adapter(
                TripleDES, mode_cls, GetCipherByName("des-ede3-{mode.name}")
            )
        self.register_cipher_adapter(
            TripleDES, ECB, GetCipherByName("des-ede3")
        )
        # ChaCha20 and ARC4 are stream ciphers: no mode, keyed on NoneType.
        self.register_cipher_adapter(
            ChaCha20, type(None), GetCipherByName("chacha20")
        )
        # XTS needs a custom adapter because key-size handling differs.
        self.register_cipher_adapter(AES, XTS, _get_xts_cipher)
        for mode_cls in [ECB, CBC, OFB, CFB, CTR]:
            self.register_cipher_adapter(
                SM4, mode_cls, GetCipherByName("sm4-{mode.name}")
            )
        # Don't register legacy ciphers if they're unavailable. Hypothetically
        # this wouldn't be necessary because we test availability by seeing if
        # we get an EVP_CIPHER * in the _CipherContext __init__, but OpenSSL 3
        # will return a valid pointer even though the cipher is unavailable.
        if (
            self._binding._legacy_provider_loaded
            or not self._lib.CRYPTOGRAPHY_OPENSSL_300_OR_GREATER
        ):
            for mode_cls in [CBC, CFB, OFB, ECB]:
                self.register_cipher_adapter(
                    _BlowfishInternal,
                    mode_cls,
                    GetCipherByName("bf-{mode.name}"),
                )
            for mode_cls in [CBC, CFB, OFB, ECB]:
                self.register_cipher_adapter(
                    _SEEDInternal,
                    mode_cls,
                    GetCipherByName("seed-{mode.name}"),
                )
            for cipher_cls, mode_cls in itertools.product(
                [_CAST5Internal, _IDEAInternal],
                [CBC, OFB, CFB, ECB],
            ):
                self.register_cipher_adapter(
                    cipher_cls,
                    mode_cls,
                    GetCipherByName("{cipher.name}-{mode.name}"),
                )
            self.register_cipher_adapter(
                ARC4, type(None), GetCipherByName("rc4")
            )
            # We don't actually support RC2, this is just used by some tests.
            self.register_cipher_adapter(
                _RC2, type(None), GetCipherByName("rc2")
            )
+
    def create_symmetric_encryption_ctx(
        self, cipher: CipherAlgorithm, mode: Mode
    ) -> _CipherContext:
        """Create a cipher context configured for encryption."""
        return _CipherContext(self, cipher, mode, _CipherContext._ENCRYPT)
+
    def create_symmetric_decryption_ctx(
        self, cipher: CipherAlgorithm, mode: Mode
    ) -> _CipherContext:
        """Create a cipher context configured for decryption."""
        return _CipherContext(self, cipher, mode, _CipherContext._DECRYPT)
+
    def pbkdf2_hmac_supported(self, algorithm: hashes.HashAlgorithm) -> bool:
        """PBKDF2 support mirrors HMAC support for the same digest."""
        return self.hmac_supported(algorithm)
+
    def derive_pbkdf2_hmac(
        self,
        algorithm: hashes.HashAlgorithm,
        length: int,
        salt: bytes,
        iterations: int,
        key_material: bytes,
    ) -> bytes:
        """Derive *length* bytes via OpenSSL's PKCS5_PBKDF2_HMAC.

        *key_material* is the password; *algorithm* selects the PRF digest.
        """
        buf = self._ffi.new("unsigned char[]", length)
        evp_md = self._evp_md_non_null_from_algorithm(algorithm)
        # from_buffer avoids copying the password bytes; the pointer must
        # stay referenced until the call returns.
        key_material_ptr = self._ffi.from_buffer(key_material)
        res = self._lib.PKCS5_PBKDF2_HMAC(
            key_material_ptr,
            len(key_material),
            salt,
            len(salt),
            iterations,
            evp_md,
            length,
            buf,
        )
        self.openssl_assert(res == 1)
        return self._ffi.buffer(buf)[:]
+
    def _consume_errors(self) -> typing.List[rust_openssl.OpenSSLError]:
        """Drain and return OpenSSL's thread-local error stack."""
        return rust_openssl.capture_error_stack()
+
    def _bn_to_int(self, bn) -> int:
        """Convert a non-negative OpenSSL BIGNUM* to a Python int."""
        assert bn != self._ffi.NULL
        # Only unsigned conversion is supported here.
        self.openssl_assert(not self._lib.BN_is_negative(bn))

        bn_num_bytes = self._lib.BN_num_bytes(bn)
        bin_ptr = self._ffi.new("unsigned char[]", bn_num_bytes)
        bin_len = self._lib.BN_bn2bin(bn, bin_ptr)
        # A zero length means the BN has value 0
        self.openssl_assert(bin_len >= 0)
        val = int.from_bytes(self._ffi.buffer(bin_ptr)[:bin_len], "big")
        return val
+
+ def _int_to_bn(self, num: int, bn=None):
+ """
+ Converts a python integer to a BIGNUM. The returned BIGNUM will not
+ be garbage collected (to support adding them to structs that take
+ ownership of the object). Be sure to register it for GC if it will
+ be discarded after use.
+ """
+ assert bn is None or bn != self._ffi.NULL
+
+ if bn is None:
+ bn = self._ffi.NULL
+
+ binary = num.to_bytes(int(num.bit_length() / 8.0 + 1), "big")
+ bn_ptr = self._lib.BN_bin2bn(binary, len(binary), bn)
+ self.openssl_assert(bn_ptr != self._ffi.NULL)
+ return bn_ptr
+
    def generate_rsa_private_key(
        self, public_exponent: int, key_size: int
    ) -> rsa.RSAPrivateKey:
        """Generate an RSA key pair with the given exponent and modulus size.

        Raises ValueError (via rsa._verify_rsa_parameters) on bad inputs.
        """
        rsa._verify_rsa_parameters(public_exponent, key_size)

        rsa_cdata = self._lib.RSA_new()
        self.openssl_assert(rsa_cdata != self._ffi.NULL)
        rsa_cdata = self._ffi.gc(rsa_cdata, self._lib.RSA_free)

        # BN for the exponent is owned by us, so register it for GC.
        bn = self._int_to_bn(public_exponent)
        bn = self._ffi.gc(bn, self._lib.BN_free)

        res = self._lib.RSA_generate_key_ex(
            rsa_cdata, key_size, bn, self._ffi.NULL
        )
        self.openssl_assert(res == 1)
        evp_pkey = self._rsa_cdata_to_evp_pkey(rsa_cdata)

        # We can skip RSA key validation here since we just generated the key
        return _RSAPrivateKey(
            self, rsa_cdata, evp_pkey, unsafe_skip_rsa_key_validation=True
        )
+
+ def generate_rsa_parameters_supported(
+ self, public_exponent: int, key_size: int
+ ) -> bool:
+ return (
+ public_exponent >= 3
+ and public_exponent & 1 != 0
+ and key_size >= 512
+ )
+
    def load_rsa_private_numbers(
        self,
        numbers: rsa.RSAPrivateNumbers,
        unsafe_skip_rsa_key_validation: bool,
    ) -> rsa.RSAPrivateKey:
        """Build an RSA private key object from its component integers."""
        rsa._check_private_key_components(
            numbers.p,
            numbers.q,
            numbers.d,
            numbers.dmp1,
            numbers.dmq1,
            numbers.iqmp,
            numbers.public_numbers.e,
            numbers.public_numbers.n,
        )
        rsa_cdata = self._lib.RSA_new()
        self.openssl_assert(rsa_cdata != self._ffi.NULL)
        rsa_cdata = self._ffi.gc(rsa_cdata, self._lib.RSA_free)
        # These BNs are not GC-registered: the RSA_set0_* calls below
        # transfer ownership to the RSA struct.
        p = self._int_to_bn(numbers.p)
        q = self._int_to_bn(numbers.q)
        d = self._int_to_bn(numbers.d)
        dmp1 = self._int_to_bn(numbers.dmp1)
        dmq1 = self._int_to_bn(numbers.dmq1)
        iqmp = self._int_to_bn(numbers.iqmp)
        e = self._int_to_bn(numbers.public_numbers.e)
        n = self._int_to_bn(numbers.public_numbers.n)
        res = self._lib.RSA_set0_factors(rsa_cdata, p, q)
        self.openssl_assert(res == 1)
        res = self._lib.RSA_set0_key(rsa_cdata, n, e, d)
        self.openssl_assert(res == 1)
        res = self._lib.RSA_set0_crt_params(rsa_cdata, dmp1, dmq1, iqmp)
        self.openssl_assert(res == 1)
        evp_pkey = self._rsa_cdata_to_evp_pkey(rsa_cdata)

        return _RSAPrivateKey(
            self,
            rsa_cdata,
            evp_pkey,
            unsafe_skip_rsa_key_validation=unsafe_skip_rsa_key_validation,
        )
+
    def load_rsa_public_numbers(
        self, numbers: rsa.RSAPublicNumbers
    ) -> rsa.RSAPublicKey:
        """Build an RSA public key object from (e, n)."""
        rsa._check_public_key_components(numbers.e, numbers.n)
        rsa_cdata = self._lib.RSA_new()
        self.openssl_assert(rsa_cdata != self._ffi.NULL)
        rsa_cdata = self._ffi.gc(rsa_cdata, self._lib.RSA_free)
        # Ownership of e and n passes to the RSA struct via RSA_set0_key.
        e = self._int_to_bn(numbers.e)
        n = self._int_to_bn(numbers.n)
        res = self._lib.RSA_set0_key(rsa_cdata, n, e, self._ffi.NULL)
        self.openssl_assert(res == 1)
        evp_pkey = self._rsa_cdata_to_evp_pkey(rsa_cdata)

        return _RSAPublicKey(self, rsa_cdata, evp_pkey)
+
    def _create_evp_pkey_gc(self):
        """Allocate a fresh EVP_PKEY* registered for garbage collection."""
        evp_pkey = self._lib.EVP_PKEY_new()
        self.openssl_assert(evp_pkey != self._ffi.NULL)
        evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free)
        return evp_pkey
+
    def _rsa_cdata_to_evp_pkey(self, rsa_cdata):
        """Wrap an RSA* in a GC-managed EVP_PKEY* (set1 bumps the refcount)."""
        evp_pkey = self._create_evp_pkey_gc()
        res = self._lib.EVP_PKEY_set1_RSA(evp_pkey, rsa_cdata)
        self.openssl_assert(res == 1)
        return evp_pkey
+
    def _bytes_to_bio(self, data: bytes) -> _MemoryBIO:
        """
        Return a _MemoryBIO namedtuple of (BIO, char*).

        The char* is the storage for the BIO and it must stay alive until the
        BIO is finished with.
        """
        data_ptr = self._ffi.from_buffer(data)
        bio = self._lib.BIO_new_mem_buf(data_ptr, len(data))
        self.openssl_assert(bio != self._ffi.NULL)

        # Returning data_ptr alongside the BIO keeps the buffer referenced.
        return _MemoryBIO(self._ffi.gc(bio, self._lib.BIO_free), data_ptr)
+
    def _create_mem_bio_gc(self):
        """
        Creates an empty memory BIO.
        """
        bio_method = self._lib.BIO_s_mem()
        self.openssl_assert(bio_method != self._ffi.NULL)
        bio = self._lib.BIO_new(bio_method)
        self.openssl_assert(bio != self._ffi.NULL)
        bio = self._ffi.gc(bio, self._lib.BIO_free)
        return bio
+
    def _read_mem_bio(self, bio) -> bytes:
        """
        Reads a memory BIO. This only works on memory BIOs.
        """
        buf = self._ffi.new("char **")
        buf_len = self._lib.BIO_get_mem_data(bio, buf)
        # Callers only read BIOs they have written to, so empty is an error.
        self.openssl_assert(buf_len > 0)
        self.openssl_assert(buf[0] != self._ffi.NULL)
        # Copy out of the BIO-owned buffer before the BIO is freed.
        bio_data = self._ffi.buffer(buf[0], buf_len)[:]
        return bio_data
+
    def _evp_pkey_to_private_key(
        self, evp_pkey, unsafe_skip_rsa_key_validation: bool
    ) -> PrivateKeyTypes:
        """
        Return the appropriate type of PrivateKey given an evp_pkey cdata
        pointer.
        """

        key_type = self._lib.EVP_PKEY_id(evp_pkey)

        if key_type == self._lib.EVP_PKEY_RSA:
            rsa_cdata = self._lib.EVP_PKEY_get1_RSA(evp_pkey)
            self.openssl_assert(rsa_cdata != self._ffi.NULL)
            rsa_cdata = self._ffi.gc(rsa_cdata, self._lib.RSA_free)
            return _RSAPrivateKey(
                self,
                rsa_cdata,
                evp_pkey,
                unsafe_skip_rsa_key_validation=unsafe_skip_rsa_key_validation,
            )
        elif (
            key_type == self._lib.EVP_PKEY_RSA_PSS
            and not self._lib.CRYPTOGRAPHY_IS_LIBRESSL
            and not self._lib.CRYPTOGRAPHY_IS_BORINGSSL
            and not self._lib.CRYPTOGRAPHY_OPENSSL_LESS_THAN_111E
        ):
            # At the moment the way we handle RSA PSS keys is to strip the
            # PSS constraints from them and treat them as normal RSA keys
            # Unfortunately the RSA * itself tracks this data so we need to
            # extract, serialize, and reload it without the constraints.
            rsa_cdata = self._lib.EVP_PKEY_get1_RSA(evp_pkey)
            self.openssl_assert(rsa_cdata != self._ffi.NULL)
            rsa_cdata = self._ffi.gc(rsa_cdata, self._lib.RSA_free)
            bio = self._create_mem_bio_gc()
            res = self._lib.i2d_RSAPrivateKey_bio(bio, rsa_cdata)
            self.openssl_assert(res == 1)
            return self.load_der_private_key(
                self._read_mem_bio(bio),
                password=None,
                unsafe_skip_rsa_key_validation=unsafe_skip_rsa_key_validation,
            )
        elif key_type == self._lib.EVP_PKEY_DSA:
            dsa_cdata = self._lib.EVP_PKEY_get1_DSA(evp_pkey)
            self.openssl_assert(dsa_cdata != self._ffi.NULL)
            dsa_cdata = self._ffi.gc(dsa_cdata, self._lib.DSA_free)
            return _DSAPrivateKey(self, dsa_cdata, evp_pkey)
        elif key_type == self._lib.EVP_PKEY_EC:
            ec_cdata = self._lib.EVP_PKEY_get1_EC_KEY(evp_pkey)
            self.openssl_assert(ec_cdata != self._ffi.NULL)
            ec_cdata = self._ffi.gc(ec_cdata, self._lib.EC_KEY_free)
            return _EllipticCurvePrivateKey(self, ec_cdata, evp_pkey)
        elif key_type in self._dh_types:
            dh_cdata = self._lib.EVP_PKEY_get1_DH(evp_pkey)
            self.openssl_assert(dh_cdata != self._ffi.NULL)
            dh_cdata = self._ffi.gc(dh_cdata, self._lib.DH_free)
            return _DHPrivateKey(self, dh_cdata, evp_pkey)
        elif key_type == getattr(self._lib, "EVP_PKEY_ED25519", None):
            # EVP_PKEY_ED25519 is not present in CRYPTOGRAPHY_IS_LIBRESSL
            return _Ed25519PrivateKey(self, evp_pkey)
        elif key_type == getattr(self._lib, "EVP_PKEY_X448", None):
            # EVP_PKEY_X448 is not present in CRYPTOGRAPHY_IS_LIBRESSL
            return _X448PrivateKey(self, evp_pkey)
        elif key_type == self._lib.EVP_PKEY_X25519:
            # X25519 is handled by the Rust implementation; hand it the raw
            # pointer as an integer.
            return rust_openssl.x25519.private_key_from_ptr(
                int(self._ffi.cast("uintptr_t", evp_pkey))
            )
        elif key_type == getattr(self._lib, "EVP_PKEY_ED448", None):
            # EVP_PKEY_ED448 is not present in CRYPTOGRAPHY_IS_LIBRESSL
            return _Ed448PrivateKey(self, evp_pkey)
        else:
            raise UnsupportedAlgorithm("Unsupported key type.")
+
    def _evp_pkey_to_public_key(self, evp_pkey) -> PublicKeyTypes:
        """
        Return the appropriate type of PublicKey given an evp_pkey cdata
        pointer.
        """

        key_type = self._lib.EVP_PKEY_id(evp_pkey)

        if key_type == self._lib.EVP_PKEY_RSA:
            rsa_cdata = self._lib.EVP_PKEY_get1_RSA(evp_pkey)
            self.openssl_assert(rsa_cdata != self._ffi.NULL)
            rsa_cdata = self._ffi.gc(rsa_cdata, self._lib.RSA_free)
            return _RSAPublicKey(self, rsa_cdata, evp_pkey)
        elif (
            key_type == self._lib.EVP_PKEY_RSA_PSS
            and not self._lib.CRYPTOGRAPHY_IS_LIBRESSL
            and not self._lib.CRYPTOGRAPHY_IS_BORINGSSL
            and not self._lib.CRYPTOGRAPHY_OPENSSL_LESS_THAN_111E
        ):
            # Strip the PSS constraints by serializing the plain RSA key and
            # reloading it (mirrors _evp_pkey_to_private_key).
            rsa_cdata = self._lib.EVP_PKEY_get1_RSA(evp_pkey)
            self.openssl_assert(rsa_cdata != self._ffi.NULL)
            rsa_cdata = self._ffi.gc(rsa_cdata, self._lib.RSA_free)
            bio = self._create_mem_bio_gc()
            res = self._lib.i2d_RSAPublicKey_bio(bio, rsa_cdata)
            self.openssl_assert(res == 1)
            return self.load_der_public_key(self._read_mem_bio(bio))
        elif key_type == self._lib.EVP_PKEY_DSA:
            dsa_cdata = self._lib.EVP_PKEY_get1_DSA(evp_pkey)
            self.openssl_assert(dsa_cdata != self._ffi.NULL)
            dsa_cdata = self._ffi.gc(dsa_cdata, self._lib.DSA_free)
            return _DSAPublicKey(self, dsa_cdata, evp_pkey)
        elif key_type == self._lib.EVP_PKEY_EC:
            ec_cdata = self._lib.EVP_PKEY_get1_EC_KEY(evp_pkey)
            if ec_cdata == self._ffi.NULL:
                # Unlike the other branches this failure is reachable with
                # malformed input, so raise instead of asserting.
                errors = self._consume_errors()
                raise ValueError("Unable to load EC key", errors)
            ec_cdata = self._ffi.gc(ec_cdata, self._lib.EC_KEY_free)
            return _EllipticCurvePublicKey(self, ec_cdata, evp_pkey)
        elif key_type in self._dh_types:
            dh_cdata = self._lib.EVP_PKEY_get1_DH(evp_pkey)
            self.openssl_assert(dh_cdata != self._ffi.NULL)
            dh_cdata = self._ffi.gc(dh_cdata, self._lib.DH_free)
            return _DHPublicKey(self, dh_cdata, evp_pkey)
        elif key_type == getattr(self._lib, "EVP_PKEY_ED25519", None):
            # EVP_PKEY_ED25519 is not present in CRYPTOGRAPHY_IS_LIBRESSL
            return _Ed25519PublicKey(self, evp_pkey)
        elif key_type == getattr(self._lib, "EVP_PKEY_X448", None):
            # EVP_PKEY_X448 is not present in CRYPTOGRAPHY_IS_LIBRESSL
            return _X448PublicKey(self, evp_pkey)
        elif key_type == self._lib.EVP_PKEY_X25519:
            # X25519 is handled by the Rust implementation.
            return rust_openssl.x25519.public_key_from_ptr(
                int(self._ffi.cast("uintptr_t", evp_pkey))
            )
        elif key_type == getattr(self._lib, "EVP_PKEY_ED448", None):
            # EVP_PKEY_ED448 is not present in CRYPTOGRAPHY_IS_LIBRESSL
            return _Ed448PublicKey(self, evp_pkey)
        else:
            raise UnsupportedAlgorithm("Unsupported key type.")
+
+ def _oaep_hash_supported(self, algorithm: hashes.HashAlgorithm) -> bool:
+ if self._fips_enabled and isinstance(algorithm, hashes.SHA1):
+ return False
+
+ return isinstance(
+ algorithm,
+ (
+ hashes.SHA1,
+ hashes.SHA224,
+ hashes.SHA256,
+ hashes.SHA384,
+ hashes.SHA512,
+ ),
+ )
+
    def rsa_padding_supported(self, padding: AsymmetricPadding) -> bool:
        """Check whether an RSA padding configuration is usable."""
        if isinstance(padding, PKCS1v15):
            return True
        elif isinstance(padding, PSS) and isinstance(padding._mgf, MGF1):
            # SHA1 is permissible in MGF1 in FIPS even when SHA1 is blocked
            # as signature algorithm.
            if self._fips_enabled and isinstance(
                padding._mgf._algorithm, hashes.SHA1
            ):
                return True
            else:
                return self.hash_supported(padding._mgf._algorithm)
        elif isinstance(padding, OAEP) and isinstance(padding._mgf, MGF1):
            # Both the MGF1 digest and the OAEP digest must be acceptable.
            return self._oaep_hash_supported(
                padding._mgf._algorithm
            ) and self._oaep_hash_supported(padding._algorithm)
        else:
            return False
+
+ def rsa_encryption_supported(self, padding: AsymmetricPadding) -> bool:
+ if self._fips_enabled and isinstance(padding, PKCS1v15):
+ return False
+ else:
+ return self.rsa_padding_supported(padding)
+
    def generate_dsa_parameters(self, key_size: int) -> dsa.DSAParameters:
        """Generate fresh DSA domain parameters of *key_size* bits.

        Raises ValueError for sizes other than 1024/2048/3072/4096.
        """
        if key_size not in (1024, 2048, 3072, 4096):
            raise ValueError(
                "Key size must be 1024, 2048, 3072, or 4096 bits."
            )

        ctx = self._lib.DSA_new()
        self.openssl_assert(ctx != self._ffi.NULL)
        ctx = self._ffi.gc(ctx, self._lib.DSA_free)

        res = self._lib.DSA_generate_parameters_ex(
            ctx,
            key_size,
            self._ffi.NULL,
            0,
            self._ffi.NULL,
            self._ffi.NULL,
            self._ffi.NULL,
        )

        self.openssl_assert(res == 1)

        return _DSAParameters(self, ctx)
+
    def generate_dsa_private_key(
        self, parameters: dsa.DSAParameters
    ) -> dsa.DSAPrivateKey:
        """Generate a DSA key pair from existing domain parameters."""
        # Duplicate so the generated key does not mutate the caller's
        # parameters object.
        ctx = self._lib.DSAparams_dup(
            parameters._dsa_cdata  # type: ignore[attr-defined]
        )
        self.openssl_assert(ctx != self._ffi.NULL)
        ctx = self._ffi.gc(ctx, self._lib.DSA_free)
        self._lib.DSA_generate_key(ctx)
        evp_pkey = self._dsa_cdata_to_evp_pkey(ctx)

        return _DSAPrivateKey(self, ctx, evp_pkey)
+
+ def generate_dsa_private_key_and_parameters(
+ self, key_size: int
+ ) -> dsa.DSAPrivateKey:
+ parameters = self.generate_dsa_parameters(key_size)
+ return self.generate_dsa_private_key(parameters)
+
    def _dsa_cdata_set_values(
        self, dsa_cdata, p, q, g, pub_key, priv_key
    ) -> None:
        """Install pqg and key BIGNUMs into a DSA*; ownership transfers."""
        res = self._lib.DSA_set0_pqg(dsa_cdata, p, q, g)
        self.openssl_assert(res == 1)
        res = self._lib.DSA_set0_key(dsa_cdata, pub_key, priv_key)
        self.openssl_assert(res == 1)
+
    def load_dsa_private_numbers(
        self, numbers: dsa.DSAPrivateNumbers
    ) -> dsa.DSAPrivateKey:
        """Build a DSA private key from its component integers."""
        dsa._check_dsa_private_numbers(numbers)
        parameter_numbers = numbers.public_numbers.parameter_numbers

        dsa_cdata = self._lib.DSA_new()
        self.openssl_assert(dsa_cdata != self._ffi.NULL)
        dsa_cdata = self._ffi.gc(dsa_cdata, self._lib.DSA_free)

        # BN ownership transfers to the DSA struct in _dsa_cdata_set_values.
        p = self._int_to_bn(parameter_numbers.p)
        q = self._int_to_bn(parameter_numbers.q)
        g = self._int_to_bn(parameter_numbers.g)
        pub_key = self._int_to_bn(numbers.public_numbers.y)
        priv_key = self._int_to_bn(numbers.x)
        self._dsa_cdata_set_values(dsa_cdata, p, q, g, pub_key, priv_key)

        evp_pkey = self._dsa_cdata_to_evp_pkey(dsa_cdata)

        return _DSAPrivateKey(self, dsa_cdata, evp_pkey)
+
    def load_dsa_public_numbers(
        self, numbers: dsa.DSAPublicNumbers
    ) -> dsa.DSAPublicKey:
        """Build a DSA public key from its component integers."""
        dsa._check_dsa_parameters(numbers.parameter_numbers)
        dsa_cdata = self._lib.DSA_new()
        self.openssl_assert(dsa_cdata != self._ffi.NULL)
        dsa_cdata = self._ffi.gc(dsa_cdata, self._lib.DSA_free)

        p = self._int_to_bn(numbers.parameter_numbers.p)
        q = self._int_to_bn(numbers.parameter_numbers.q)
        g = self._int_to_bn(numbers.parameter_numbers.g)
        pub_key = self._int_to_bn(numbers.y)
        # Public keys have no private component.
        priv_key = self._ffi.NULL
        self._dsa_cdata_set_values(dsa_cdata, p, q, g, pub_key, priv_key)

        evp_pkey = self._dsa_cdata_to_evp_pkey(dsa_cdata)

        return _DSAPublicKey(self, dsa_cdata, evp_pkey)
+
    def load_dsa_parameter_numbers(
        self, numbers: dsa.DSAParameterNumbers
    ) -> dsa.DSAParameters:
        """Build a DSA parameters object from (p, q, g)."""
        dsa._check_dsa_parameters(numbers)
        dsa_cdata = self._lib.DSA_new()
        self.openssl_assert(dsa_cdata != self._ffi.NULL)
        dsa_cdata = self._ffi.gc(dsa_cdata, self._lib.DSA_free)

        # Ownership of p, q, g passes to the DSA struct.
        p = self._int_to_bn(numbers.p)
        q = self._int_to_bn(numbers.q)
        g = self._int_to_bn(numbers.g)
        res = self._lib.DSA_set0_pqg(dsa_cdata, p, q, g)
        self.openssl_assert(res == 1)

        return _DSAParameters(self, dsa_cdata)
+
    def _dsa_cdata_to_evp_pkey(self, dsa_cdata):
        """Wrap a DSA* in a GC-managed EVP_PKEY* (set1 bumps the refcount)."""
        evp_pkey = self._create_evp_pkey_gc()
        res = self._lib.EVP_PKEY_set1_DSA(evp_pkey, dsa_cdata)
        self.openssl_assert(res == 1)
        return evp_pkey
+
+ def dsa_supported(self) -> bool:
+ return not self._fips_enabled
+
+ def dsa_hash_supported(self, algorithm: hashes.HashAlgorithm) -> bool:
+ if not self.dsa_supported():
+ return False
+ return self.signature_hash_supported(algorithm)
+
    def cmac_algorithm_supported(self, algorithm) -> bool:
        """CMAC works iff the cipher supports CBC mode (probed with a
        zero IV of one block)."""
        return self.cipher_supported(
            algorithm, CBC(b"\x00" * algorithm.block_size)
        )
+
    def create_cmac_ctx(self, algorithm: BlockCipherAlgorithm) -> _CMACContext:
        """Create a CMAC context over the given block cipher."""
        return _CMACContext(self, algorithm)
+
    def load_pem_private_key(
        self,
        data: bytes,
        password: typing.Optional[bytes],
        unsafe_skip_rsa_key_validation: bool,
    ) -> PrivateKeyTypes:
        """Deserialize a PEM-encoded (optionally encrypted) private key."""
        return self._load_key(
            self._lib.PEM_read_bio_PrivateKey,
            data,
            password,
            unsafe_skip_rsa_key_validation,
        )
+
    def load_pem_public_key(self, data: bytes) -> PublicKeyTypes:
        """Deserialize a PEM public key (SPKI first, then raw PKCS1 RSA)."""
        mem_bio = self._bytes_to_bio(data)
        # In OpenSSL 3.0.x the PEM_read_bio_PUBKEY function will invoke
        # the default password callback if you pass an encrypted private
        # key. This is very, very, very bad as the default callback can
        # trigger an interactive console prompt, which will hang the
        # Python process. We therefore provide our own callback to
        # catch this and error out properly.
        userdata = self._ffi.new("CRYPTOGRAPHY_PASSWORD_DATA *")
        evp_pkey = self._lib.PEM_read_bio_PUBKEY(
            mem_bio.bio,
            self._ffi.NULL,
            self._ffi.addressof(
                self._lib._original_lib, "Cryptography_pem_password_cb"
            ),
            userdata,
        )
        if evp_pkey != self._ffi.NULL:
            evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free)
            return self._evp_pkey_to_public_key(evp_pkey)
        else:
            # It's not a (RSA/DSA/ECDSA) subjectPublicKeyInfo, but we still
            # need to check to see if it is a pure PKCS1 RSA public key (not
            # embedded in a subjectPublicKeyInfo)
            self._consume_errors()
            # Rewind the BIO so the second parse sees the full input.
            res = self._lib.BIO_reset(mem_bio.bio)
            self.openssl_assert(res == 1)
            rsa_cdata = self._lib.PEM_read_bio_RSAPublicKey(
                mem_bio.bio,
                self._ffi.NULL,
                self._ffi.addressof(
                    self._lib._original_lib, "Cryptography_pem_password_cb"
                ),
                userdata,
            )
            if rsa_cdata != self._ffi.NULL:
                rsa_cdata = self._ffi.gc(rsa_cdata, self._lib.RSA_free)
                evp_pkey = self._rsa_cdata_to_evp_pkey(rsa_cdata)
                return _RSAPublicKey(self, rsa_cdata, evp_pkey)
            else:
                # Neither format parsed; raise the appropriate error.
                self._handle_key_loading_error()
+
    def load_pem_parameters(self, data: bytes) -> dh.DHParameters:
        """Deserialize PEM-encoded key-exchange parameters."""
        mem_bio = self._bytes_to_bio(data)
        # only DH is supported currently
        dh_cdata = self._lib.PEM_read_bio_DHparams(
            mem_bio.bio, self._ffi.NULL, self._ffi.NULL, self._ffi.NULL
        )
        if dh_cdata != self._ffi.NULL:
            dh_cdata = self._ffi.gc(dh_cdata, self._lib.DH_free)
            return _DHParameters(self, dh_cdata)
        else:
            self._handle_key_loading_error()
+
    def load_der_private_key(
        self,
        data: bytes,
        password: typing.Optional[bytes],
        unsafe_skip_rsa_key_validation: bool,
    ) -> PrivateKeyTypes:
        """Deserialize a DER private key (traditional format, then PKCS8)."""
        # OpenSSL has a function called d2i_AutoPrivateKey that in theory
        # handles this automatically, however it doesn't handle encrypted
        # private keys. Instead we try to load the key two different ways.
        # First we'll try to load it as a traditional key.
        bio_data = self._bytes_to_bio(data)
        key = self._evp_pkey_from_der_traditional_key(bio_data, password)
        if key:
            return self._evp_pkey_to_private_key(
                key, unsafe_skip_rsa_key_validation
            )
        else:
            # Finally we try to load it with the method that handles encrypted
            # PKCS8 properly.
            return self._load_key(
                self._lib.d2i_PKCS8PrivateKey_bio,
                data,
                password,
                unsafe_skip_rsa_key_validation,
            )
+
    def _evp_pkey_from_der_traditional_key(self, bio_data, password):
        """Try parsing a traditional (non-PKCS8) DER key.

        Returns a GC-managed EVP_PKEY* on success, None on parse failure.
        Raises TypeError if a password was supplied for an unencrypted key
        (traditional DER keys cannot be encrypted).
        """
        key = self._lib.d2i_PrivateKey_bio(bio_data.bio, self._ffi.NULL)
        if key != self._ffi.NULL:
            key = self._ffi.gc(key, self._lib.EVP_PKEY_free)
            if password is not None:
                raise TypeError(
                    "Password was given but private key is not encrypted."
                )

            return key
        else:
            # Parse failed; clear the error stack so the PKCS8 fallback
            # starts clean.
            self._consume_errors()
            return None
+
    def load_der_public_key(self, data: bytes) -> PublicKeyTypes:
        """Deserialize a DER public key (SPKI first, then raw PKCS1 RSA)."""
        mem_bio = self._bytes_to_bio(data)
        evp_pkey = self._lib.d2i_PUBKEY_bio(mem_bio.bio, self._ffi.NULL)
        if evp_pkey != self._ffi.NULL:
            evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free)
            return self._evp_pkey_to_public_key(evp_pkey)
        else:
            # It's not a (RSA/DSA/ECDSA) subjectPublicKeyInfo, but we still
            # need to check to see if it is a pure PKCS1 RSA public key (not
            # embedded in a subjectPublicKeyInfo)
            self._consume_errors()
            # Rewind the BIO for the second parse attempt.
            res = self._lib.BIO_reset(mem_bio.bio)
            self.openssl_assert(res == 1)
            rsa_cdata = self._lib.d2i_RSAPublicKey_bio(
                mem_bio.bio, self._ffi.NULL
            )
            if rsa_cdata != self._ffi.NULL:
                rsa_cdata = self._ffi.gc(rsa_cdata, self._lib.RSA_free)
                evp_pkey = self._rsa_cdata_to_evp_pkey(rsa_cdata)
                return _RSAPublicKey(self, rsa_cdata, evp_pkey)
            else:
                self._handle_key_loading_error()
+
    def load_der_parameters(self, data: bytes) -> dh.DHParameters:
        """Deserialize DER DH parameters, falling back to DHX if available."""
        mem_bio = self._bytes_to_bio(data)
        dh_cdata = self._lib.d2i_DHparams_bio(mem_bio.bio, self._ffi.NULL)
        if dh_cdata != self._ffi.NULL:
            dh_cdata = self._ffi.gc(dh_cdata, self._lib.DH_free)
            return _DHParameters(self, dh_cdata)
        elif self._lib.Cryptography_HAS_EVP_PKEY_DHX:
            # We check to see if the params are dhx.
            self._consume_errors()
            res = self._lib.BIO_reset(mem_bio.bio)
            self.openssl_assert(res == 1)
            dh_cdata = self._lib.d2i_DHxparams_bio(mem_bio.bio, self._ffi.NULL)
            if dh_cdata != self._ffi.NULL:
                dh_cdata = self._ffi.gc(dh_cdata, self._lib.DH_free)
                return _DHParameters(self, dh_cdata)

        self._handle_key_loading_error()
+
    def _cert2ossl(self, cert: x509.Certificate) -> typing.Any:
        """Convert an x509.Certificate into a GC-managed OpenSSL X509*."""
        data = cert.public_bytes(serialization.Encoding.DER)
        mem_bio = self._bytes_to_bio(data)
        # Note: shadows the imported x509 module name within this method.
        x509 = self._lib.d2i_X509_bio(mem_bio.bio, self._ffi.NULL)
        self.openssl_assert(x509 != self._ffi.NULL)
        x509 = self._ffi.gc(x509, self._lib.X509_free)
        return x509
+
    def _ossl2cert(self, x509_ptr: typing.Any) -> x509.Certificate:
        """Convert an OpenSSL X509* back into an x509.Certificate."""
        bio = self._create_mem_bio_gc()
        res = self._lib.i2d_X509_bio(bio, x509_ptr)
        self.openssl_assert(res == 1)
        return x509.load_der_x509_certificate(self._read_mem_bio(bio))
+
    def _check_keys_correspond(self, key1, key2) -> None:
        """Raise ValueError unless the two keys share the same public part."""
        # EVP_PKEY_cmp returns 1 on match; 0/negative values all mean
        # mismatch or error.
        if self._lib.EVP_PKEY_cmp(key1._evp_pkey, key2._evp_pkey) != 1:
            raise ValueError("Keys do not correspond")
+
    def _load_key(
        self, openssl_read_func, data, password, unsafe_skip_rsa_key_validation
    ) -> PrivateKeyTypes:
        """Shared private-key loader for PEM and PKCS8 read functions.

        Wires our password callback into *openssl_read_func* and translates
        the callback's error codes into Python exceptions.
        """
        mem_bio = self._bytes_to_bio(data)

        userdata = self._ffi.new("CRYPTOGRAPHY_PASSWORD_DATA *")
        if password is not None:
            utils._check_byteslike("password", password)
            password_ptr = self._ffi.from_buffer(password)
            userdata.password = password_ptr
            userdata.length = len(password)

        evp_pkey = openssl_read_func(
            mem_bio.bio,
            self._ffi.NULL,
            self._ffi.addressof(
                self._lib._original_lib, "Cryptography_pem_password_cb"
            ),
            userdata,
        )

        if evp_pkey == self._ffi.NULL:
            if userdata.error != 0:
                self._consume_errors()
                # error == -1: callback invoked but no password supplied.
                if userdata.error == -1:
                    raise TypeError(
                        "Password was not given but private key is encrypted"
                    )
                else:
                    # error == -2: password exceeded the callback's buffer.
                    assert userdata.error == -2
                    raise ValueError(
                        "Passwords longer than {} bytes are not supported "
                        "by this backend.".format(userdata.maxsize - 1)
                    )
            else:
                self._handle_key_loading_error()

        evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free)

        # If a password was supplied but the callback never ran, the key
        # was not actually encrypted.
        if password is not None and userdata.called == 0:
            raise TypeError(
                "Password was given but private key is not encrypted."
            )

        assert (
            password is not None and userdata.called == 1
        ) or password is None

        return self._evp_pkey_to_private_key(
            evp_pkey, unsafe_skip_rsa_key_validation
        )
+
    def _handle_key_loading_error(self) -> typing.NoReturn:
        """Translate the OpenSSL error stack into a ValueError and raise."""
        errors = self._consume_errors()

        if not errors:
            raise ValueError(
                "Could not deserialize key data. The data may be in an "
                "incorrect format or it may be encrypted with an unsupported "
                "algorithm."
            )

        elif (
            # Wrong-password signatures across OpenSSL generations: EVP bad
            # decrypt, PKCS12 cipher-final failure, or the 3.0 provider code.
            errors[0]._lib_reason_match(
                self._lib.ERR_LIB_EVP, self._lib.EVP_R_BAD_DECRYPT
            )
            or errors[0]._lib_reason_match(
                self._lib.ERR_LIB_PKCS12,
                self._lib.PKCS12_R_PKCS12_CIPHERFINAL_ERROR,
            )
            or (
                self._lib.Cryptography_HAS_PROVIDERS
                and errors[0]._lib_reason_match(
                    self._lib.ERR_LIB_PROV,
                    self._lib.PROV_R_BAD_DECRYPT,
                )
            )
        ):
            raise ValueError("Bad decrypt. Incorrect password?")

        elif any(
            error._lib_reason_match(
                self._lib.ERR_LIB_EVP,
                self._lib.EVP_R_UNSUPPORTED_PRIVATE_KEY_ALGORITHM,
            )
            for error in errors
        ):
            raise ValueError("Unsupported public key algorithm.")

        else:
            raise ValueError(
                "Could not deserialize key data. The data may be in an "
                "incorrect format, it may be encrypted with an unsupported "
                "algorithm, or it may be an unsupported key type (e.g. EC "
                "curves with explicit parameters).",
                errors,
            )
+
    def elliptic_curve_supported(self, curve: ec.EllipticCurve) -> bool:
        """True when OpenSSL can instantiate the named curve."""
        try:
            curve_nid = self._elliptic_curve_to_nid(curve)
        except UnsupportedAlgorithm:
            # Probe with NID_undef so the EC_GROUP call below fails cleanly.
            curve_nid = self._lib.NID_undef

        group = self._lib.EC_GROUP_new_by_curve_name(curve_nid)

        if group == self._ffi.NULL:
            self._consume_errors()
            return False
        else:
            self.openssl_assert(curve_nid != self._lib.NID_undef)
            self._lib.EC_GROUP_free(group)
            return True
+
+ def elliptic_curve_signature_algorithm_supported(
+ self,
+ signature_algorithm: ec.EllipticCurveSignatureAlgorithm,
+ curve: ec.EllipticCurve,
+ ) -> bool:
+ # We only support ECDSA right now.
+ if not isinstance(signature_algorithm, ec.ECDSA):
+ return False
+
+ return self.elliptic_curve_supported(curve)
+
+ def generate_elliptic_curve_private_key(
+ self, curve: ec.EllipticCurve
+ ) -> ec.EllipticCurvePrivateKey:
+ """
+ Generate a new private key on the named curve.
+ """
+
+ if self.elliptic_curve_supported(curve):
+ ec_cdata = self._ec_key_new_by_curve(curve)
+
+ res = self._lib.EC_KEY_generate_key(ec_cdata)
+ self.openssl_assert(res == 1)
+
+ evp_pkey = self._ec_cdata_to_evp_pkey(ec_cdata)
+
+ return _EllipticCurvePrivateKey(self, ec_cdata, evp_pkey)
+ else:
+ raise UnsupportedAlgorithm(
+ f"Backend object does not support {curve.name}.",
+ _Reasons.UNSUPPORTED_ELLIPTIC_CURVE,
+ )
+
+ def load_elliptic_curve_private_numbers(
+ self, numbers: ec.EllipticCurvePrivateNumbers
+ ) -> ec.EllipticCurvePrivateKey:
+ public = numbers.public_numbers
+
+ ec_cdata = self._ec_key_new_by_curve(public.curve)
+
+ private_value = self._ffi.gc(
+ self._int_to_bn(numbers.private_value), self._lib.BN_clear_free
+ )
+ res = self._lib.EC_KEY_set_private_key(ec_cdata, private_value)
+ if res != 1:
+ self._consume_errors()
+ raise ValueError("Invalid EC key.")
+
+ with self._tmp_bn_ctx() as bn_ctx:
+ self._ec_key_set_public_key_affine_coordinates(
+ ec_cdata, public.x, public.y, bn_ctx
+ )
+ # derive the expected public point and compare it to the one we
+ # just set based on the values we were given. If they don't match
+ # this isn't a valid key pair.
+ group = self._lib.EC_KEY_get0_group(ec_cdata)
+ self.openssl_assert(group != self._ffi.NULL)
+ set_point = backend._lib.EC_KEY_get0_public_key(ec_cdata)
+ self.openssl_assert(set_point != self._ffi.NULL)
+ computed_point = self._lib.EC_POINT_new(group)
+ self.openssl_assert(computed_point != self._ffi.NULL)
+ computed_point = self._ffi.gc(
+ computed_point, self._lib.EC_POINT_free
+ )
+ res = self._lib.EC_POINT_mul(
+ group,
+ computed_point,
+ private_value,
+ self._ffi.NULL,
+ self._ffi.NULL,
+ bn_ctx,
+ )
+ self.openssl_assert(res == 1)
+ if (
+ self._lib.EC_POINT_cmp(
+ group, set_point, computed_point, bn_ctx
+ )
+ != 0
+ ):
+ raise ValueError("Invalid EC key.")
+
+ evp_pkey = self._ec_cdata_to_evp_pkey(ec_cdata)
+
+ return _EllipticCurvePrivateKey(self, ec_cdata, evp_pkey)
+
    def load_elliptic_curve_public_numbers(
        self, numbers: ec.EllipticCurvePublicNumbers
    ) -> ec.EllipticCurvePublicKey:
        """
        Build an EC public key from affine (x, y) integer coordinates.

        :raises ValueError: if (x, y) is not a valid point on the curve.
        """
        ec_cdata = self._ec_key_new_by_curve(numbers.curve)
        with self._tmp_bn_ctx() as bn_ctx:
            self._ec_key_set_public_key_affine_coordinates(
                ec_cdata, numbers.x, numbers.y, bn_ctx
            )
        evp_pkey = self._ec_cdata_to_evp_pkey(ec_cdata)

        return _EllipticCurvePublicKey(self, ec_cdata, evp_pkey)
+
    def load_elliptic_curve_public_bytes(
        self, curve: ec.EllipticCurve, point_bytes: bytes
    ) -> ec.EllipticCurvePublicKey:
        """
        Build an EC public key from an encoded point (compressed or
        uncompressed SEC1 octet string).

        :raises ValueError: if the bytes do not decode to a point on
            *curve*.
        """
        ec_cdata = self._ec_key_new_by_curve(curve)
        group = self._lib.EC_KEY_get0_group(ec_cdata)
        self.openssl_assert(group != self._ffi.NULL)
        point = self._lib.EC_POINT_new(group)
        self.openssl_assert(point != self._ffi.NULL)
        point = self._ffi.gc(point, self._lib.EC_POINT_free)
        with self._tmp_bn_ctx() as bn_ctx:
            # oct2point parses the SEC1 encoding and validates the point.
            res = self._lib.EC_POINT_oct2point(
                group, point, point_bytes, len(point_bytes), bn_ctx
            )
            if res != 1:
                self._consume_errors()
                raise ValueError("Invalid public bytes for the given curve")

        res = self._lib.EC_KEY_set_public_key(ec_cdata, point)
        self.openssl_assert(res == 1)
        evp_pkey = self._ec_cdata_to_evp_pkey(ec_cdata)
        return _EllipticCurvePublicKey(self, ec_cdata, evp_pkey)
+
    def derive_elliptic_curve_private_key(
        self, private_value: int, curve: ec.EllipticCurve
    ) -> ec.EllipticCurvePrivateKey:
        """
        Construct a private key on *curve* from the raw scalar
        *private_value*, computing the matching public point.

        :raises ValueError: if the public point cannot be derived from
            the scalar.
        """
        ec_cdata = self._ec_key_new_by_curve(curve)

        group = self._lib.EC_KEY_get0_group(ec_cdata)
        self.openssl_assert(group != self._ffi.NULL)

        point = self._lib.EC_POINT_new(group)
        self.openssl_assert(point != self._ffi.NULL)
        point = self._ffi.gc(point, self._lib.EC_POINT_free)

        # BN_clear_free zeroes the scalar's memory before freeing it.
        value = self._int_to_bn(private_value)
        value = self._ffi.gc(value, self._lib.BN_clear_free)

        with self._tmp_bn_ctx() as bn_ctx:
            # point = private_value * G
            res = self._lib.EC_POINT_mul(
                group, point, value, self._ffi.NULL, self._ffi.NULL, bn_ctx
            )
            self.openssl_assert(res == 1)

            bn_x = self._lib.BN_CTX_get(bn_ctx)
            bn_y = self._lib.BN_CTX_get(bn_ctx)

            # Extracting affine coordinates fails for the point at
            # infinity (e.g. private_value == 0 or a multiple of the
            # group order).
            res = self._lib.EC_POINT_get_affine_coordinates(
                group, point, bn_x, bn_y, bn_ctx
            )
            if res != 1:
                self._consume_errors()
                raise ValueError("Unable to derive key from private_value")

        res = self._lib.EC_KEY_set_public_key(ec_cdata, point)
        self.openssl_assert(res == 1)
        private = self._int_to_bn(private_value)
        private = self._ffi.gc(private, self._lib.BN_clear_free)
        res = self._lib.EC_KEY_set_private_key(ec_cdata, private)
        self.openssl_assert(res == 1)

        evp_pkey = self._ec_cdata_to_evp_pkey(ec_cdata)

        return _EllipticCurvePrivateKey(self, ec_cdata, evp_pkey)
+
+ def _ec_key_new_by_curve(self, curve: ec.EllipticCurve):
+ curve_nid = self._elliptic_curve_to_nid(curve)
+ return self._ec_key_new_by_curve_nid(curve_nid)
+
+ def _ec_key_new_by_curve_nid(self, curve_nid: int):
+ ec_cdata = self._lib.EC_KEY_new_by_curve_name(curve_nid)
+ self.openssl_assert(ec_cdata != self._ffi.NULL)
+ return self._ffi.gc(ec_cdata, self._lib.EC_KEY_free)
+
+ def elliptic_curve_exchange_algorithm_supported(
+ self, algorithm: ec.ECDH, curve: ec.EllipticCurve
+ ) -> bool:
+ if self._fips_enabled and not isinstance(
+ curve, self._fips_ecdh_curves
+ ):
+ return False
+
+ return self.elliptic_curve_supported(curve) and isinstance(
+ algorithm, ec.ECDH
+ )
+
+ def _ec_cdata_to_evp_pkey(self, ec_cdata):
+ evp_pkey = self._create_evp_pkey_gc()
+ res = self._lib.EVP_PKEY_set1_EC_KEY(evp_pkey, ec_cdata)
+ self.openssl_assert(res == 1)
+ return evp_pkey
+
+ def _elliptic_curve_to_nid(self, curve: ec.EllipticCurve) -> int:
+ """
+ Get the NID for a curve name.
+ """
+
+ curve_aliases = {"secp192r1": "prime192v1", "secp256r1": "prime256v1"}
+
+ curve_name = curve_aliases.get(curve.name, curve.name)
+
+ curve_nid = self._lib.OBJ_sn2nid(curve_name.encode())
+ if curve_nid == self._lib.NID_undef:
+ raise UnsupportedAlgorithm(
+ f"{curve.name} is not a supported elliptic curve",
+ _Reasons.UNSUPPORTED_ELLIPTIC_CURVE,
+ )
+ return curve_nid
+
    @contextmanager
    def _tmp_bn_ctx(self):
        """
        Yield a started BN_CTX, guaranteeing BN_CTX_end on exit so any
        BIGNUMs obtained via BN_CTX_get inside the block are released.
        The BN_CTX itself is freed by the gc callback.
        """
        bn_ctx = self._lib.BN_CTX_new()
        self.openssl_assert(bn_ctx != self._ffi.NULL)
        bn_ctx = self._ffi.gc(bn_ctx, self._lib.BN_CTX_free)
        self._lib.BN_CTX_start(bn_ctx)
        try:
            yield bn_ctx
        finally:
            self._lib.BN_CTX_end(bn_ctx)
+
    def _ec_key_set_public_key_affine_coordinates(
        self,
        ec_cdata,
        x: int,
        y: int,
        bn_ctx,
    ) -> None:
        """
        Sets the public key point in the EC_KEY context to the affine x and y
        values.

        :raises ValueError: if x or y is negative, or if (x, y) is not a
            valid point on the key's curve.
        """

        if x < 0 or y < 0:
            raise ValueError(
                "Invalid EC key. Both x and y must be non-negative."
            )

        x = self._ffi.gc(self._int_to_bn(x), self._lib.BN_free)
        y = self._ffi.gc(self._int_to_bn(y), self._lib.BN_free)
        group = self._lib.EC_KEY_get0_group(ec_cdata)
        self.openssl_assert(group != self._ffi.NULL)
        point = self._lib.EC_POINT_new(group)
        self.openssl_assert(point != self._ffi.NULL)
        point = self._ffi.gc(point, self._lib.EC_POINT_free)
        # set_affine_coordinates validates that the point satisfies the
        # curve equation; failure means (x, y) is off-curve.
        res = self._lib.EC_POINT_set_affine_coordinates(
            group, point, x, y, bn_ctx
        )
        if res != 1:
            self._consume_errors()
            raise ValueError("Invalid EC key.")
        res = self._lib.EC_KEY_set_public_key(ec_cdata, point)
        self.openssl_assert(res == 1)
+
    def _private_key_bytes(
        self,
        encoding: serialization.Encoding,
        format: serialization.PrivateFormat,
        encryption_algorithm: serialization.KeySerializationEncryption,
        key,
        evp_pkey,
        cdata,
    ) -> bytes:
        """
        Serialize a private key to bytes.

        Dispatches on (format, encoding): PKCS8 and TraditionalOpenSSL
        support PEM/DER; OpenSSH supports PEM only. ``evp_pkey`` is used
        for PKCS8, ``cdata`` (the type-specific struct) for
        TraditionalOpenSSL, and ``key`` (the high-level object) for
        OpenSSH.

        :raises TypeError: on wrong argument types.
        :raises ValueError: on unsupported combinations.
        """
        # validate argument types
        if not isinstance(encoding, serialization.Encoding):
            raise TypeError("encoding must be an item from the Encoding enum")
        if not isinstance(format, serialization.PrivateFormat):
            raise TypeError(
                "format must be an item from the PrivateFormat enum"
            )
        if not isinstance(
            encryption_algorithm, serialization.KeySerializationEncryption
        ):
            raise TypeError(
                "Encryption algorithm must be a KeySerializationEncryption "
                "instance"
            )

        # validate password
        if isinstance(encryption_algorithm, serialization.NoEncryption):
            password = b""
        elif isinstance(
            encryption_algorithm, serialization.BestAvailableEncryption
        ):
            password = encryption_algorithm.password
            # 1023 is a PEM_BUFSIZE limitation of the OpenSSL callback.
            if len(password) > 1023:
                raise ValueError(
                    "Passwords longer than 1023 bytes are not supported by "
                    "this backend"
                )
        elif (
            isinstance(
                encryption_algorithm, serialization._KeySerializationEncryption
            )
            and encryption_algorithm._format
            is format
            is serialization.PrivateFormat.OpenSSH
        ):
            password = encryption_algorithm.password
        else:
            raise ValueError("Unsupported encryption type")

        # PKCS8 + PEM/DER
        if format is serialization.PrivateFormat.PKCS8:
            if encoding is serialization.Encoding.PEM:
                write_bio = self._lib.PEM_write_bio_PKCS8PrivateKey
            elif encoding is serialization.Encoding.DER:
                write_bio = self._lib.i2d_PKCS8PrivateKey_bio
            else:
                raise ValueError("Unsupported encoding for PKCS8")
            return self._private_key_bytes_via_bio(
                write_bio, evp_pkey, password
            )

        # TraditionalOpenSSL + PEM/DER
        if format is serialization.PrivateFormat.TraditionalOpenSSL:
            if self._fips_enabled and not isinstance(
                encryption_algorithm, serialization.NoEncryption
            ):
                raise ValueError(
                    "Encrypted traditional OpenSSL format is not "
                    "supported in FIPS mode."
                )
            key_type = self._lib.EVP_PKEY_id(evp_pkey)

            if encoding is serialization.Encoding.PEM:
                if key_type == self._lib.EVP_PKEY_RSA:
                    write_bio = self._lib.PEM_write_bio_RSAPrivateKey
                elif key_type == self._lib.EVP_PKEY_DSA:
                    write_bio = self._lib.PEM_write_bio_DSAPrivateKey
                elif key_type == self._lib.EVP_PKEY_EC:
                    write_bio = self._lib.PEM_write_bio_ECPrivateKey
                else:
                    raise ValueError(
                        "Unsupported key type for TraditionalOpenSSL"
                    )
                return self._private_key_bytes_via_bio(
                    write_bio, cdata, password
                )

            if encoding is serialization.Encoding.DER:
                if password:
                    raise ValueError(
                        "Encryption is not supported for DER encoded "
                        "traditional OpenSSL keys"
                    )
                if key_type == self._lib.EVP_PKEY_RSA:
                    write_bio = self._lib.i2d_RSAPrivateKey_bio
                elif key_type == self._lib.EVP_PKEY_EC:
                    write_bio = self._lib.i2d_ECPrivateKey_bio
                elif key_type == self._lib.EVP_PKEY_DSA:
                    write_bio = self._lib.i2d_DSAPrivateKey_bio
                else:
                    raise ValueError(
                        "Unsupported key type for TraditionalOpenSSL"
                    )
                return self._bio_func_output(write_bio, cdata)

            raise ValueError("Unsupported encoding for TraditionalOpenSSL")

        # OpenSSH + PEM
        if format is serialization.PrivateFormat.OpenSSH:
            if encoding is serialization.Encoding.PEM:
                return ssh._serialize_ssh_private_key(
                    key, password, encryption_algorithm
                )

            raise ValueError(
                "OpenSSH private key format can only be used"
                " with PEM encoding"
            )

        # Anything that key-specific code was supposed to handle earlier,
        # like Raw.
        raise ValueError("format is invalid with this key")
+
+ def _private_key_bytes_via_bio(
+ self, write_bio, evp_pkey, password
+ ) -> bytes:
+ if not password:
+ evp_cipher = self._ffi.NULL
+ else:
+ # This is a curated value that we will update over time.
+ evp_cipher = self._lib.EVP_get_cipherbyname(b"aes-256-cbc")
+
+ return self._bio_func_output(
+ write_bio,
+ evp_pkey,
+ evp_cipher,
+ password,
+ len(password),
+ self._ffi.NULL,
+ self._ffi.NULL,
+ )
+
+ def _bio_func_output(self, write_bio, *args) -> bytes:
+ bio = self._create_mem_bio_gc()
+ res = write_bio(bio, *args)
+ self.openssl_assert(res == 1)
+ return self._read_mem_bio(bio)
+
    def _public_key_bytes(
        self,
        encoding: serialization.Encoding,
        format: serialization.PublicFormat,
        key,
        evp_pkey,
        cdata,
    ) -> bytes:
        """
        Serialize a public key to bytes.

        Dispatches on (format, encoding): SubjectPublicKeyInfo and PKCS1
        support PEM/DER; OpenSSH requires OpenSSH encoding. ``cdata``
        (the type-specific struct) is used for PKCS1, ``evp_pkey`` for
        SubjectPublicKeyInfo, and ``key`` for OpenSSH.

        :raises TypeError: on wrong argument types.
        :raises ValueError: on unsupported combinations.
        """
        if not isinstance(encoding, serialization.Encoding):
            raise TypeError("encoding must be an item from the Encoding enum")
        if not isinstance(format, serialization.PublicFormat):
            raise TypeError(
                "format must be an item from the PublicFormat enum"
            )

        # SubjectPublicKeyInfo + PEM/DER
        if format is serialization.PublicFormat.SubjectPublicKeyInfo:
            if encoding is serialization.Encoding.PEM:
                write_bio = self._lib.PEM_write_bio_PUBKEY
            elif encoding is serialization.Encoding.DER:
                write_bio = self._lib.i2d_PUBKEY_bio
            else:
                raise ValueError(
                    "SubjectPublicKeyInfo works only with PEM or DER encoding"
                )
            return self._bio_func_output(write_bio, evp_pkey)

        # PKCS1 + PEM/DER
        if format is serialization.PublicFormat.PKCS1:
            # Only RSA is supported here.
            key_type = self._lib.EVP_PKEY_id(evp_pkey)
            if key_type != self._lib.EVP_PKEY_RSA:
                raise ValueError("PKCS1 format is supported only for RSA keys")

            if encoding is serialization.Encoding.PEM:
                write_bio = self._lib.PEM_write_bio_RSAPublicKey
            elif encoding is serialization.Encoding.DER:
                write_bio = self._lib.i2d_RSAPublicKey_bio
            else:
                raise ValueError("PKCS1 works only with PEM or DER encoding")
            return self._bio_func_output(write_bio, cdata)

        # OpenSSH + OpenSSH
        if format is serialization.PublicFormat.OpenSSH:
            if encoding is serialization.Encoding.OpenSSH:
                return ssh.serialize_ssh_public_key(key)

            raise ValueError(
                "OpenSSH format must be used with OpenSSH encoding"
            )

        # Anything that key-specific code was supposed to handle earlier,
        # like Raw, CompressedPoint, UncompressedPoint
        raise ValueError("format is invalid with this key")
+
+ def dh_supported(self) -> bool:
+ return not self._lib.CRYPTOGRAPHY_IS_BORINGSSL
+
    def generate_dh_parameters(
        self, generator: int, key_size: int
    ) -> dh.DHParameters:
        """
        Generate fresh DH parameters (a safe prime of *key_size* bits
        with the given generator). This can take a long time for large
        sizes.

        :raises ValueError: if key_size is too small, the generator is
            not 2 or 5, or OpenSSL fails to generate parameters.
        """
        if key_size < dh._MIN_MODULUS_SIZE:
            raise ValueError(
                "DH key_size must be at least {} bits".format(
                    dh._MIN_MODULUS_SIZE
                )
            )

        # OpenSSL's generator support for DH_generate_parameters_ex.
        if generator not in (2, 5):
            raise ValueError("DH generator must be 2 or 5")

        dh_param_cdata = self._lib.DH_new()
        self.openssl_assert(dh_param_cdata != self._ffi.NULL)
        dh_param_cdata = self._ffi.gc(dh_param_cdata, self._lib.DH_free)

        res = self._lib.DH_generate_parameters_ex(
            dh_param_cdata, key_size, generator, self._ffi.NULL
        )
        if res != 1:
            errors = self._consume_errors()
            raise ValueError("Unable to generate DH parameters", errors)

        return _DHParameters(self, dh_param_cdata)
+
+ def _dh_cdata_to_evp_pkey(self, dh_cdata):
+ evp_pkey = self._create_evp_pkey_gc()
+ res = self._lib.EVP_PKEY_set1_DH(evp_pkey, dh_cdata)
+ self.openssl_assert(res == 1)
+ return evp_pkey
+
    def generate_dh_private_key(
        self, parameters: dh.DHParameters
    ) -> dh.DHPrivateKey:
        """
        Generate a DH private key using the given parameters. The
        parameters are duplicated first so the parameter object is not
        mutated.
        """
        dh_key_cdata = _dh_params_dup(
            parameters._dh_cdata, self  # type: ignore[attr-defined]
        )

        res = self._lib.DH_generate_key(dh_key_cdata)
        self.openssl_assert(res == 1)

        evp_pkey = self._dh_cdata_to_evp_pkey(dh_key_cdata)

        return _DHPrivateKey(self, dh_key_cdata, evp_pkey)
+
+ def generate_dh_private_key_and_parameters(
+ self, generator: int, key_size: int
+ ) -> dh.DHPrivateKey:
+ return self.generate_dh_private_key(
+ self.generate_dh_parameters(generator, key_size)
+ )
+
    def load_dh_private_numbers(
        self, numbers: dh.DHPrivateNumbers
    ) -> dh.DHPrivateKey:
        """
        Build a DH private key from its integer components, running
        DH_check to validate the parameters.

        :raises ValueError: if DH_check rejects the parameters.
        """
        parameter_numbers = numbers.public_numbers.parameter_numbers

        dh_cdata = self._lib.DH_new()
        self.openssl_assert(dh_cdata != self._ffi.NULL)
        dh_cdata = self._ffi.gc(dh_cdata, self._lib.DH_free)

        # NOTE: DH_set0_pqg / DH_set0_key take ownership of these
        # BIGNUMs on success, so they must not be gc-wrapped here.
        p = self._int_to_bn(parameter_numbers.p)
        g = self._int_to_bn(parameter_numbers.g)

        if parameter_numbers.q is not None:
            q = self._int_to_bn(parameter_numbers.q)
        else:
            q = self._ffi.NULL

        pub_key = self._int_to_bn(numbers.public_numbers.y)
        priv_key = self._int_to_bn(numbers.x)

        res = self._lib.DH_set0_pqg(dh_cdata, p, q, g)
        self.openssl_assert(res == 1)

        res = self._lib.DH_set0_key(dh_cdata, pub_key, priv_key)
        self.openssl_assert(res == 1)

        codes = self._ffi.new("int[]", 1)
        res = self._lib.DH_check(dh_cdata, codes)
        self.openssl_assert(res == 1)

        # DH_check will return DH_NOT_SUITABLE_GENERATOR if p % 24 does not
        # equal 11 when the generator is 2 (a quadratic nonresidue).
        # We want to ignore that error because p % 24 == 23 is also fine.
        # Specifically, g is then a quadratic residue. Within the context of
        # Diffie-Hellman this means it can only generate half the possible
        # values. That sounds bad, but quadratic nonresidues leak a bit of
        # the key to the attacker in exchange for having the full key space
        # available. See: https://crypto.stackexchange.com/questions/12961
        if codes[0] != 0 and not (
            parameter_numbers.g == 2
            and codes[0] ^ self._lib.DH_NOT_SUITABLE_GENERATOR == 0
        ):
            raise ValueError("DH private numbers did not pass safety checks.")

        evp_pkey = self._dh_cdata_to_evp_pkey(dh_cdata)

        return _DHPrivateKey(self, dh_cdata, evp_pkey)
+
    def load_dh_public_numbers(
        self, numbers: dh.DHPublicNumbers
    ) -> dh.DHPublicKey:
        """
        Build a DH public key from its integer components. No DH_check
        validation is performed for public keys.
        """
        dh_cdata = self._lib.DH_new()
        self.openssl_assert(dh_cdata != self._ffi.NULL)
        dh_cdata = self._ffi.gc(dh_cdata, self._lib.DH_free)

        parameter_numbers = numbers.parameter_numbers

        # NOTE: DH_set0_pqg / DH_set0_key take ownership of these
        # BIGNUMs on success, so they must not be gc-wrapped here.
        p = self._int_to_bn(parameter_numbers.p)
        g = self._int_to_bn(parameter_numbers.g)

        if parameter_numbers.q is not None:
            q = self._int_to_bn(parameter_numbers.q)
        else:
            q = self._ffi.NULL

        pub_key = self._int_to_bn(numbers.y)

        res = self._lib.DH_set0_pqg(dh_cdata, p, q, g)
        self.openssl_assert(res == 1)

        res = self._lib.DH_set0_key(dh_cdata, pub_key, self._ffi.NULL)
        self.openssl_assert(res == 1)

        evp_pkey = self._dh_cdata_to_evp_pkey(dh_cdata)

        return _DHPublicKey(self, dh_cdata, evp_pkey)
+
    def load_dh_parameter_numbers(
        self, numbers: dh.DHParameterNumbers
    ) -> dh.DHParameters:
        """
        Build a DH parameters object from integer components (p, g and
        optional q).
        """
        dh_cdata = self._lib.DH_new()
        self.openssl_assert(dh_cdata != self._ffi.NULL)
        dh_cdata = self._ffi.gc(dh_cdata, self._lib.DH_free)

        # NOTE: DH_set0_pqg takes ownership of these BIGNUMs on success,
        # so they must not be gc-wrapped here.
        p = self._int_to_bn(numbers.p)
        g = self._int_to_bn(numbers.g)

        if numbers.q is not None:
            q = self._int_to_bn(numbers.q)
        else:
            q = self._ffi.NULL

        res = self._lib.DH_set0_pqg(dh_cdata, p, q, g)
        self.openssl_assert(res == 1)

        return _DHParameters(self, dh_cdata)
+
    def dh_parameters_supported(
        self, p: int, g: int, q: typing.Optional[int] = None
    ) -> bool:
        """
        Return True if OpenSSL's DH_check accepts (p, g[, q]) with no
        error codes.
        """
        dh_cdata = self._lib.DH_new()
        self.openssl_assert(dh_cdata != self._ffi.NULL)
        dh_cdata = self._ffi.gc(dh_cdata, self._lib.DH_free)

        # NOTE: DH_set0_pqg takes ownership of these BIGNUMs on success;
        # the int parameters are rebound to cdata here.
        p = self._int_to_bn(p)
        g = self._int_to_bn(g)

        if q is not None:
            q = self._int_to_bn(q)
        else:
            q = self._ffi.NULL

        res = self._lib.DH_set0_pqg(dh_cdata, p, q, g)
        self.openssl_assert(res == 1)

        codes = self._ffi.new("int[]", 1)
        res = self._lib.DH_check(dh_cdata, codes)
        self.openssl_assert(res == 1)

        # Any non-zero code flags a problem with the parameters.
        return codes[0] == 0
+
+ def dh_x942_serialization_supported(self) -> bool:
+ return self._lib.Cryptography_HAS_EVP_PKEY_DHX == 1
+
    def x25519_load_public_bytes(self, data: bytes) -> x25519.X25519PublicKey:
        """Delegate X25519 public-key parsing to the Rust bindings."""
        return rust_openssl.x25519.from_public_bytes(data)
+
    def x25519_load_private_bytes(
        self, data: bytes
    ) -> x25519.X25519PrivateKey:
        """Delegate X25519 private-key parsing to the Rust bindings."""
        return rust_openssl.x25519.from_private_bytes(data)
+
    def _evp_pkey_keygen_gc(self, nid):
        """
        Generate a key of the given NID (e.g. NID_X448, NID_ED25519)
        through the EVP_PKEY keygen API and return a gc-managed
        EVP_PKEY.
        """
        evp_pkey_ctx = self._lib.EVP_PKEY_CTX_new_id(nid, self._ffi.NULL)
        self.openssl_assert(evp_pkey_ctx != self._ffi.NULL)
        evp_pkey_ctx = self._ffi.gc(evp_pkey_ctx, self._lib.EVP_PKEY_CTX_free)
        res = self._lib.EVP_PKEY_keygen_init(evp_pkey_ctx)
        self.openssl_assert(res == 1)
        # Out-parameter: keygen allocates the EVP_PKEY into evp_ppkey[0].
        evp_ppkey = self._ffi.new("EVP_PKEY **")
        res = self._lib.EVP_PKEY_keygen(evp_pkey_ctx, evp_ppkey)
        self.openssl_assert(res == 1)
        self.openssl_assert(evp_ppkey[0] != self._ffi.NULL)
        evp_pkey = self._ffi.gc(evp_ppkey[0], self._lib.EVP_PKEY_free)
        return evp_pkey
+
    def x25519_generate_key(self) -> x25519.X25519PrivateKey:
        """Delegate X25519 key generation to the Rust bindings."""
        return rust_openssl.x25519.generate_key()
+
+ def x25519_supported(self) -> bool:
+ if self._fips_enabled:
+ return False
+ return not self._lib.CRYPTOGRAPHY_LIBRESSL_LESS_THAN_370
+
    def x448_load_public_bytes(self, data: bytes) -> x448.X448PublicKey:
        """
        Load an X448 public key from its 56-byte raw encoding.

        :raises ValueError: if the length is wrong.
        """
        if len(data) != 56:
            raise ValueError("An X448 public key is 56 bytes long")

        evp_pkey = self._lib.EVP_PKEY_new_raw_public_key(
            self._lib.NID_X448, self._ffi.NULL, data, len(data)
        )
        self.openssl_assert(evp_pkey != self._ffi.NULL)
        evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free)
        return _X448PublicKey(self, evp_pkey)
+
    def x448_load_private_bytes(self, data: bytes) -> x448.X448PrivateKey:
        """
        Load an X448 private key from its 56-byte raw encoding.

        :raises ValueError: if the length is wrong.
        """
        if len(data) != 56:
            raise ValueError("An X448 private key is 56 bytes long")

        # from_buffer lets us accept bytes-like objects without copying.
        data_ptr = self._ffi.from_buffer(data)
        evp_pkey = self._lib.EVP_PKEY_new_raw_private_key(
            self._lib.NID_X448, self._ffi.NULL, data_ptr, len(data)
        )
        self.openssl_assert(evp_pkey != self._ffi.NULL)
        evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free)
        return _X448PrivateKey(self, evp_pkey)
+
    def x448_generate_key(self) -> x448.X448PrivateKey:
        """Generate a fresh X448 private key via the EVP keygen API."""
        evp_pkey = self._evp_pkey_keygen_gc(self._lib.NID_X448)
        return _X448PrivateKey(self, evp_pkey)
+
+ def x448_supported(self) -> bool:
+ if self._fips_enabled:
+ return False
+ return (
+ not self._lib.CRYPTOGRAPHY_IS_LIBRESSL
+ and not self._lib.CRYPTOGRAPHY_IS_BORINGSSL
+ )
+
+ def ed25519_supported(self) -> bool:
+ if self._fips_enabled:
+ return False
+ return self._lib.CRYPTOGRAPHY_HAS_WORKING_ED25519
+
    def ed25519_load_public_bytes(
        self, data: bytes
    ) -> ed25519.Ed25519PublicKey:
        """
        Load an Ed25519 public key from its 32-byte raw encoding.

        :raises TypeError: if data is not bytes.
        :raises ValueError: if the length is wrong.
        """
        utils._check_bytes("data", data)

        if len(data) != ed25519._ED25519_KEY_SIZE:
            raise ValueError("An Ed25519 public key is 32 bytes long")

        evp_pkey = self._lib.EVP_PKEY_new_raw_public_key(
            self._lib.NID_ED25519, self._ffi.NULL, data, len(data)
        )
        self.openssl_assert(evp_pkey != self._ffi.NULL)
        evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free)

        return _Ed25519PublicKey(self, evp_pkey)
+
    def ed25519_load_private_bytes(
        self, data: bytes
    ) -> ed25519.Ed25519PrivateKey:
        """
        Load an Ed25519 private key from its 32-byte raw encoding.

        :raises TypeError: if data is not bytes-like.
        :raises ValueError: if the length is wrong.
        """
        if len(data) != ed25519._ED25519_KEY_SIZE:
            raise ValueError("An Ed25519 private key is 32 bytes long")

        utils._check_byteslike("data", data)
        # from_buffer lets us accept bytes-like objects without copying.
        data_ptr = self._ffi.from_buffer(data)
        evp_pkey = self._lib.EVP_PKEY_new_raw_private_key(
            self._lib.NID_ED25519, self._ffi.NULL, data_ptr, len(data)
        )
        self.openssl_assert(evp_pkey != self._ffi.NULL)
        evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free)

        return _Ed25519PrivateKey(self, evp_pkey)
+
    def ed25519_generate_key(self) -> ed25519.Ed25519PrivateKey:
        """Generate a fresh Ed25519 private key via the EVP keygen API."""
        evp_pkey = self._evp_pkey_keygen_gc(self._lib.NID_ED25519)
        return _Ed25519PrivateKey(self, evp_pkey)
+
+ def ed448_supported(self) -> bool:
+ if self._fips_enabled:
+ return False
+ return (
+ not self._lib.CRYPTOGRAPHY_OPENSSL_LESS_THAN_111B
+ and not self._lib.CRYPTOGRAPHY_IS_BORINGSSL
+ )
+
    def ed448_load_public_bytes(self, data: bytes) -> ed448.Ed448PublicKey:
        """
        Load an Ed448 public key from its 57-byte raw encoding.

        :raises TypeError: if data is not bytes.
        :raises ValueError: if the length is wrong.
        """
        utils._check_bytes("data", data)
        if len(data) != _ED448_KEY_SIZE:
            raise ValueError("An Ed448 public key is 57 bytes long")

        evp_pkey = self._lib.EVP_PKEY_new_raw_public_key(
            self._lib.NID_ED448, self._ffi.NULL, data, len(data)
        )
        self.openssl_assert(evp_pkey != self._ffi.NULL)
        evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free)

        return _Ed448PublicKey(self, evp_pkey)
+
    def ed448_load_private_bytes(self, data: bytes) -> ed448.Ed448PrivateKey:
        """
        Load an Ed448 private key from its 57-byte raw encoding.

        :raises TypeError: if data is not bytes-like.
        :raises ValueError: if the length is wrong.
        """
        utils._check_byteslike("data", data)
        if len(data) != _ED448_KEY_SIZE:
            raise ValueError("An Ed448 private key is 57 bytes long")

        # from_buffer lets us accept bytes-like objects without copying.
        data_ptr = self._ffi.from_buffer(data)
        evp_pkey = self._lib.EVP_PKEY_new_raw_private_key(
            self._lib.NID_ED448, self._ffi.NULL, data_ptr, len(data)
        )
        self.openssl_assert(evp_pkey != self._ffi.NULL)
        evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free)

        return _Ed448PrivateKey(self, evp_pkey)
+
    def ed448_generate_key(self) -> ed448.Ed448PrivateKey:
        """Generate a fresh Ed448 private key via the EVP keygen API."""
        evp_pkey = self._evp_pkey_keygen_gc(self._lib.NID_ED448)
        return _Ed448PrivateKey(self, evp_pkey)
+
    def derive_scrypt(
        self,
        key_material: bytes,
        salt: bytes,
        length: int,
        n: int,
        r: int,
        p: int,
    ) -> bytes:
        """
        Derive *length* bytes from *key_material* with scrypt
        (cost n, block size r, parallelization p).

        :raises MemoryError: if OpenSSL rejects the parameters, which in
            practice means the memory limit was exceeded.
        """
        buf = self._ffi.new("unsigned char[]", length)
        # from_buffer lets us accept bytes-like objects without copying.
        key_material_ptr = self._ffi.from_buffer(key_material)
        res = self._lib.EVP_PBE_scrypt(
            key_material_ptr,
            len(key_material),
            salt,
            len(salt),
            n,
            r,
            p,
            scrypt._MEM_LIMIT,
            buf,
            length,
        )
        if res != 1:
            errors = self._consume_errors()
            # memory required formula explained here:
            # https://blog.filippo.io/the-scrypt-parameters/
            min_memory = 128 * n * r // (1024**2)
            raise MemoryError(
                "Not enough memory to derive key. These parameters require"
                " {} MB of memory.".format(min_memory),
                errors,
            )
        return self._ffi.buffer(buf)[:]
+
    def aead_cipher_supported(self, cipher) -> bool:
        """
        Return True if the given AEAD cipher object is usable with this
        OpenSSL build, honoring FIPS restrictions.
        """
        cipher_name = aead._aead_cipher_name(cipher)
        if self._fips_enabled and cipher_name not in self._fips_aead:
            return False
        # SIV isn't loaded through get_cipherbyname but instead a new fetch API
        # only available in 3.0+. But if we know we're on 3.0+ then we know
        # it's supported.
        if cipher_name.endswith(b"-siv"):
            return self._lib.CRYPTOGRAPHY_OPENSSL_300_OR_GREATER == 1
        else:
            return (
                self._lib.EVP_get_cipherbyname(cipher_name) != self._ffi.NULL
            )
+
+ def _zero_data(self, data, length: int) -> None:
+ # We clear things this way because at the moment we're not
+ # sure of a better way that can guarantee it overwrites the
+ # memory of a bytearray and doesn't just replace the underlying char *.
+ for i in range(length):
+ data[i] = 0
+
    @contextlib.contextmanager
    def _zeroed_null_terminated_buf(self, data):
        """
        This method takes bytes, which can be a bytestring or a mutable
        buffer like a bytearray, and yields a null-terminated version of that
        data. This is required because PKCS12_parse doesn't take a length with
        its password char * and ffi.from_buffer doesn't provide null
        termination. So, to support zeroing the data via bytearray we
        need to build this ridiculous construct that copies the memory, but
        zeroes it after use.

        Yields ffi.NULL when data is None.
        """
        if data is None:
            yield self._ffi.NULL
        else:
            data_len = len(data)
            # ffi.new zero-fills, so the +1 byte is the NUL terminator.
            buf = self._ffi.new("char[]", data_len + 1)
            self._ffi.memmove(buf, data, data_len)
            try:
                yield buf
            finally:
                # Cast to a uint8_t * so we can assign by integer
                self._zero_data(self._ffi.cast("uint8_t *", buf), data_len)
+
+ def load_key_and_certificates_from_pkcs12(
+ self, data: bytes, password: typing.Optional[bytes]
+ ) -> typing.Tuple[
+ typing.Optional[PrivateKeyTypes],
+ typing.Optional[x509.Certificate],
+ typing.List[x509.Certificate],
+ ]:
+ pkcs12 = self.load_pkcs12(data, password)
+ return (
+ pkcs12.key,
+ pkcs12.cert.certificate if pkcs12.cert else None,
+ [cert.certificate for cert in pkcs12.additional_certs],
+ )
+
    def load_pkcs12(
        self, data: bytes, password: typing.Optional[bytes]
    ) -> PKCS12KeyAndCertificates:
        """
        Parse a DER-encoded PKCS12 blob into key, primary certificate
        (with friendly name, if any) and additional certificates.

        :raises TypeError: if password is not bytes-like.
        :raises ValueError: on malformed data or a wrong password.
        """
        if password is not None:
            utils._check_byteslike("password", password)

        bio = self._bytes_to_bio(data)
        p12 = self._lib.d2i_PKCS12_bio(bio.bio, self._ffi.NULL)
        if p12 == self._ffi.NULL:
            self._consume_errors()
            raise ValueError("Could not deserialize PKCS12 data")

        p12 = self._ffi.gc(p12, self._lib.PKCS12_free)
        # Out-parameters filled in by PKCS12_parse below.
        evp_pkey_ptr = self._ffi.new("EVP_PKEY **")
        x509_ptr = self._ffi.new("X509 **")
        sk_x509_ptr = self._ffi.new("Cryptography_STACK_OF_X509 **")
        # PKCS12_parse needs a NUL-terminated password; the helper also
        # zeroes the copy afterwards.
        with self._zeroed_null_terminated_buf(password) as password_buf:
            res = self._lib.PKCS12_parse(
                p12, password_buf, evp_pkey_ptr, x509_ptr, sk_x509_ptr
            )
        if res == 0:
            self._consume_errors()
            raise ValueError("Invalid password or PKCS12 data")

        cert = None
        key = None
        additional_certificates = []

        if evp_pkey_ptr[0] != self._ffi.NULL:
            evp_pkey = self._ffi.gc(evp_pkey_ptr[0], self._lib.EVP_PKEY_free)
            # We don't support turning off RSA key validation when loading
            # PKCS12 keys
            key = self._evp_pkey_to_private_key(
                evp_pkey, unsafe_skip_rsa_key_validation=False
            )

        if x509_ptr[0] != self._ffi.NULL:
            # NOTE: this local intentionally shadows the imported ``x509``
            # module for the remainder of this branch.
            x509 = self._ffi.gc(x509_ptr[0], self._lib.X509_free)
            cert_obj = self._ossl2cert(x509)
            name = None
            # The friendly name, if present, is stored as the X509 alias.
            maybe_name = self._lib.X509_alias_get0(x509, self._ffi.NULL)
            if maybe_name != self._ffi.NULL:
                name = self._ffi.string(maybe_name)
            cert = PKCS12Certificate(cert_obj, name)

        if sk_x509_ptr[0] != self._ffi.NULL:
            sk_x509 = self._ffi.gc(sk_x509_ptr[0], self._lib.sk_X509_free)
            num = self._lib.sk_X509_num(sk_x509_ptr[0])

            # In OpenSSL < 3.0.0 PKCS12 parsing reverses the order of the
            # certificates.
            indices: typing.Iterable[int]
            if (
                self._lib.CRYPTOGRAPHY_OPENSSL_300_OR_GREATER
                or self._lib.CRYPTOGRAPHY_IS_BORINGSSL
            ):
                indices = range(num)
            else:
                indices = reversed(range(num))

            for i in indices:
                x509 = self._lib.sk_X509_value(sk_x509, i)
                self.openssl_assert(x509 != self._ffi.NULL)
                x509 = self._ffi.gc(x509, self._lib.X509_free)
                addl_cert = self._ossl2cert(x509)
                addl_name = None
                maybe_name = self._lib.X509_alias_get0(x509, self._ffi.NULL)
                if maybe_name != self._ffi.NULL:
                    addl_name = self._ffi.string(maybe_name)
                additional_certificates.append(
                    PKCS12Certificate(addl_cert, addl_name)
                )

        return PKCS12KeyAndCertificates(key, cert, additional_certificates)
+
+ def serialize_key_and_certificates_to_pkcs12(
+ self,
+ name: typing.Optional[bytes],
+ key: typing.Optional[PKCS12PrivateKeyTypes],
+ cert: typing.Optional[x509.Certificate],
+ cas: typing.Optional[typing.List[_PKCS12CATypes]],
+ encryption_algorithm: serialization.KeySerializationEncryption,
+ ) -> bytes:
+ password = None
+ if name is not None:
+ utils._check_bytes("name", name)
+
+ if isinstance(encryption_algorithm, serialization.NoEncryption):
+ nid_cert = -1
+ nid_key = -1
+ pkcs12_iter = 0
+ mac_iter = 0
+ mac_alg = self._ffi.NULL
+ elif isinstance(
+ encryption_algorithm, serialization.BestAvailableEncryption
+ ):
+ # PKCS12 encryption is hopeless trash and can never be fixed.
+ # OpenSSL 3 supports PBESv2, but Libre and Boring do not, so
+ # we use PBESv1 with 3DES on the older paths.
+ if self._lib.CRYPTOGRAPHY_OPENSSL_300_OR_GREATER:
+ nid_cert = self._lib.NID_aes_256_cbc
+ nid_key = self._lib.NID_aes_256_cbc
+ else:
+ nid_cert = self._lib.NID_pbe_WithSHA1And3_Key_TripleDES_CBC
+ nid_key = self._lib.NID_pbe_WithSHA1And3_Key_TripleDES_CBC
+ # At least we can set this higher than OpenSSL's default
+ pkcs12_iter = 20000
+ # mac_iter chosen for compatibility reasons, see:
+ # https://www.openssl.org/docs/man1.1.1/man3/PKCS12_create.html
+ # Did we mention how lousy PKCS12 encryption is?
+ mac_iter = 1
+ # MAC algorithm can only be set on OpenSSL 3.0.0+
+ mac_alg = self._ffi.NULL
+ password = encryption_algorithm.password
+ elif (
+ isinstance(
+ encryption_algorithm, serialization._KeySerializationEncryption
+ )
+ and encryption_algorithm._format
+ is serialization.PrivateFormat.PKCS12
+ ):
+ # Default to OpenSSL's defaults. Behavior will vary based on the
+ # version of OpenSSL cryptography is compiled against.
+ nid_cert = 0
+ nid_key = 0
+ # Use the default iters we use in best available
+ pkcs12_iter = 20000
+ # See the Best Available comment for why this is 1
+ mac_iter = 1
+ password = encryption_algorithm.password
+ keycertalg = encryption_algorithm._key_cert_algorithm
+ if keycertalg is PBES.PBESv1SHA1And3KeyTripleDESCBC:
+ nid_cert = self._lib.NID_pbe_WithSHA1And3_Key_TripleDES_CBC
+ nid_key = self._lib.NID_pbe_WithSHA1And3_Key_TripleDES_CBC
+ elif keycertalg is PBES.PBESv2SHA256AndAES256CBC:
+ if not self._lib.CRYPTOGRAPHY_OPENSSL_300_OR_GREATER:
+ raise UnsupportedAlgorithm(
+ "PBESv2 is not supported by this version of OpenSSL"
+ )
+ nid_cert = self._lib.NID_aes_256_cbc
+ nid_key = self._lib.NID_aes_256_cbc
+ else:
+ assert keycertalg is None
+ # We use OpenSSL's defaults
+
+ if encryption_algorithm._hmac_hash is not None:
+ if not self._lib.Cryptography_HAS_PKCS12_SET_MAC:
+ raise UnsupportedAlgorithm(
+ "Setting MAC algorithm is not supported by this "
+ "version of OpenSSL."
+ )
+ mac_alg = self._evp_md_non_null_from_algorithm(
+ encryption_algorithm._hmac_hash
+ )
+ self.openssl_assert(mac_alg != self._ffi.NULL)
+ else:
+ mac_alg = self._ffi.NULL
+
+ if encryption_algorithm._kdf_rounds is not None:
+ pkcs12_iter = encryption_algorithm._kdf_rounds
+
+ else:
+ raise ValueError("Unsupported key encryption type")
+
+ if cas is None or len(cas) == 0:
+ sk_x509 = self._ffi.NULL
+ else:
+ sk_x509 = self._lib.sk_X509_new_null()
+ sk_x509 = self._ffi.gc(sk_x509, self._lib.sk_X509_free)
+
+ # This list is to keep the x509 values alive until end of function
+ ossl_cas = []
+ for ca in cas:
+ if isinstance(ca, PKCS12Certificate):
+ ca_alias = ca.friendly_name
+ ossl_ca = self._cert2ossl(ca.certificate)
+ if ca_alias is None:
+ res = self._lib.X509_alias_set1(
+ ossl_ca, self._ffi.NULL, -1
+ )
+ else:
+ res = self._lib.X509_alias_set1(
+ ossl_ca, ca_alias, len(ca_alias)
+ )
+ self.openssl_assert(res == 1)
+ else:
+ ossl_ca = self._cert2ossl(ca)
+ ossl_cas.append(ossl_ca)
+ res = self._lib.sk_X509_push(sk_x509, ossl_ca)
+ backend.openssl_assert(res >= 1)
+
+ with self._zeroed_null_terminated_buf(password) as password_buf:
+ with self._zeroed_null_terminated_buf(name) as name_buf:
+ ossl_cert = self._cert2ossl(cert) if cert else self._ffi.NULL
+ if key is not None:
+ evp_pkey = key._evp_pkey # type: ignore[union-attr]
+ else:
+ evp_pkey = self._ffi.NULL
+
+ p12 = self._lib.PKCS12_create(
+ password_buf,
+ name_buf,
+ evp_pkey,
+ ossl_cert,
+ sk_x509,
+ nid_key,
+ nid_cert,
+ pkcs12_iter,
+ mac_iter,
+ 0,
+ )
+
+ if (
+ self._lib.Cryptography_HAS_PKCS12_SET_MAC
+ and mac_alg != self._ffi.NULL
+ ):
+ self._lib.PKCS12_set_mac(
+ p12,
+ password_buf,
+ -1,
+ self._ffi.NULL,
+ 0,
+ mac_iter,
+ mac_alg,
+ )
+
+ self.openssl_assert(p12 != self._ffi.NULL)
+ p12 = self._ffi.gc(p12, self._lib.PKCS12_free)
+
+ bio = self._create_mem_bio_gc()
+ res = self._lib.i2d_PKCS12_bio(bio, p12)
+ self.openssl_assert(res > 0)
+ return self._read_mem_bio(bio)
+
+ def poly1305_supported(self) -> bool:
+ if self._fips_enabled:
+ return False
+ return self._lib.Cryptography_HAS_POLY1305 == 1
+
+ def create_poly1305_ctx(self, key: bytes) -> _Poly1305Context:
+ utils._check_byteslike("key", key)
+ if len(key) != _POLY1305_KEY_SIZE:
+ raise ValueError("A poly1305 key is 32 bytes long")
+
+ return _Poly1305Context(self, key)
+
+ def pkcs7_supported(self) -> bool:
+ return not self._lib.CRYPTOGRAPHY_IS_BORINGSSL
+
+ def load_pem_pkcs7_certificates(
+ self, data: bytes
+ ) -> typing.List[x509.Certificate]:
+ utils._check_bytes("data", data)
+ bio = self._bytes_to_bio(data)
+ p7 = self._lib.PEM_read_bio_PKCS7(
+ bio.bio, self._ffi.NULL, self._ffi.NULL, self._ffi.NULL
+ )
+ if p7 == self._ffi.NULL:
+ self._consume_errors()
+ raise ValueError("Unable to parse PKCS7 data")
+
+ p7 = self._ffi.gc(p7, self._lib.PKCS7_free)
+ return self._load_pkcs7_certificates(p7)
+
+ def load_der_pkcs7_certificates(
+ self, data: bytes
+ ) -> typing.List[x509.Certificate]:
+ utils._check_bytes("data", data)
+ bio = self._bytes_to_bio(data)
+ p7 = self._lib.d2i_PKCS7_bio(bio.bio, self._ffi.NULL)
+ if p7 == self._ffi.NULL:
+ self._consume_errors()
+ raise ValueError("Unable to parse PKCS7 data")
+
+ p7 = self._ffi.gc(p7, self._lib.PKCS7_free)
+ return self._load_pkcs7_certificates(p7)
+
+ def _load_pkcs7_certificates(self, p7) -> typing.List[x509.Certificate]:
+ nid = self._lib.OBJ_obj2nid(p7.type)
+ self.openssl_assert(nid != self._lib.NID_undef)
+ if nid != self._lib.NID_pkcs7_signed:
+ raise UnsupportedAlgorithm(
+ "Only basic signed structures are currently supported. NID"
+ " for this data was {}".format(nid),
+ _Reasons.UNSUPPORTED_SERIALIZATION,
+ )
+
+ sk_x509 = p7.d.sign.cert
+ num = self._lib.sk_X509_num(sk_x509)
+ certs = []
+ for i in range(num):
+ x509 = self._lib.sk_X509_value(sk_x509, i)
+ self.openssl_assert(x509 != self._ffi.NULL)
+ cert = self._ossl2cert(x509)
+ certs.append(cert)
+
+ return certs
+
+
+class GetCipherByName:
+ def __init__(self, fmt: str):
+ self._fmt = fmt
+
+ def __call__(self, backend: Backend, cipher: CipherAlgorithm, mode: Mode):
+ cipher_name = self._fmt.format(cipher=cipher, mode=mode).lower()
+ evp_cipher = backend._lib.EVP_get_cipherbyname(
+ cipher_name.encode("ascii")
+ )
+
+ # try EVP_CIPHER_fetch if present
+ if (
+ evp_cipher == backend._ffi.NULL
+ and backend._lib.Cryptography_HAS_300_EVP_CIPHER
+ ):
+ evp_cipher = backend._lib.EVP_CIPHER_fetch(
+ backend._ffi.NULL,
+ cipher_name.encode("ascii"),
+ backend._ffi.NULL,
+ )
+
+ backend._consume_errors()
+ return evp_cipher
+
+
+def _get_xts_cipher(backend: Backend, cipher: AES, mode):
+ cipher_name = f"aes-{cipher.key_size // 2}-xts"
+ return backend._lib.EVP_get_cipherbyname(cipher_name.encode("ascii"))
+
+
+backend = Backend()
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/backends/openssl/ciphers.py b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/backends/openssl/ciphers.py
new file mode 100644
index 00000000..075d68fb
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/backends/openssl/ciphers.py
@@ -0,0 +1,281 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import typing
+
+from cryptography.exceptions import InvalidTag, UnsupportedAlgorithm, _Reasons
+from cryptography.hazmat.primitives import ciphers
+from cryptography.hazmat.primitives.ciphers import algorithms, modes
+
+if typing.TYPE_CHECKING:
+ from cryptography.hazmat.backends.openssl.backend import Backend
+
+
+class _CipherContext:
+ _ENCRYPT = 1
+ _DECRYPT = 0
+ _MAX_CHUNK_SIZE = 2**30 - 1
+
+ def __init__(
+ self, backend: "Backend", cipher, mode, operation: int
+ ) -> None:
+ self._backend = backend
+ self._cipher = cipher
+ self._mode = mode
+ self._operation = operation
+ self._tag: typing.Optional[bytes] = None
+
+ if isinstance(self._cipher, ciphers.BlockCipherAlgorithm):
+ self._block_size_bytes = self._cipher.block_size // 8
+ else:
+ self._block_size_bytes = 1
+
+ ctx = self._backend._lib.EVP_CIPHER_CTX_new()
+ ctx = self._backend._ffi.gc(
+ ctx, self._backend._lib.EVP_CIPHER_CTX_free
+ )
+
+ registry = self._backend._cipher_registry
+ try:
+ adapter = registry[type(cipher), type(mode)]
+ except KeyError:
+ raise UnsupportedAlgorithm(
+ "cipher {} in {} mode is not supported "
+ "by this backend.".format(
+ cipher.name, mode.name if mode else mode
+ ),
+ _Reasons.UNSUPPORTED_CIPHER,
+ )
+
+ evp_cipher = adapter(self._backend, cipher, mode)
+ if evp_cipher == self._backend._ffi.NULL:
+ msg = f"cipher {cipher.name} "
+ if mode is not None:
+ msg += f"in {mode.name} mode "
+ msg += (
+ "is not supported by this backend (Your version of OpenSSL "
+ "may be too old. Current version: {}.)"
+ ).format(self._backend.openssl_version_text())
+ raise UnsupportedAlgorithm(msg, _Reasons.UNSUPPORTED_CIPHER)
+
+ if isinstance(mode, modes.ModeWithInitializationVector):
+ iv_nonce = self._backend._ffi.from_buffer(
+ mode.initialization_vector
+ )
+ elif isinstance(mode, modes.ModeWithTweak):
+ iv_nonce = self._backend._ffi.from_buffer(mode.tweak)
+ elif isinstance(mode, modes.ModeWithNonce):
+ iv_nonce = self._backend._ffi.from_buffer(mode.nonce)
+ elif isinstance(cipher, algorithms.ChaCha20):
+ iv_nonce = self._backend._ffi.from_buffer(cipher.nonce)
+ else:
+ iv_nonce = self._backend._ffi.NULL
+ # begin init with cipher and operation type
+ res = self._backend._lib.EVP_CipherInit_ex(
+ ctx,
+ evp_cipher,
+ self._backend._ffi.NULL,
+ self._backend._ffi.NULL,
+ self._backend._ffi.NULL,
+ operation,
+ )
+ self._backend.openssl_assert(res != 0)
+ # set the key length to handle variable key ciphers
+ res = self._backend._lib.EVP_CIPHER_CTX_set_key_length(
+ ctx, len(cipher.key)
+ )
+ self._backend.openssl_assert(res != 0)
+ if isinstance(mode, modes.GCM):
+ res = self._backend._lib.EVP_CIPHER_CTX_ctrl(
+ ctx,
+ self._backend._lib.EVP_CTRL_AEAD_SET_IVLEN,
+ len(iv_nonce),
+ self._backend._ffi.NULL,
+ )
+ self._backend.openssl_assert(res != 0)
+ if mode.tag is not None:
+ res = self._backend._lib.EVP_CIPHER_CTX_ctrl(
+ ctx,
+ self._backend._lib.EVP_CTRL_AEAD_SET_TAG,
+ len(mode.tag),
+ mode.tag,
+ )
+ self._backend.openssl_assert(res != 0)
+ self._tag = mode.tag
+
+ # pass key/iv
+ res = self._backend._lib.EVP_CipherInit_ex(
+ ctx,
+ self._backend._ffi.NULL,
+ self._backend._ffi.NULL,
+ self._backend._ffi.from_buffer(cipher.key),
+ iv_nonce,
+ operation,
+ )
+
+ # Check for XTS mode duplicate keys error
+ errors = self._backend._consume_errors()
+ lib = self._backend._lib
+ if res == 0 and (
+ (
+ lib.CRYPTOGRAPHY_OPENSSL_111D_OR_GREATER
+ and errors[0]._lib_reason_match(
+ lib.ERR_LIB_EVP, lib.EVP_R_XTS_DUPLICATED_KEYS
+ )
+ )
+ or (
+ lib.Cryptography_HAS_PROVIDERS
+ and errors[0]._lib_reason_match(
+ lib.ERR_LIB_PROV, lib.PROV_R_XTS_DUPLICATED_KEYS
+ )
+ )
+ ):
+ raise ValueError("In XTS mode duplicated keys are not allowed")
+
+ self._backend.openssl_assert(res != 0, errors=errors)
+
+ # We purposely disable padding here as it's handled higher up in the
+ # API.
+ self._backend._lib.EVP_CIPHER_CTX_set_padding(ctx, 0)
+ self._ctx = ctx
+
+ def update(self, data: bytes) -> bytes:
+ buf = bytearray(len(data) + self._block_size_bytes - 1)
+ n = self.update_into(data, buf)
+ return bytes(buf[:n])
+
+ def update_into(self, data: bytes, buf: bytes) -> int:
+ total_data_len = len(data)
+ if len(buf) < (total_data_len + self._block_size_bytes - 1):
+ raise ValueError(
+ "buffer must be at least {} bytes for this "
+ "payload".format(len(data) + self._block_size_bytes - 1)
+ )
+
+ data_processed = 0
+ total_out = 0
+ outlen = self._backend._ffi.new("int *")
+ baseoutbuf = self._backend._ffi.from_buffer(buf, require_writable=True)
+ baseinbuf = self._backend._ffi.from_buffer(data)
+
+ while data_processed != total_data_len:
+ outbuf = baseoutbuf + total_out
+ inbuf = baseinbuf + data_processed
+ inlen = min(self._MAX_CHUNK_SIZE, total_data_len - data_processed)
+
+ res = self._backend._lib.EVP_CipherUpdate(
+ self._ctx, outbuf, outlen, inbuf, inlen
+ )
+ if res == 0 and isinstance(self._mode, modes.XTS):
+ self._backend._consume_errors()
+ raise ValueError(
+ "In XTS mode you must supply at least a full block in the "
+ "first update call. For AES this is 16 bytes."
+ )
+ else:
+ self._backend.openssl_assert(res != 0)
+ data_processed += inlen
+ total_out += outlen[0]
+
+ return total_out
+
+ def finalize(self) -> bytes:
+ if (
+ self._operation == self._DECRYPT
+ and isinstance(self._mode, modes.ModeWithAuthenticationTag)
+ and self.tag is None
+ ):
+ raise ValueError(
+ "Authentication tag must be provided when decrypting."
+ )
+
+ buf = self._backend._ffi.new("unsigned char[]", self._block_size_bytes)
+ outlen = self._backend._ffi.new("int *")
+ res = self._backend._lib.EVP_CipherFinal_ex(self._ctx, buf, outlen)
+ if res == 0:
+ errors = self._backend._consume_errors()
+
+ if not errors and isinstance(self._mode, modes.GCM):
+ raise InvalidTag
+
+ lib = self._backend._lib
+ self._backend.openssl_assert(
+ errors[0]._lib_reason_match(
+ lib.ERR_LIB_EVP,
+ lib.EVP_R_DATA_NOT_MULTIPLE_OF_BLOCK_LENGTH,
+ )
+ or (
+ lib.Cryptography_HAS_PROVIDERS
+ and errors[0]._lib_reason_match(
+ lib.ERR_LIB_PROV,
+ lib.PROV_R_WRONG_FINAL_BLOCK_LENGTH,
+ )
+ )
+ or (
+ lib.CRYPTOGRAPHY_IS_BORINGSSL
+ and errors[0].reason
+ == lib.CIPHER_R_DATA_NOT_MULTIPLE_OF_BLOCK_LENGTH
+ ),
+ errors=errors,
+ )
+ raise ValueError(
+ "The length of the provided data is not a multiple of "
+ "the block length."
+ )
+
+ if (
+ isinstance(self._mode, modes.GCM)
+ and self._operation == self._ENCRYPT
+ ):
+ tag_buf = self._backend._ffi.new(
+ "unsigned char[]", self._block_size_bytes
+ )
+ res = self._backend._lib.EVP_CIPHER_CTX_ctrl(
+ self._ctx,
+ self._backend._lib.EVP_CTRL_AEAD_GET_TAG,
+ self._block_size_bytes,
+ tag_buf,
+ )
+ self._backend.openssl_assert(res != 0)
+ self._tag = self._backend._ffi.buffer(tag_buf)[:]
+
+ res = self._backend._lib.EVP_CIPHER_CTX_reset(self._ctx)
+ self._backend.openssl_assert(res == 1)
+ return self._backend._ffi.buffer(buf)[: outlen[0]]
+
+ def finalize_with_tag(self, tag: bytes) -> bytes:
+ tag_len = len(tag)
+ if tag_len < self._mode._min_tag_length:
+ raise ValueError(
+ "Authentication tag must be {} bytes or longer.".format(
+ self._mode._min_tag_length
+ )
+ )
+ elif tag_len > self._block_size_bytes:
+ raise ValueError(
+ "Authentication tag cannot be more than {} bytes.".format(
+ self._block_size_bytes
+ )
+ )
+ res = self._backend._lib.EVP_CIPHER_CTX_ctrl(
+ self._ctx, self._backend._lib.EVP_CTRL_AEAD_SET_TAG, len(tag), tag
+ )
+ self._backend.openssl_assert(res != 0)
+ self._tag = tag
+ return self.finalize()
+
+ def authenticate_additional_data(self, data: bytes) -> None:
+ outlen = self._backend._ffi.new("int *")
+ res = self._backend._lib.EVP_CipherUpdate(
+ self._ctx,
+ self._backend._ffi.NULL,
+ outlen,
+ self._backend._ffi.from_buffer(data),
+ len(data),
+ )
+ self._backend.openssl_assert(res != 0)
+
+ @property
+ def tag(self) -> typing.Optional[bytes]:
+ return self._tag
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/backends/openssl/cmac.py b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/backends/openssl/cmac.py
new file mode 100644
index 00000000..6f736329
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/backends/openssl/cmac.py
@@ -0,0 +1,87 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import typing
+
+from cryptography.exceptions import (
+ InvalidSignature,
+ UnsupportedAlgorithm,
+ _Reasons,
+)
+from cryptography.hazmat.primitives import constant_time
+from cryptography.hazmat.primitives.ciphers.modes import CBC
+
+if typing.TYPE_CHECKING:
+ from cryptography.hazmat.backends.openssl.backend import Backend
+ from cryptography.hazmat.primitives import ciphers
+
+
+class _CMACContext:
+ def __init__(
+ self,
+ backend: "Backend",
+ algorithm: "ciphers.BlockCipherAlgorithm",
+ ctx=None,
+ ) -> None:
+ if not backend.cmac_algorithm_supported(algorithm):
+ raise UnsupportedAlgorithm(
+ "This backend does not support CMAC.",
+ _Reasons.UNSUPPORTED_CIPHER,
+ )
+
+ self._backend = backend
+ self._key = algorithm.key
+ self._algorithm = algorithm
+ self._output_length = algorithm.block_size // 8
+
+ if ctx is None:
+ registry = self._backend._cipher_registry
+ adapter = registry[type(algorithm), CBC]
+
+ evp_cipher = adapter(self._backend, algorithm, CBC)
+
+ ctx = self._backend._lib.CMAC_CTX_new()
+
+ self._backend.openssl_assert(ctx != self._backend._ffi.NULL)
+ ctx = self._backend._ffi.gc(ctx, self._backend._lib.CMAC_CTX_free)
+
+ key_ptr = self._backend._ffi.from_buffer(self._key)
+ res = self._backend._lib.CMAC_Init(
+ ctx,
+ key_ptr,
+ len(self._key),
+ evp_cipher,
+ self._backend._ffi.NULL,
+ )
+ self._backend.openssl_assert(res == 1)
+
+ self._ctx = ctx
+
+ def update(self, data: bytes) -> None:
+ res = self._backend._lib.CMAC_Update(self._ctx, data, len(data))
+ self._backend.openssl_assert(res == 1)
+
+ def finalize(self) -> bytes:
+ buf = self._backend._ffi.new("unsigned char[]", self._output_length)
+ length = self._backend._ffi.new("size_t *", self._output_length)
+ res = self._backend._lib.CMAC_Final(self._ctx, buf, length)
+ self._backend.openssl_assert(res == 1)
+
+ self._ctx = None
+
+ return self._backend._ffi.buffer(buf)[:]
+
+ def copy(self) -> "_CMACContext":
+ copied_ctx = self._backend._lib.CMAC_CTX_new()
+ copied_ctx = self._backend._ffi.gc(
+ copied_ctx, self._backend._lib.CMAC_CTX_free
+ )
+ res = self._backend._lib.CMAC_CTX_copy(copied_ctx, self._ctx)
+ self._backend.openssl_assert(res == 1)
+ return _CMACContext(self._backend, self._algorithm, ctx=copied_ctx)
+
+ def verify(self, signature: bytes) -> None:
+ digest = self.finalize()
+ if not constant_time.bytes_eq(digest, signature):
+ raise InvalidSignature("Signature did not match digest.")
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/backends/openssl/decode_asn1.py b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/backends/openssl/decode_asn1.py
new file mode 100644
index 00000000..df91d6d8
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/backends/openssl/decode_asn1.py
@@ -0,0 +1,31 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+
+from cryptography import x509
+
+# CRLReason ::= ENUMERATED {
+# unspecified (0),
+# keyCompromise (1),
+# cACompromise (2),
+# affiliationChanged (3),
+# superseded (4),
+# cessationOfOperation (5),
+# certificateHold (6),
+# -- value 7 is not used
+# removeFromCRL (8),
+# privilegeWithdrawn (9),
+# aACompromise (10) }
+_CRL_ENTRY_REASON_ENUM_TO_CODE = {
+ x509.ReasonFlags.unspecified: 0,
+ x509.ReasonFlags.key_compromise: 1,
+ x509.ReasonFlags.ca_compromise: 2,
+ x509.ReasonFlags.affiliation_changed: 3,
+ x509.ReasonFlags.superseded: 4,
+ x509.ReasonFlags.cessation_of_operation: 5,
+ x509.ReasonFlags.certificate_hold: 6,
+ x509.ReasonFlags.remove_from_crl: 8,
+ x509.ReasonFlags.privilege_withdrawn: 9,
+ x509.ReasonFlags.aa_compromise: 10,
+}
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/backends/openssl/dh.py b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/backends/openssl/dh.py
new file mode 100644
index 00000000..87d6fb8a
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/backends/openssl/dh.py
@@ -0,0 +1,317 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import typing
+
+from cryptography.exceptions import UnsupportedAlgorithm, _Reasons
+from cryptography.hazmat.primitives import serialization
+from cryptography.hazmat.primitives.asymmetric import dh
+
+if typing.TYPE_CHECKING:
+ from cryptography.hazmat.backends.openssl.backend import Backend
+
+
+def _dh_params_dup(dh_cdata, backend: "Backend"):
+ lib = backend._lib
+ ffi = backend._ffi
+
+ param_cdata = lib.DHparams_dup(dh_cdata)
+ backend.openssl_assert(param_cdata != ffi.NULL)
+ param_cdata = ffi.gc(param_cdata, lib.DH_free)
+ if lib.CRYPTOGRAPHY_IS_LIBRESSL:
+ # In libressl DHparams_dup don't copy q
+ q = ffi.new("BIGNUM **")
+ lib.DH_get0_pqg(dh_cdata, ffi.NULL, q, ffi.NULL)
+ q_dup = lib.BN_dup(q[0])
+ res = lib.DH_set0_pqg(param_cdata, ffi.NULL, q_dup, ffi.NULL)
+ backend.openssl_assert(res == 1)
+
+ return param_cdata
+
+
+def _dh_cdata_to_parameters(dh_cdata, backend: "Backend") -> "_DHParameters":
+ param_cdata = _dh_params_dup(dh_cdata, backend)
+ return _DHParameters(backend, param_cdata)
+
+
+class _DHParameters(dh.DHParameters):
+ def __init__(self, backend: "Backend", dh_cdata):
+ self._backend = backend
+ self._dh_cdata = dh_cdata
+
+ def parameter_numbers(self) -> dh.DHParameterNumbers:
+ p = self._backend._ffi.new("BIGNUM **")
+ g = self._backend._ffi.new("BIGNUM **")
+ q = self._backend._ffi.new("BIGNUM **")
+ self._backend._lib.DH_get0_pqg(self._dh_cdata, p, q, g)
+ self._backend.openssl_assert(p[0] != self._backend._ffi.NULL)
+ self._backend.openssl_assert(g[0] != self._backend._ffi.NULL)
+ q_val: typing.Optional[int]
+ if q[0] == self._backend._ffi.NULL:
+ q_val = None
+ else:
+ q_val = self._backend._bn_to_int(q[0])
+ return dh.DHParameterNumbers(
+ p=self._backend._bn_to_int(p[0]),
+ g=self._backend._bn_to_int(g[0]),
+ q=q_val,
+ )
+
+ def generate_private_key(self) -> dh.DHPrivateKey:
+ return self._backend.generate_dh_private_key(self)
+
+ def parameter_bytes(
+ self,
+ encoding: serialization.Encoding,
+ format: serialization.ParameterFormat,
+ ) -> bytes:
+ if encoding is serialization.Encoding.OpenSSH:
+ raise TypeError("OpenSSH encoding is not supported")
+
+ if format is not serialization.ParameterFormat.PKCS3:
+ raise ValueError("Only PKCS3 serialization is supported")
+
+ q = self._backend._ffi.new("BIGNUM **")
+ self._backend._lib.DH_get0_pqg(
+ self._dh_cdata, self._backend._ffi.NULL, q, self._backend._ffi.NULL
+ )
+ if (
+ q[0] != self._backend._ffi.NULL
+ and not self._backend._lib.Cryptography_HAS_EVP_PKEY_DHX
+ ):
+ raise UnsupportedAlgorithm(
+ "DH X9.42 serialization is not supported",
+ _Reasons.UNSUPPORTED_SERIALIZATION,
+ )
+
+ if encoding is serialization.Encoding.PEM:
+ if q[0] != self._backend._ffi.NULL:
+ write_bio = self._backend._lib.PEM_write_bio_DHxparams
+ else:
+ write_bio = self._backend._lib.PEM_write_bio_DHparams
+ elif encoding is serialization.Encoding.DER:
+ if q[0] != self._backend._ffi.NULL:
+ write_bio = self._backend._lib.i2d_DHxparams_bio
+ else:
+ write_bio = self._backend._lib.i2d_DHparams_bio
+ else:
+ raise TypeError("encoding must be an item from the Encoding enum")
+
+ bio = self._backend._create_mem_bio_gc()
+ res = write_bio(bio, self._dh_cdata)
+ self._backend.openssl_assert(res == 1)
+ return self._backend._read_mem_bio(bio)
+
+
+def _get_dh_num_bits(backend, dh_cdata) -> int:
+ p = backend._ffi.new("BIGNUM **")
+ backend._lib.DH_get0_pqg(dh_cdata, p, backend._ffi.NULL, backend._ffi.NULL)
+ backend.openssl_assert(p[0] != backend._ffi.NULL)
+ return backend._lib.BN_num_bits(p[0])
+
+
+class _DHPrivateKey(dh.DHPrivateKey):
+ def __init__(self, backend: "Backend", dh_cdata, evp_pkey):
+ self._backend = backend
+ self._dh_cdata = dh_cdata
+ self._evp_pkey = evp_pkey
+ self._key_size_bytes = self._backend._lib.DH_size(dh_cdata)
+
+ @property
+ def key_size(self) -> int:
+ return _get_dh_num_bits(self._backend, self._dh_cdata)
+
+ def private_numbers(self) -> dh.DHPrivateNumbers:
+ p = self._backend._ffi.new("BIGNUM **")
+ g = self._backend._ffi.new("BIGNUM **")
+ q = self._backend._ffi.new("BIGNUM **")
+ self._backend._lib.DH_get0_pqg(self._dh_cdata, p, q, g)
+ self._backend.openssl_assert(p[0] != self._backend._ffi.NULL)
+ self._backend.openssl_assert(g[0] != self._backend._ffi.NULL)
+ if q[0] == self._backend._ffi.NULL:
+ q_val = None
+ else:
+ q_val = self._backend._bn_to_int(q[0])
+ pub_key = self._backend._ffi.new("BIGNUM **")
+ priv_key = self._backend._ffi.new("BIGNUM **")
+ self._backend._lib.DH_get0_key(self._dh_cdata, pub_key, priv_key)
+ self._backend.openssl_assert(pub_key[0] != self._backend._ffi.NULL)
+ self._backend.openssl_assert(priv_key[0] != self._backend._ffi.NULL)
+ return dh.DHPrivateNumbers(
+ public_numbers=dh.DHPublicNumbers(
+ parameter_numbers=dh.DHParameterNumbers(
+ p=self._backend._bn_to_int(p[0]),
+ g=self._backend._bn_to_int(g[0]),
+ q=q_val,
+ ),
+ y=self._backend._bn_to_int(pub_key[0]),
+ ),
+ x=self._backend._bn_to_int(priv_key[0]),
+ )
+
+ def exchange(self, peer_public_key: dh.DHPublicKey) -> bytes:
+ if not isinstance(peer_public_key, _DHPublicKey):
+ raise TypeError("peer_public_key must be a DHPublicKey")
+
+ ctx = self._backend._lib.EVP_PKEY_CTX_new(
+ self._evp_pkey, self._backend._ffi.NULL
+ )
+ self._backend.openssl_assert(ctx != self._backend._ffi.NULL)
+ ctx = self._backend._ffi.gc(ctx, self._backend._lib.EVP_PKEY_CTX_free)
+ res = self._backend._lib.EVP_PKEY_derive_init(ctx)
+ self._backend.openssl_assert(res == 1)
+ res = self._backend._lib.EVP_PKEY_derive_set_peer(
+ ctx, peer_public_key._evp_pkey
+ )
+ # Invalid kex errors here in OpenSSL 3.0 because checks were moved
+ # to EVP_PKEY_derive_set_peer
+ self._exchange_assert(res == 1)
+ keylen = self._backend._ffi.new("size_t *")
+ res = self._backend._lib.EVP_PKEY_derive(
+ ctx, self._backend._ffi.NULL, keylen
+ )
+ # Invalid kex errors here in OpenSSL < 3
+ self._exchange_assert(res == 1)
+ self._backend.openssl_assert(keylen[0] > 0)
+ buf = self._backend._ffi.new("unsigned char[]", keylen[0])
+ res = self._backend._lib.EVP_PKEY_derive(ctx, buf, keylen)
+ self._backend.openssl_assert(res == 1)
+
+ key = self._backend._ffi.buffer(buf, keylen[0])[:]
+ pad = self._key_size_bytes - len(key)
+
+ if pad > 0:
+ key = (b"\x00" * pad) + key
+
+ return key
+
+ def _exchange_assert(self, ok: bool) -> None:
+ if not ok:
+ errors = self._backend._consume_errors()
+ raise ValueError(
+ "Error computing shared key.",
+ errors,
+ )
+
+ def public_key(self) -> dh.DHPublicKey:
+ dh_cdata = _dh_params_dup(self._dh_cdata, self._backend)
+ pub_key = self._backend._ffi.new("BIGNUM **")
+ self._backend._lib.DH_get0_key(
+ self._dh_cdata, pub_key, self._backend._ffi.NULL
+ )
+ self._backend.openssl_assert(pub_key[0] != self._backend._ffi.NULL)
+ pub_key_dup = self._backend._lib.BN_dup(pub_key[0])
+ self._backend.openssl_assert(pub_key_dup != self._backend._ffi.NULL)
+
+ res = self._backend._lib.DH_set0_key(
+ dh_cdata, pub_key_dup, self._backend._ffi.NULL
+ )
+ self._backend.openssl_assert(res == 1)
+ evp_pkey = self._backend._dh_cdata_to_evp_pkey(dh_cdata)
+ return _DHPublicKey(self._backend, dh_cdata, evp_pkey)
+
+ def parameters(self) -> dh.DHParameters:
+ return _dh_cdata_to_parameters(self._dh_cdata, self._backend)
+
+ def private_bytes(
+ self,
+ encoding: serialization.Encoding,
+ format: serialization.PrivateFormat,
+ encryption_algorithm: serialization.KeySerializationEncryption,
+ ) -> bytes:
+ if format is not serialization.PrivateFormat.PKCS8:
+ raise ValueError(
+ "DH private keys support only PKCS8 serialization"
+ )
+ if not self._backend._lib.Cryptography_HAS_EVP_PKEY_DHX:
+ q = self._backend._ffi.new("BIGNUM **")
+ self._backend._lib.DH_get0_pqg(
+ self._dh_cdata,
+ self._backend._ffi.NULL,
+ q,
+ self._backend._ffi.NULL,
+ )
+ if q[0] != self._backend._ffi.NULL:
+ raise UnsupportedAlgorithm(
+ "DH X9.42 serialization is not supported",
+ _Reasons.UNSUPPORTED_SERIALIZATION,
+ )
+
+ return self._backend._private_key_bytes(
+ encoding,
+ format,
+ encryption_algorithm,
+ self,
+ self._evp_pkey,
+ self._dh_cdata,
+ )
+
+
+class _DHPublicKey(dh.DHPublicKey):
+ def __init__(self, backend: "Backend", dh_cdata, evp_pkey):
+ self._backend = backend
+ self._dh_cdata = dh_cdata
+ self._evp_pkey = evp_pkey
+ self._key_size_bits = _get_dh_num_bits(self._backend, self._dh_cdata)
+
+ @property
+ def key_size(self) -> int:
+ return self._key_size_bits
+
+ def public_numbers(self) -> dh.DHPublicNumbers:
+ p = self._backend._ffi.new("BIGNUM **")
+ g = self._backend._ffi.new("BIGNUM **")
+ q = self._backend._ffi.new("BIGNUM **")
+ self._backend._lib.DH_get0_pqg(self._dh_cdata, p, q, g)
+ self._backend.openssl_assert(p[0] != self._backend._ffi.NULL)
+ self._backend.openssl_assert(g[0] != self._backend._ffi.NULL)
+ if q[0] == self._backend._ffi.NULL:
+ q_val = None
+ else:
+ q_val = self._backend._bn_to_int(q[0])
+ pub_key = self._backend._ffi.new("BIGNUM **")
+ self._backend._lib.DH_get0_key(
+ self._dh_cdata, pub_key, self._backend._ffi.NULL
+ )
+ self._backend.openssl_assert(pub_key[0] != self._backend._ffi.NULL)
+ return dh.DHPublicNumbers(
+ parameter_numbers=dh.DHParameterNumbers(
+ p=self._backend._bn_to_int(p[0]),
+ g=self._backend._bn_to_int(g[0]),
+ q=q_val,
+ ),
+ y=self._backend._bn_to_int(pub_key[0]),
+ )
+
+ def parameters(self) -> dh.DHParameters:
+ return _dh_cdata_to_parameters(self._dh_cdata, self._backend)
+
+ def public_bytes(
+ self,
+ encoding: serialization.Encoding,
+ format: serialization.PublicFormat,
+ ) -> bytes:
+ if format is not serialization.PublicFormat.SubjectPublicKeyInfo:
+ raise ValueError(
+ "DH public keys support only "
+ "SubjectPublicKeyInfo serialization"
+ )
+
+ if not self._backend._lib.Cryptography_HAS_EVP_PKEY_DHX:
+ q = self._backend._ffi.new("BIGNUM **")
+ self._backend._lib.DH_get0_pqg(
+ self._dh_cdata,
+ self._backend._ffi.NULL,
+ q,
+ self._backend._ffi.NULL,
+ )
+ if q[0] != self._backend._ffi.NULL:
+ raise UnsupportedAlgorithm(
+ "DH X9.42 serialization is not supported",
+ _Reasons.UNSUPPORTED_SERIALIZATION,
+ )
+
+ return self._backend._public_key_bytes(
+ encoding, format, self, self._evp_pkey, None
+ )
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/backends/openssl/dsa.py b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/backends/openssl/dsa.py
new file mode 100644
index 00000000..15bd84a7
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/backends/openssl/dsa.py
@@ -0,0 +1,236 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+
+import typing
+
+from cryptography.exceptions import InvalidSignature
+from cryptography.hazmat.backends.openssl.utils import (
+ _calculate_digest_and_algorithm,
+)
+from cryptography.hazmat.primitives import hashes, serialization
+from cryptography.hazmat.primitives.asymmetric import dsa
+from cryptography.hazmat.primitives.asymmetric import utils as asym_utils
+
+if typing.TYPE_CHECKING:
+ from cryptography.hazmat.backends.openssl.backend import Backend
+
+
+def _dsa_sig_sign(
+    backend: "Backend", private_key: "_DSAPrivateKey", data: bytes
+) -> bytes:
+    """Sign pre-hashed *data* with *private_key* via OpenSSL ``DSA_sign``.
+
+    Returns the DER-encoded DSA signature, truncated to the length
+    OpenSSL actually wrote (DSA_size is only an upper bound).
+    """
+    sig_buf_len = backend._lib.DSA_size(private_key._dsa_cdata)
+    sig_buf = backend._ffi.new("unsigned char[]", sig_buf_len)
+    buflen = backend._ffi.new("unsigned int *")
+
+    # The first parameter passed to DSA_sign is unused by OpenSSL but
+    # must be an integer.
+    res = backend._lib.DSA_sign(
+        0, data, len(data), sig_buf, buflen, private_key._dsa_cdata
+    )
+    backend.openssl_assert(res == 1)
+    backend.openssl_assert(buflen[0])
+
+    return backend._ffi.buffer(sig_buf)[: buflen[0]]
+
+
+def _dsa_sig_verify(
+    backend: "Backend",
+    public_key: "_DSAPublicKey",
+    signature: bytes,
+    data: bytes,
+) -> None:
+    """Verify a DSA *signature* over pre-hashed *data*.
+
+    :raises InvalidSignature: if OpenSSL ``DSA_verify`` does not return 1.
+        The OpenSSL error queue is drained first so later calls are clean.
+    """
+    # The first parameter passed to DSA_verify is unused by OpenSSL but
+    # must be an integer.
+    res = backend._lib.DSA_verify(
+        0, data, len(data), signature, len(signature), public_key._dsa_cdata
+    )
+
+    if res != 1:
+        backend._consume_errors()
+        raise InvalidSignature
+
+
+class _DSAParameters(dsa.DSAParameters):
+    """DSA domain parameters backed by an OpenSSL ``DSA`` cdata handle."""
+
+    def __init__(self, backend: "Backend", dsa_cdata):
+        self._backend = backend
+        self._dsa_cdata = dsa_cdata
+
+    def parameter_numbers(self) -> dsa.DSAParameterNumbers:
+        """Extract p, q, g from the OpenSSL handle as Python integers."""
+        p = self._backend._ffi.new("BIGNUM **")
+        q = self._backend._ffi.new("BIGNUM **")
+        g = self._backend._ffi.new("BIGNUM **")
+        self._backend._lib.DSA_get0_pqg(self._dsa_cdata, p, q, g)
+        self._backend.openssl_assert(p[0] != self._backend._ffi.NULL)
+        self._backend.openssl_assert(q[0] != self._backend._ffi.NULL)
+        self._backend.openssl_assert(g[0] != self._backend._ffi.NULL)
+        return dsa.DSAParameterNumbers(
+            p=self._backend._bn_to_int(p[0]),
+            q=self._backend._bn_to_int(q[0]),
+            g=self._backend._bn_to_int(g[0]),
+        )
+
+    def generate_private_key(self) -> dsa.DSAPrivateKey:
+        """Generate a new private key using these domain parameters."""
+        return self._backend.generate_dsa_private_key(self)
+
+
+class _DSAPrivateKey(dsa.DSAPrivateKey):
+    """DSA private key backed by an OpenSSL ``DSA`` cdata and EVP_PKEY pair."""
+
+    _key_size: int
+
+    def __init__(self, backend: "Backend", dsa_cdata, evp_pkey):
+        self._backend = backend
+        self._dsa_cdata = dsa_cdata
+        self._evp_pkey = evp_pkey
+
+        # Key size is the bit length of the prime modulus p.
+        p = self._backend._ffi.new("BIGNUM **")
+        self._backend._lib.DSA_get0_pqg(
+            dsa_cdata, p, self._backend._ffi.NULL, self._backend._ffi.NULL
+        )
+        self._backend.openssl_assert(p[0] != backend._ffi.NULL)
+        self._key_size = self._backend._lib.BN_num_bits(p[0])
+
+    @property
+    def key_size(self) -> int:
+        """Bit length of the modulus p, cached at construction."""
+        return self._key_size
+
+    def private_numbers(self) -> dsa.DSAPrivateNumbers:
+        """Extract (p, q, g, y, x) from OpenSSL as DSAPrivateNumbers."""
+        p = self._backend._ffi.new("BIGNUM **")
+        q = self._backend._ffi.new("BIGNUM **")
+        g = self._backend._ffi.new("BIGNUM **")
+        pub_key = self._backend._ffi.new("BIGNUM **")
+        priv_key = self._backend._ffi.new("BIGNUM **")
+        self._backend._lib.DSA_get0_pqg(self._dsa_cdata, p, q, g)
+        self._backend.openssl_assert(p[0] != self._backend._ffi.NULL)
+        self._backend.openssl_assert(q[0] != self._backend._ffi.NULL)
+        self._backend.openssl_assert(g[0] != self._backend._ffi.NULL)
+        self._backend._lib.DSA_get0_key(self._dsa_cdata, pub_key, priv_key)
+        self._backend.openssl_assert(pub_key[0] != self._backend._ffi.NULL)
+        self._backend.openssl_assert(priv_key[0] != self._backend._ffi.NULL)
+        return dsa.DSAPrivateNumbers(
+            public_numbers=dsa.DSAPublicNumbers(
+                parameter_numbers=dsa.DSAParameterNumbers(
+                    p=self._backend._bn_to_int(p[0]),
+                    q=self._backend._bn_to_int(q[0]),
+                    g=self._backend._bn_to_int(g[0]),
+                ),
+                y=self._backend._bn_to_int(pub_key[0]),
+            ),
+            x=self._backend._bn_to_int(priv_key[0]),
+        )
+
+    def public_key(self) -> dsa.DSAPublicKey:
+        """Build the corresponding public key.
+
+        Duplicates the domain parameters (DSAparams_dup copies p/q/g but
+        not the key material), then copies in only the public component y.
+        """
+        dsa_cdata = self._backend._lib.DSAparams_dup(self._dsa_cdata)
+        self._backend.openssl_assert(dsa_cdata != self._backend._ffi.NULL)
+        dsa_cdata = self._backend._ffi.gc(
+            dsa_cdata, self._backend._lib.DSA_free
+        )
+        pub_key = self._backend._ffi.new("BIGNUM **")
+        self._backend._lib.DSA_get0_key(
+            self._dsa_cdata, pub_key, self._backend._ffi.NULL
+        )
+        self._backend.openssl_assert(pub_key[0] != self._backend._ffi.NULL)
+        # BN_dup is required: DSA_set0_key takes ownership of the BIGNUM.
+        pub_key_dup = self._backend._lib.BN_dup(pub_key[0])
+        res = self._backend._lib.DSA_set0_key(
+            dsa_cdata, pub_key_dup, self._backend._ffi.NULL
+        )
+        self._backend.openssl_assert(res == 1)
+        evp_pkey = self._backend._dsa_cdata_to_evp_pkey(dsa_cdata)
+        return _DSAPublicKey(self._backend, dsa_cdata, evp_pkey)
+
+    def parameters(self) -> dsa.DSAParameters:
+        """Return a copy of this key's domain parameters."""
+        dsa_cdata = self._backend._lib.DSAparams_dup(self._dsa_cdata)
+        self._backend.openssl_assert(dsa_cdata != self._backend._ffi.NULL)
+        dsa_cdata = self._backend._ffi.gc(
+            dsa_cdata, self._backend._lib.DSA_free
+        )
+        return _DSAParameters(self._backend, dsa_cdata)
+
+    def private_bytes(
+        self,
+        encoding: serialization.Encoding,
+        format: serialization.PrivateFormat,
+        encryption_algorithm: serialization.KeySerializationEncryption,
+    ) -> bytes:
+        """Serialize the private key; format handling is delegated to the backend."""
+        return self._backend._private_key_bytes(
+            encoding,
+            format,
+            encryption_algorithm,
+            self,
+            self._evp_pkey,
+            self._dsa_cdata,
+        )
+
+    def sign(
+        self,
+        data: bytes,
+        algorithm: typing.Union[asym_utils.Prehashed, hashes.HashAlgorithm],
+    ) -> bytes:
+        """Hash *data* (unless Prehashed) and return a DER DSA signature."""
+        data, _ = _calculate_digest_and_algorithm(data, algorithm)
+        return _dsa_sig_sign(self._backend, self, data)
+
+
+class _DSAPublicKey(dsa.DSAPublicKey):
+    """DSA public key backed by an OpenSSL ``DSA`` cdata and EVP_PKEY pair."""
+
+    _key_size: int
+
+    def __init__(self, backend: "Backend", dsa_cdata, evp_pkey):
+        self._backend = backend
+        self._dsa_cdata = dsa_cdata
+        self._evp_pkey = evp_pkey
+        # Key size is the bit length of the prime modulus p.
+        p = self._backend._ffi.new("BIGNUM **")
+        self._backend._lib.DSA_get0_pqg(
+            dsa_cdata, p, self._backend._ffi.NULL, self._backend._ffi.NULL
+        )
+        self._backend.openssl_assert(p[0] != backend._ffi.NULL)
+        self._key_size = self._backend._lib.BN_num_bits(p[0])
+
+    @property
+    def key_size(self) -> int:
+        """Bit length of the modulus p, cached at construction."""
+        return self._key_size
+
+    def public_numbers(self) -> dsa.DSAPublicNumbers:
+        """Extract (p, q, g, y) from OpenSSL as DSAPublicNumbers."""
+        p = self._backend._ffi.new("BIGNUM **")
+        q = self._backend._ffi.new("BIGNUM **")
+        g = self._backend._ffi.new("BIGNUM **")
+        pub_key = self._backend._ffi.new("BIGNUM **")
+        self._backend._lib.DSA_get0_pqg(self._dsa_cdata, p, q, g)
+        self._backend.openssl_assert(p[0] != self._backend._ffi.NULL)
+        self._backend.openssl_assert(q[0] != self._backend._ffi.NULL)
+        self._backend.openssl_assert(g[0] != self._backend._ffi.NULL)
+        self._backend._lib.DSA_get0_key(
+            self._dsa_cdata, pub_key, self._backend._ffi.NULL
+        )
+        self._backend.openssl_assert(pub_key[0] != self._backend._ffi.NULL)
+        return dsa.DSAPublicNumbers(
+            parameter_numbers=dsa.DSAParameterNumbers(
+                p=self._backend._bn_to_int(p[0]),
+                q=self._backend._bn_to_int(q[0]),
+                g=self._backend._bn_to_int(g[0]),
+            ),
+            y=self._backend._bn_to_int(pub_key[0]),
+        )
+
+    def parameters(self) -> dsa.DSAParameters:
+        """Return a copy of this key's domain parameters."""
+        dsa_cdata = self._backend._lib.DSAparams_dup(self._dsa_cdata)
+        dsa_cdata = self._backend._ffi.gc(
+            dsa_cdata, self._backend._lib.DSA_free
+        )
+        return _DSAParameters(self._backend, dsa_cdata)
+
+    def public_bytes(
+        self,
+        encoding: serialization.Encoding,
+        format: serialization.PublicFormat,
+    ) -> bytes:
+        """Serialize the public key; format handling is delegated to the backend."""
+        return self._backend._public_key_bytes(
+            encoding, format, self, self._evp_pkey, None
+        )
+
+    def verify(
+        self,
+        signature: bytes,
+        data: bytes,
+        algorithm: typing.Union[asym_utils.Prehashed, hashes.HashAlgorithm],
+    ) -> None:
+        """Hash *data* (unless Prehashed) and verify *signature* against it.
+
+        :raises InvalidSignature: if verification fails.
+        """
+        data, _ = _calculate_digest_and_algorithm(data, algorithm)
+        return _dsa_sig_verify(self._backend, self, signature, data)
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/backends/openssl/ec.py b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/backends/openssl/ec.py
new file mode 100644
index 00000000..969306bc
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/backends/openssl/ec.py
@@ -0,0 +1,317 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import typing
+
+from cryptography.exceptions import (
+ InvalidSignature,
+ UnsupportedAlgorithm,
+ _Reasons,
+)
+from cryptography.hazmat.backends.openssl.utils import (
+ _calculate_digest_and_algorithm,
+ _evp_pkey_derive,
+)
+from cryptography.hazmat.primitives import serialization
+from cryptography.hazmat.primitives.asymmetric import ec
+
+if typing.TYPE_CHECKING:
+ from cryptography.hazmat.backends.openssl.backend import Backend
+
+
+def _check_signature_algorithm(
+    signature_algorithm: ec.EllipticCurveSignatureAlgorithm,
+) -> None:
+    """Reject any EC signature algorithm other than ECDSA.
+
+    :raises UnsupportedAlgorithm: if *signature_algorithm* is not ec.ECDSA.
+    """
+    if not isinstance(signature_algorithm, ec.ECDSA):
+        raise UnsupportedAlgorithm(
+            "Unsupported elliptic curve signature algorithm.",
+            _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM,
+        )
+
+
+def _ec_key_curve_sn(backend: "Backend", ec_key) -> str:
+    """Return the short name (e.g. "secp256r1"-style SN) of *ec_key*'s curve.
+
+    :raises ValueError: for keys using explicit (non-named) curve parameters.
+    """
+    group = backend._lib.EC_KEY_get0_group(ec_key)
+    backend.openssl_assert(group != backend._ffi.NULL)
+
+    nid = backend._lib.EC_GROUP_get_curve_name(group)
+    # The following check is to find EC keys with unnamed curves and raise
+    # an error for now.
+    if nid == backend._lib.NID_undef:
+        raise ValueError(
+            "ECDSA keys with explicit parameters are unsupported at this time"
+        )
+
+    # This is like the above check, but it also catches the case where you
+    # explicitly encoded a curve with the same parameters as a named curve.
+    # Don't do that.
+    if (
+        not backend._lib.CRYPTOGRAPHY_IS_LIBRESSL
+        and backend._lib.EC_GROUP_get_asn1_flag(group) == 0
+    ):
+        raise ValueError(
+            "ECDSA keys with explicit parameters are unsupported at this time"
+        )
+
+    curve_name = backend._lib.OBJ_nid2sn(nid)
+    backend.openssl_assert(curve_name != backend._ffi.NULL)
+
+    sn = backend._ffi.string(curve_name).decode("ascii")
+    return sn
+
+
+def _mark_asn1_named_ec_curve(backend: "Backend", ec_cdata):
+    """
+    Set the named curve flag on the EC_KEY. This causes OpenSSL to
+    serialize EC keys along with their curve OID which makes
+    deserialization easier.
+    """
+
+    backend._lib.EC_KEY_set_asn1_flag(
+        ec_cdata, backend._lib.OPENSSL_EC_NAMED_CURVE
+    )
+
+
+def _check_key_infinity(backend: "Backend", ec_cdata) -> None:
+    """Reject EC keys whose public point is the point at infinity.
+
+    :raises ValueError: if EC_POINT_is_at_infinity reports true.
+    """
+    point = backend._lib.EC_KEY_get0_public_key(ec_cdata)
+    backend.openssl_assert(point != backend._ffi.NULL)
+    group = backend._lib.EC_KEY_get0_group(ec_cdata)
+    backend.openssl_assert(group != backend._ffi.NULL)
+    if backend._lib.EC_POINT_is_at_infinity(group, point):
+        raise ValueError(
+            "Cannot load an EC public key where the point is at infinity"
+        )
+
+
+def _sn_to_elliptic_curve(backend: "Backend", sn: str) -> ec.EllipticCurve:
+    """Map an OpenSSL curve short name to a cryptography EllipticCurve instance.
+
+    :raises UnsupportedAlgorithm: if *sn* is not in ec._CURVE_TYPES.
+    """
+    try:
+        return ec._CURVE_TYPES[sn]()
+    except KeyError:
+        raise UnsupportedAlgorithm(
+            f"{sn} is not a supported elliptic curve",
+            _Reasons.UNSUPPORTED_ELLIPTIC_CURVE,
+        )
+
+
+def _ecdsa_sig_sign(
+    backend: "Backend", private_key: "_EllipticCurvePrivateKey", data: bytes
+) -> bytes:
+    """Sign pre-hashed *data* with ECDSA via OpenSSL ``ECDSA_sign``.
+
+    Returns the DER-encoded signature, truncated to the written length
+    (ECDSA_size is only an upper bound).
+    """
+    max_size = backend._lib.ECDSA_size(private_key._ec_key)
+    backend.openssl_assert(max_size > 0)
+
+    sigbuf = backend._ffi.new("unsigned char[]", max_size)
+    siglen_ptr = backend._ffi.new("unsigned int[]", 1)
+    # The first parameter of ECDSA_sign is ignored by OpenSSL.
+    res = backend._lib.ECDSA_sign(
+        0, data, len(data), sigbuf, siglen_ptr, private_key._ec_key
+    )
+    backend.openssl_assert(res == 1)
+    return backend._ffi.buffer(sigbuf)[: siglen_ptr[0]]
+
+
+def _ecdsa_sig_verify(
+    backend: "Backend",
+    public_key: "_EllipticCurvePublicKey",
+    signature: bytes,
+    data: bytes,
+) -> None:
+    """Verify an ECDSA *signature* over pre-hashed *data*.
+
+    :raises InvalidSignature: if OpenSSL ``ECDSA_verify`` does not return 1.
+        The OpenSSL error queue is drained first so later calls are clean.
+    """
+    # The first parameter of ECDSA_verify is ignored by OpenSSL.
+    res = backend._lib.ECDSA_verify(
+        0, data, len(data), signature, len(signature), public_key._ec_key
+    )
+    if res != 1:
+        backend._consume_errors()
+        raise InvalidSignature
+
+
+class _EllipticCurvePrivateKey(ec.EllipticCurvePrivateKey):
+    """EC private key backed by an OpenSSL ``EC_KEY`` cdata and EVP_PKEY pair."""
+
+    def __init__(self, backend: "Backend", ec_key_cdata, evp_pkey):
+        self._backend = backend
+        self._ec_key = ec_key_cdata
+        self._evp_pkey = evp_pkey
+
+        # Resolve the curve, require a named curve, and reject keys whose
+        # public point is at infinity.
+        sn = _ec_key_curve_sn(backend, ec_key_cdata)
+        self._curve = _sn_to_elliptic_curve(backend, sn)
+        _mark_asn1_named_ec_curve(backend, ec_key_cdata)
+        _check_key_infinity(backend, ec_key_cdata)
+
+    @property
+    def curve(self) -> ec.EllipticCurve:
+        """The named curve this key is on."""
+        return self._curve
+
+    @property
+    def key_size(self) -> int:
+        """Bit size of the curve."""
+        return self.curve.key_size
+
+    def exchange(
+        self, algorithm: ec.ECDH, peer_public_key: ec.EllipticCurvePublicKey
+    ) -> bytes:
+        """Perform an ECDH key exchange and return the shared secret.
+
+        :raises UnsupportedAlgorithm: if the backend does not support ECDH
+            with this curve.
+        :raises ValueError: if the peer key is on a different curve.
+        """
+        if not (
+            self._backend.elliptic_curve_exchange_algorithm_supported(
+                algorithm, self.curve
+            )
+        ):
+            raise UnsupportedAlgorithm(
+                "This backend does not support the ECDH algorithm.",
+                _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM,
+            )
+
+        if peer_public_key.curve.name != self.curve.name:
+            raise ValueError(
+                "peer_public_key and self are not on the same curve"
+            )
+
+        return _evp_pkey_derive(self._backend, self._evp_pkey, peer_public_key)
+
+    def public_key(self) -> ec.EllipticCurvePublicKey:
+        """Build the corresponding public key from this key's public point."""
+        group = self._backend._lib.EC_KEY_get0_group(self._ec_key)
+        self._backend.openssl_assert(group != self._backend._ffi.NULL)
+
+        # Create a fresh EC_KEY on the same named curve and copy only the
+        # public point into it.
+        curve_nid = self._backend._lib.EC_GROUP_get_curve_name(group)
+        public_ec_key = self._backend._ec_key_new_by_curve_nid(curve_nid)
+
+        point = self._backend._lib.EC_KEY_get0_public_key(self._ec_key)
+        self._backend.openssl_assert(point != self._backend._ffi.NULL)
+
+        res = self._backend._lib.EC_KEY_set_public_key(public_ec_key, point)
+        self._backend.openssl_assert(res == 1)
+
+        evp_pkey = self._backend._ec_cdata_to_evp_pkey(public_ec_key)
+
+        return _EllipticCurvePublicKey(self._backend, public_ec_key, evp_pkey)
+
+    def private_numbers(self) -> ec.EllipticCurvePrivateNumbers:
+        """Extract the private scalar and public point as Python integers."""
+        bn = self._backend._lib.EC_KEY_get0_private_key(self._ec_key)
+        private_value = self._backend._bn_to_int(bn)
+        return ec.EllipticCurvePrivateNumbers(
+            private_value=private_value,
+            public_numbers=self.public_key().public_numbers(),
+        )
+
+    def private_bytes(
+        self,
+        encoding: serialization.Encoding,
+        format: serialization.PrivateFormat,
+        encryption_algorithm: serialization.KeySerializationEncryption,
+    ) -> bytes:
+        """Serialize the private key; format handling is delegated to the backend."""
+        return self._backend._private_key_bytes(
+            encoding,
+            format,
+            encryption_algorithm,
+            self,
+            self._evp_pkey,
+            self._ec_key,
+        )
+
+    def sign(
+        self,
+        data: bytes,
+        signature_algorithm: ec.EllipticCurveSignatureAlgorithm,
+    ) -> bytes:
+        """Hash *data* per *signature_algorithm* and return a DER ECDSA signature.
+
+        :raises UnsupportedAlgorithm: for non-ECDSA signature algorithms.
+        """
+        _check_signature_algorithm(signature_algorithm)
+        data, _ = _calculate_digest_and_algorithm(
+            data,
+            signature_algorithm.algorithm,
+        )
+        return _ecdsa_sig_sign(self._backend, self, data)
+
+
+class _EllipticCurvePublicKey(ec.EllipticCurvePublicKey):
+    """EC public key backed by an OpenSSL ``EC_KEY`` cdata and EVP_PKEY pair."""
+
+    def __init__(self, backend: "Backend", ec_key_cdata, evp_pkey):
+        self._backend = backend
+        self._ec_key = ec_key_cdata
+        self._evp_pkey = evp_pkey
+
+        # Resolve the curve, require a named curve, and reject keys whose
+        # public point is at infinity.
+        sn = _ec_key_curve_sn(backend, ec_key_cdata)
+        self._curve = _sn_to_elliptic_curve(backend, sn)
+        _mark_asn1_named_ec_curve(backend, ec_key_cdata)
+        _check_key_infinity(backend, ec_key_cdata)
+
+    @property
+    def curve(self) -> ec.EllipticCurve:
+        """The named curve this key is on."""
+        return self._curve
+
+    @property
+    def key_size(self) -> int:
+        """Bit size of the curve."""
+        return self.curve.key_size
+
+    def public_numbers(self) -> ec.EllipticCurvePublicNumbers:
+        """Return the affine (x, y) coordinates of the public point."""
+        group = self._backend._lib.EC_KEY_get0_group(self._ec_key)
+        self._backend.openssl_assert(group != self._backend._ffi.NULL)
+
+        point = self._backend._lib.EC_KEY_get0_public_key(self._ec_key)
+        self._backend.openssl_assert(point != self._backend._ffi.NULL)
+
+        # BIGNUMs obtained via BN_CTX_get are owned by the context and
+        # released when the context manager exits.
+        with self._backend._tmp_bn_ctx() as bn_ctx:
+            bn_x = self._backend._lib.BN_CTX_get(bn_ctx)
+            bn_y = self._backend._lib.BN_CTX_get(bn_ctx)
+
+            res = self._backend._lib.EC_POINT_get_affine_coordinates(
+                group, point, bn_x, bn_y, bn_ctx
+            )
+            self._backend.openssl_assert(res == 1)
+
+            x = self._backend._bn_to_int(bn_x)
+            y = self._backend._bn_to_int(bn_y)
+
+        return ec.EllipticCurvePublicNumbers(x=x, y=y, curve=self._curve)
+
+    def _encode_point(self, format: serialization.PublicFormat) -> bytes:
+        """Encode the public point in X9.62 compressed or uncompressed form."""
+        if format is serialization.PublicFormat.CompressedPoint:
+            conversion = self._backend._lib.POINT_CONVERSION_COMPRESSED
+        else:
+            assert format is serialization.PublicFormat.UncompressedPoint
+            conversion = self._backend._lib.POINT_CONVERSION_UNCOMPRESSED
+
+        group = self._backend._lib.EC_KEY_get0_group(self._ec_key)
+        self._backend.openssl_assert(group != self._backend._ffi.NULL)
+        point = self._backend._lib.EC_KEY_get0_public_key(self._ec_key)
+        self._backend.openssl_assert(point != self._backend._ffi.NULL)
+        with self._backend._tmp_bn_ctx() as bn_ctx:
+            # First call with a NULL buffer returns the required length.
+            buflen = self._backend._lib.EC_POINT_point2oct(
+                group, point, conversion, self._backend._ffi.NULL, 0, bn_ctx
+            )
+            self._backend.openssl_assert(buflen > 0)
+            buf = self._backend._ffi.new("char[]", buflen)
+            res = self._backend._lib.EC_POINT_point2oct(
+                group, point, conversion, buf, buflen, bn_ctx
+            )
+            self._backend.openssl_assert(buflen == res)
+
+        return self._backend._ffi.buffer(buf)[:]
+
+    def public_bytes(
+        self,
+        encoding: serialization.Encoding,
+        format: serialization.PublicFormat,
+    ) -> bytes:
+        """Serialize the public key.
+
+        X962 encoding must be paired with CompressedPoint or
+        UncompressedPoint format (and vice versa); all other combinations
+        are delegated to the backend.
+
+        :raises ValueError: for a mismatched X962/point-format pairing.
+        """
+        if (
+            encoding is serialization.Encoding.X962
+            or format is serialization.PublicFormat.CompressedPoint
+            or format is serialization.PublicFormat.UncompressedPoint
+        ):
+            if encoding is not serialization.Encoding.X962 or format not in (
+                serialization.PublicFormat.CompressedPoint,
+                serialization.PublicFormat.UncompressedPoint,
+            ):
+                raise ValueError(
+                    "X962 encoding must be used with CompressedPoint or "
+                    "UncompressedPoint format"
+                )
+
+            return self._encode_point(format)
+        else:
+            return self._backend._public_key_bytes(
+                encoding, format, self, self._evp_pkey, None
+            )
+
+    def verify(
+        self,
+        signature: bytes,
+        data: bytes,
+        signature_algorithm: ec.EllipticCurveSignatureAlgorithm,
+    ) -> None:
+        """Hash *data* per *signature_algorithm* and verify *signature*.
+
+        :raises UnsupportedAlgorithm: for non-ECDSA signature algorithms.
+        :raises InvalidSignature: if verification fails.
+        """
+        _check_signature_algorithm(signature_algorithm)
+        data, _ = _calculate_digest_and_algorithm(
+            data,
+            signature_algorithm.algorithm,
+        )
+        _ecdsa_sig_verify(self._backend, self, signature, data)
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/backends/openssl/ed25519.py b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/backends/openssl/ed25519.py
new file mode 100644
index 00000000..6f393e5b
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/backends/openssl/ed25519.py
@@ -0,0 +1,155 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import typing
+
+from cryptography import exceptions
+from cryptography.hazmat.primitives import serialization
+from cryptography.hazmat.primitives.asymmetric.ed25519 import (
+ _ED25519_KEY_SIZE,
+ _ED25519_SIG_SIZE,
+ Ed25519PrivateKey,
+ Ed25519PublicKey,
+)
+
+if typing.TYPE_CHECKING:
+ from cryptography.hazmat.backends.openssl.backend import Backend
+
+
+class _Ed25519PublicKey(Ed25519PublicKey):
+    """Ed25519 public key backed by an OpenSSL EVP_PKEY."""
+
+    def __init__(self, backend: "Backend", evp_pkey):
+        self._backend = backend
+        self._evp_pkey = evp_pkey
+
+    def public_bytes(
+        self,
+        encoding: serialization.Encoding,
+        format: serialization.PublicFormat,
+    ) -> bytes:
+        """Serialize the public key.
+
+        Raw encoding and Raw format must be used together; anything else
+        is delegated to the backend.
+
+        :raises ValueError: if only one of encoding/format is Raw.
+        """
+        if (
+            encoding is serialization.Encoding.Raw
+            or format is serialization.PublicFormat.Raw
+        ):
+            if (
+                encoding is not serialization.Encoding.Raw
+                or format is not serialization.PublicFormat.Raw
+            ):
+                raise ValueError(
+                    "When using Raw both encoding and format must be Raw"
+                )
+
+            return self._raw_public_bytes()
+
+        return self._backend._public_key_bytes(
+            encoding, format, self, self._evp_pkey, None
+        )
+
+    def _raw_public_bytes(self) -> bytes:
+        """Return the 32-byte raw public key via EVP_PKEY_get_raw_public_key."""
+        buf = self._backend._ffi.new("unsigned char []", _ED25519_KEY_SIZE)
+        buflen = self._backend._ffi.new("size_t *", _ED25519_KEY_SIZE)
+        res = self._backend._lib.EVP_PKEY_get_raw_public_key(
+            self._evp_pkey, buf, buflen
+        )
+        self._backend.openssl_assert(res == 1)
+        self._backend.openssl_assert(buflen[0] == _ED25519_KEY_SIZE)
+        return self._backend._ffi.buffer(buf, _ED25519_KEY_SIZE)[:]
+
+    def verify(self, signature: bytes, data: bytes) -> None:
+        """Verify an Ed25519 *signature* over *data*.
+
+        Uses a one-shot EVP_DigestVerify with a NULL digest, as Ed25519
+        hashing is internal to the algorithm.
+
+        :raises InvalidSignature: if verification fails.
+        """
+        evp_md_ctx = self._backend._lib.EVP_MD_CTX_new()
+        self._backend.openssl_assert(evp_md_ctx != self._backend._ffi.NULL)
+        evp_md_ctx = self._backend._ffi.gc(
+            evp_md_ctx, self._backend._lib.EVP_MD_CTX_free
+        )
+        res = self._backend._lib.EVP_DigestVerifyInit(
+            evp_md_ctx,
+            self._backend._ffi.NULL,
+            self._backend._ffi.NULL,
+            self._backend._ffi.NULL,
+            self._evp_pkey,
+        )
+        self._backend.openssl_assert(res == 1)
+        res = self._backend._lib.EVP_DigestVerify(
+            evp_md_ctx, signature, len(signature), data, len(data)
+        )
+        if res != 1:
+            self._backend._consume_errors()
+            raise exceptions.InvalidSignature
+
+
+class _Ed25519PrivateKey(Ed25519PrivateKey):
+    """Ed25519 private key backed by an OpenSSL EVP_PKEY."""
+
+    def __init__(self, backend: "Backend", evp_pkey):
+        self._backend = backend
+        self._evp_pkey = evp_pkey
+
+    def public_key(self) -> Ed25519PublicKey:
+        """Extract the raw public key bytes and load them as a public key."""
+        buf = self._backend._ffi.new("unsigned char []", _ED25519_KEY_SIZE)
+        buflen = self._backend._ffi.new("size_t *", _ED25519_KEY_SIZE)
+        res = self._backend._lib.EVP_PKEY_get_raw_public_key(
+            self._evp_pkey, buf, buflen
+        )
+        self._backend.openssl_assert(res == 1)
+        self._backend.openssl_assert(buflen[0] == _ED25519_KEY_SIZE)
+        public_bytes = self._backend._ffi.buffer(buf)[:]
+        return self._backend.ed25519_load_public_bytes(public_bytes)
+
+    def sign(self, data: bytes) -> bytes:
+        """Return the 64-byte Ed25519 signature over *data*.
+
+        Uses a one-shot EVP_DigestSign with a NULL digest, as Ed25519
+        hashing is internal to the algorithm.
+        """
+        evp_md_ctx = self._backend._lib.EVP_MD_CTX_new()
+        self._backend.openssl_assert(evp_md_ctx != self._backend._ffi.NULL)
+        evp_md_ctx = self._backend._ffi.gc(
+            evp_md_ctx, self._backend._lib.EVP_MD_CTX_free
+        )
+        res = self._backend._lib.EVP_DigestSignInit(
+            evp_md_ctx,
+            self._backend._ffi.NULL,
+            self._backend._ffi.NULL,
+            self._backend._ffi.NULL,
+            self._evp_pkey,
+        )
+        self._backend.openssl_assert(res == 1)
+        buf = self._backend._ffi.new("unsigned char[]", _ED25519_SIG_SIZE)
+        buflen = self._backend._ffi.new("size_t *", len(buf))
+        res = self._backend._lib.EVP_DigestSign(
+            evp_md_ctx, buf, buflen, data, len(data)
+        )
+        self._backend.openssl_assert(res == 1)
+        self._backend.openssl_assert(buflen[0] == _ED25519_SIG_SIZE)
+        return self._backend._ffi.buffer(buf, buflen[0])[:]
+
+    def private_bytes(
+        self,
+        encoding: serialization.Encoding,
+        format: serialization.PrivateFormat,
+        encryption_algorithm: serialization.KeySerializationEncryption,
+    ) -> bytes:
+        """Serialize the private key.
+
+        Raw encoding/format must be used together and only with
+        NoEncryption; anything else is delegated to the backend.
+
+        :raises ValueError: for an inconsistent Raw combination.
+        """
+        if (
+            encoding is serialization.Encoding.Raw
+            or format is serialization.PrivateFormat.Raw
+        ):
+            if (
+                format is not serialization.PrivateFormat.Raw
+                or encoding is not serialization.Encoding.Raw
+                or not isinstance(
+                    encryption_algorithm, serialization.NoEncryption
+                )
+            ):
+                raise ValueError(
+                    "When using Raw both encoding and format must be Raw "
+                    "and encryption_algorithm must be NoEncryption()"
+                )
+
+            return self._raw_private_bytes()
+
+        return self._backend._private_key_bytes(
+            encoding, format, encryption_algorithm, self, self._evp_pkey, None
+        )
+
+    def _raw_private_bytes(self) -> bytes:
+        """Return the 32-byte raw private key via EVP_PKEY_get_raw_private_key."""
+        buf = self._backend._ffi.new("unsigned char []", _ED25519_KEY_SIZE)
+        buflen = self._backend._ffi.new("size_t *", _ED25519_KEY_SIZE)
+        res = self._backend._lib.EVP_PKEY_get_raw_private_key(
+            self._evp_pkey, buf, buflen
+        )
+        self._backend.openssl_assert(res == 1)
+        self._backend.openssl_assert(buflen[0] == _ED25519_KEY_SIZE)
+        return self._backend._ffi.buffer(buf, _ED25519_KEY_SIZE)[:]
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/backends/openssl/ed448.py b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/backends/openssl/ed448.py
new file mode 100644
index 00000000..0d27ea63
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/backends/openssl/ed448.py
@@ -0,0 +1,156 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import typing
+
+from cryptography import exceptions
+from cryptography.hazmat.primitives import serialization
+from cryptography.hazmat.primitives.asymmetric.ed448 import (
+ Ed448PrivateKey,
+ Ed448PublicKey,
+)
+
+if typing.TYPE_CHECKING:
+ from cryptography.hazmat.backends.openssl.backend import Backend
+
+_ED448_KEY_SIZE = 57
+_ED448_SIG_SIZE = 114
+
+
+class _Ed448PublicKey(Ed448PublicKey):
+    """Ed448 public key backed by an OpenSSL EVP_PKEY."""
+
+    def __init__(self, backend: "Backend", evp_pkey):
+        self._backend = backend
+        self._evp_pkey = evp_pkey
+
+    def public_bytes(
+        self,
+        encoding: serialization.Encoding,
+        format: serialization.PublicFormat,
+    ) -> bytes:
+        """Serialize the public key.
+
+        Raw encoding and Raw format must be used together; anything else
+        is delegated to the backend.
+
+        :raises ValueError: if only one of encoding/format is Raw.
+        """
+        if (
+            encoding is serialization.Encoding.Raw
+            or format is serialization.PublicFormat.Raw
+        ):
+            if (
+                encoding is not serialization.Encoding.Raw
+                or format is not serialization.PublicFormat.Raw
+            ):
+                raise ValueError(
+                    "When using Raw both encoding and format must be Raw"
+                )
+
+            return self._raw_public_bytes()
+
+        return self._backend._public_key_bytes(
+            encoding, format, self, self._evp_pkey, None
+        )
+
+    def _raw_public_bytes(self) -> bytes:
+        """Return the 57-byte raw public key via EVP_PKEY_get_raw_public_key."""
+        buf = self._backend._ffi.new("unsigned char []", _ED448_KEY_SIZE)
+        buflen = self._backend._ffi.new("size_t *", _ED448_KEY_SIZE)
+        res = self._backend._lib.EVP_PKEY_get_raw_public_key(
+            self._evp_pkey, buf, buflen
+        )
+        self._backend.openssl_assert(res == 1)
+        self._backend.openssl_assert(buflen[0] == _ED448_KEY_SIZE)
+        return self._backend._ffi.buffer(buf, _ED448_KEY_SIZE)[:]
+
+    def verify(self, signature: bytes, data: bytes) -> None:
+        """Verify an Ed448 *signature* over *data*.
+
+        Uses a one-shot EVP_DigestVerify with a NULL digest, as Ed448
+        hashing is internal to the algorithm.
+
+        :raises InvalidSignature: if verification fails.
+        """
+        evp_md_ctx = self._backend._lib.EVP_MD_CTX_new()
+        self._backend.openssl_assert(evp_md_ctx != self._backend._ffi.NULL)
+        evp_md_ctx = self._backend._ffi.gc(
+            evp_md_ctx, self._backend._lib.EVP_MD_CTX_free
+        )
+        res = self._backend._lib.EVP_DigestVerifyInit(
+            evp_md_ctx,
+            self._backend._ffi.NULL,
+            self._backend._ffi.NULL,
+            self._backend._ffi.NULL,
+            self._evp_pkey,
+        )
+        self._backend.openssl_assert(res == 1)
+        res = self._backend._lib.EVP_DigestVerify(
+            evp_md_ctx, signature, len(signature), data, len(data)
+        )
+        if res != 1:
+            self._backend._consume_errors()
+            raise exceptions.InvalidSignature
+
+
+class _Ed448PrivateKey(Ed448PrivateKey):
+    """Ed448 private key backed by an OpenSSL EVP_PKEY."""
+
+    def __init__(self, backend: "Backend", evp_pkey):
+        self._backend = backend
+        self._evp_pkey = evp_pkey
+
+    def public_key(self) -> Ed448PublicKey:
+        """Extract the raw public key bytes and load them as a public key."""
+        buf = self._backend._ffi.new("unsigned char []", _ED448_KEY_SIZE)
+        buflen = self._backend._ffi.new("size_t *", _ED448_KEY_SIZE)
+        res = self._backend._lib.EVP_PKEY_get_raw_public_key(
+            self._evp_pkey, buf, buflen
+        )
+        self._backend.openssl_assert(res == 1)
+        self._backend.openssl_assert(buflen[0] == _ED448_KEY_SIZE)
+        public_bytes = self._backend._ffi.buffer(buf)[:]
+        return self._backend.ed448_load_public_bytes(public_bytes)
+
+    def sign(self, data: bytes) -> bytes:
+        """Return the 114-byte Ed448 signature over *data*.
+
+        Uses a one-shot EVP_DigestSign with a NULL digest, as Ed448
+        hashing is internal to the algorithm.
+        """
+        evp_md_ctx = self._backend._lib.EVP_MD_CTX_new()
+        self._backend.openssl_assert(evp_md_ctx != self._backend._ffi.NULL)
+        evp_md_ctx = self._backend._ffi.gc(
+            evp_md_ctx, self._backend._lib.EVP_MD_CTX_free
+        )
+        res = self._backend._lib.EVP_DigestSignInit(
+            evp_md_ctx,
+            self._backend._ffi.NULL,
+            self._backend._ffi.NULL,
+            self._backend._ffi.NULL,
+            self._evp_pkey,
+        )
+        self._backend.openssl_assert(res == 1)
+        buf = self._backend._ffi.new("unsigned char[]", _ED448_SIG_SIZE)
+        buflen = self._backend._ffi.new("size_t *", len(buf))
+        res = self._backend._lib.EVP_DigestSign(
+            evp_md_ctx, buf, buflen, data, len(data)
+        )
+        self._backend.openssl_assert(res == 1)
+        self._backend.openssl_assert(buflen[0] == _ED448_SIG_SIZE)
+        return self._backend._ffi.buffer(buf, buflen[0])[:]
+
+    def private_bytes(
+        self,
+        encoding: serialization.Encoding,
+        format: serialization.PrivateFormat,
+        encryption_algorithm: serialization.KeySerializationEncryption,
+    ) -> bytes:
+        """Serialize the private key.
+
+        Raw encoding/format must be used together and only with
+        NoEncryption; anything else is delegated to the backend.
+
+        :raises ValueError: for an inconsistent Raw combination.
+        """
+        if (
+            encoding is serialization.Encoding.Raw
+            or format is serialization.PrivateFormat.Raw
+        ):
+            if (
+                format is not serialization.PrivateFormat.Raw
+                or encoding is not serialization.Encoding.Raw
+                or not isinstance(
+                    encryption_algorithm, serialization.NoEncryption
+                )
+            ):
+                raise ValueError(
+                    "When using Raw both encoding and format must be Raw "
+                    "and encryption_algorithm must be NoEncryption()"
+                )
+
+            return self._raw_private_bytes()
+
+        return self._backend._private_key_bytes(
+            encoding, format, encryption_algorithm, self, self._evp_pkey, None
+        )
+
+    def _raw_private_bytes(self) -> bytes:
+        """Return the 57-byte raw private key via EVP_PKEY_get_raw_private_key."""
+        buf = self._backend._ffi.new("unsigned char []", _ED448_KEY_SIZE)
+        buflen = self._backend._ffi.new("size_t *", _ED448_KEY_SIZE)
+        res = self._backend._lib.EVP_PKEY_get_raw_private_key(
+            self._evp_pkey, buf, buflen
+        )
+        self._backend.openssl_assert(res == 1)
+        self._backend.openssl_assert(buflen[0] == _ED448_KEY_SIZE)
+        return self._backend._ffi.buffer(buf, _ED448_KEY_SIZE)[:]
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/backends/openssl/hashes.py b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/backends/openssl/hashes.py
new file mode 100644
index 00000000..52d4646a
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/backends/openssl/hashes.py
@@ -0,0 +1,86 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import typing
+
+from cryptography.exceptions import UnsupportedAlgorithm, _Reasons
+from cryptography.hazmat.primitives import hashes
+
+if typing.TYPE_CHECKING:
+ from cryptography.hazmat.backends.openssl.backend import Backend
+
+
+class _HashContext(hashes.HashContext):
+    """Hash context backed by an OpenSSL EVP_MD_CTX."""
+
+    def __init__(
+        self, backend: "Backend", algorithm: hashes.HashAlgorithm, ctx=None
+    ) -> None:
+        self._algorithm = algorithm
+
+        self._backend = backend
+
+        # ctx is supplied only by copy(); otherwise create and initialize
+        # a fresh EVP_MD_CTX for the requested algorithm.
+        if ctx is None:
+            ctx = self._backend._lib.EVP_MD_CTX_new()
+            ctx = self._backend._ffi.gc(
+                ctx, self._backend._lib.EVP_MD_CTX_free
+            )
+            evp_md = self._backend._evp_md_from_algorithm(algorithm)
+            if evp_md == self._backend._ffi.NULL:
+                raise UnsupportedAlgorithm(
+                    "{} is not a supported hash on this backend.".format(
+                        algorithm.name
+                    ),
+                    _Reasons.UNSUPPORTED_HASH,
+                )
+            res = self._backend._lib.EVP_DigestInit_ex(
+                ctx, evp_md, self._backend._ffi.NULL
+            )
+            self._backend.openssl_assert(res != 0)
+
+        self._ctx = ctx
+
+    @property
+    def algorithm(self) -> hashes.HashAlgorithm:
+        """The hash algorithm this context was created with."""
+        return self._algorithm
+
+    def copy(self) -> "_HashContext":
+        """Return an independent copy of this context's current state."""
+        copied_ctx = self._backend._lib.EVP_MD_CTX_new()
+        copied_ctx = self._backend._ffi.gc(
+            copied_ctx, self._backend._lib.EVP_MD_CTX_free
+        )
+        res = self._backend._lib.EVP_MD_CTX_copy_ex(copied_ctx, self._ctx)
+        self._backend.openssl_assert(res != 0)
+        return _HashContext(self._backend, self.algorithm, ctx=copied_ctx)
+
+    def update(self, data: bytes) -> None:
+        """Feed *data* into the digest."""
+        data_ptr = self._backend._ffi.from_buffer(data)
+        res = self._backend._lib.EVP_DigestUpdate(
+            self._ctx, data_ptr, len(data)
+        )
+        self._backend.openssl_assert(res != 0)
+
+    def finalize(self) -> bytes:
+        """Return the final digest bytes."""
+        if isinstance(self.algorithm, hashes.ExtendableOutputFunction):
+            # extendable output functions use a different finalize
+            return self._finalize_xof()
+        else:
+            buf = self._backend._ffi.new(
+                "unsigned char[]", self._backend._lib.EVP_MAX_MD_SIZE
+            )
+            outlen = self._backend._ffi.new("unsigned int *")
+            res = self._backend._lib.EVP_DigestFinal_ex(self._ctx, buf, outlen)
+            self._backend.openssl_assert(res != 0)
+            self._backend.openssl_assert(
+                outlen[0] == self.algorithm.digest_size
+            )
+            return self._backend._ffi.buffer(buf)[: outlen[0]]
+
+    def _finalize_xof(self) -> bytes:
+        """Finalize an extendable-output function at the requested digest size."""
+        buf = self._backend._ffi.new(
+            "unsigned char[]", self.algorithm.digest_size
+        )
+        res = self._backend._lib.EVP_DigestFinalXOF(
+            self._ctx, buf, self.algorithm.digest_size
+        )
+        self._backend.openssl_assert(res != 0)
+        return self._backend._ffi.buffer(buf)[: self.algorithm.digest_size]
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/backends/openssl/hmac.py b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/backends/openssl/hmac.py
new file mode 100644
index 00000000..ba3dfb53
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/backends/openssl/hmac.py
@@ -0,0 +1,84 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import typing
+
+from cryptography.exceptions import (
+ InvalidSignature,
+ UnsupportedAlgorithm,
+ _Reasons,
+)
+from cryptography.hazmat.primitives import constant_time, hashes
+
+if typing.TYPE_CHECKING:
+ from cryptography.hazmat.backends.openssl.backend import Backend
+
+
+class _HMACContext(hashes.HashContext):
+    """OpenSSL-backed HMAC context implementing hashes.HashContext.
+
+    Wraps an HMAC_CTX whose lifetime is managed via ffi.gc. The key is
+    retained on the instance so that copy() can construct an equivalent
+    context object.
+    """
+
+    def __init__(
+        self,
+        backend: "Backend",
+        key: bytes,
+        algorithm: hashes.HashAlgorithm,
+        ctx=None,
+    ):
+        """Initialize an HMAC context for *algorithm* keyed with *key*.
+
+        If *ctx* is given (used by copy()), it is adopted as-is and
+        HMAC_Init_ex is not called again.
+        """
+        self._algorithm = algorithm
+        self._backend = backend
+
+        if ctx is None:
+            ctx = self._backend._lib.HMAC_CTX_new()
+            self._backend.openssl_assert(ctx != self._backend._ffi.NULL)
+            # Free the CTX automatically when this object is collected.
+            ctx = self._backend._ffi.gc(ctx, self._backend._lib.HMAC_CTX_free)
+            evp_md = self._backend._evp_md_from_algorithm(algorithm)
+            if evp_md == self._backend._ffi.NULL:
+                raise UnsupportedAlgorithm(
+                    "{} is not a supported hash on this backend".format(
+                        algorithm.name
+                    ),
+                    _Reasons.UNSUPPORTED_HASH,
+                )
+            key_ptr = self._backend._ffi.from_buffer(key)
+            res = self._backend._lib.HMAC_Init_ex(
+                ctx, key_ptr, len(key), evp_md, self._backend._ffi.NULL
+            )
+            self._backend.openssl_assert(res != 0)
+
+        self._ctx = ctx
+        self._key = key
+
+    @property
+    def algorithm(self) -> hashes.HashAlgorithm:
+        """Return the hash algorithm underlying this HMAC."""
+        return self._algorithm
+
+    def copy(self) -> "_HMACContext":
+        """Return an independent copy of this HMAC context."""
+        copied_ctx = self._backend._lib.HMAC_CTX_new()
+        self._backend.openssl_assert(copied_ctx != self._backend._ffi.NULL)
+        copied_ctx = self._backend._ffi.gc(
+            copied_ctx, self._backend._lib.HMAC_CTX_free
+        )
+        res = self._backend._lib.HMAC_CTX_copy(copied_ctx, self._ctx)
+        self._backend.openssl_assert(res != 0)
+        return _HMACContext(
+            self._backend, self._key, self.algorithm, ctx=copied_ctx
+        )
+
+    def update(self, data: bytes) -> None:
+        """Feed more bytes into the running MAC."""
+        data_ptr = self._backend._ffi.from_buffer(data)
+        res = self._backend._lib.HMAC_Update(self._ctx, data_ptr, len(data))
+        self._backend.openssl_assert(res != 0)
+
+    def finalize(self) -> bytes:
+        """Finish the MAC and return the tag bytes."""
+        # EVP_MAX_MD_SIZE is an upper bound on any digest length.
+        buf = self._backend._ffi.new(
+            "unsigned char[]", self._backend._lib.EVP_MAX_MD_SIZE
+        )
+        outlen = self._backend._ffi.new("unsigned int *")
+        res = self._backend._lib.HMAC_Final(self._ctx, buf, outlen)
+        self._backend.openssl_assert(res != 0)
+        self._backend.openssl_assert(outlen[0] == self.algorithm.digest_size)
+        return self._backend._ffi.buffer(buf)[: outlen[0]]
+
+    def verify(self, signature: bytes) -> None:
+        """Finalize and compare against *signature* in constant time.
+
+        Raises InvalidSignature on mismatch.
+        """
+        digest = self.finalize()
+        if not constant_time.bytes_eq(digest, signature):
+            raise InvalidSignature("Signature did not match digest.")
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/backends/openssl/poly1305.py b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/backends/openssl/poly1305.py
new file mode 100644
index 00000000..d0d44f6f
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/backends/openssl/poly1305.py
@@ -0,0 +1,67 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import typing
+
+from cryptography.exceptions import InvalidSignature
+from cryptography.hazmat.primitives import constant_time
+
+# Poly1305 produces a 16-byte authentication tag and takes a 32-byte key
+# (sizes used by the finalize/init calls below).
+_POLY1305_TAG_SIZE = 16
+_POLY1305_KEY_SIZE = 32
+
+
+if typing.TYPE_CHECKING:
+ from cryptography.hazmat.backends.openssl.backend import Backend
+
+
+class _Poly1305Context:
+    """OpenSSL-backed Poly1305 one-time authenticator.
+
+    Implemented via the EVP_DigestSign* interface with a raw Poly1305
+    EVP_PKEY; both the pkey and the MD context are freed via ffi.gc.
+    """
+
+    def __init__(self, backend: "Backend", key: bytes) -> None:
+        self._backend = backend
+
+        key_ptr = self._backend._ffi.from_buffer(key)
+        # This function copies the key into OpenSSL-owned memory so we don't
+        # need to retain it ourselves
+        evp_pkey = self._backend._lib.EVP_PKEY_new_raw_private_key(
+            self._backend._lib.NID_poly1305,
+            self._backend._ffi.NULL,
+            key_ptr,
+            len(key),
+        )
+        self._backend.openssl_assert(evp_pkey != self._backend._ffi.NULL)
+        self._evp_pkey = self._backend._ffi.gc(
+            evp_pkey, self._backend._lib.EVP_PKEY_free
+        )
+        ctx = self._backend._lib.EVP_MD_CTX_new()
+        self._backend.openssl_assert(ctx != self._backend._ffi.NULL)
+        self._ctx = self._backend._ffi.gc(
+            ctx, self._backend._lib.EVP_MD_CTX_free
+        )
+        # No message digest is involved; only the pkey drives the MAC.
+        res = self._backend._lib.EVP_DigestSignInit(
+            self._ctx,
+            self._backend._ffi.NULL,
+            self._backend._ffi.NULL,
+            self._backend._ffi.NULL,
+            self._evp_pkey,
+        )
+        self._backend.openssl_assert(res == 1)
+
+    def update(self, data: bytes) -> None:
+        """Feed more bytes into the authenticator."""
+        data_ptr = self._backend._ffi.from_buffer(data)
+        res = self._backend._lib.EVP_DigestSignUpdate(
+            self._ctx, data_ptr, len(data)
+        )
+        self._backend.openssl_assert(res != 0)
+
+    def finalize(self) -> bytes:
+        """Finish and return the 16-byte tag."""
+        buf = self._backend._ffi.new("unsigned char[]", _POLY1305_TAG_SIZE)
+        outlen = self._backend._ffi.new("size_t *", _POLY1305_TAG_SIZE)
+        res = self._backend._lib.EVP_DigestSignFinal(self._ctx, buf, outlen)
+        self._backend.openssl_assert(res != 0)
+        self._backend.openssl_assert(outlen[0] == _POLY1305_TAG_SIZE)
+        return self._backend._ffi.buffer(buf)[: outlen[0]]
+
+    def verify(self, tag: bytes) -> None:
+        """Finalize and compare against *tag* in constant time.
+
+        Raises InvalidSignature on mismatch.
+        """
+        mac = self.finalize()
+        if not constant_time.bytes_eq(mac, tag):
+            raise InvalidSignature("Value did not match computed tag.")
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/backends/openssl/rsa.py b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/backends/openssl/rsa.py
new file mode 100644
index 00000000..c960105e
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/backends/openssl/rsa.py
@@ -0,0 +1,588 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import threading
+import typing
+
+from cryptography.exceptions import (
+ InvalidSignature,
+ UnsupportedAlgorithm,
+ _Reasons,
+)
+from cryptography.hazmat.backends.openssl.utils import (
+ _calculate_digest_and_algorithm,
+)
+from cryptography.hazmat.primitives import hashes, serialization
+from cryptography.hazmat.primitives.asymmetric import utils as asym_utils
+from cryptography.hazmat.primitives.asymmetric.padding import (
+ MGF1,
+ OAEP,
+ PSS,
+ AsymmetricPadding,
+ PKCS1v15,
+ _Auto,
+ _DigestLength,
+ _MaxLength,
+ calculate_max_pss_salt_length,
+)
+from cryptography.hazmat.primitives.asymmetric.rsa import (
+ RSAPrivateKey,
+ RSAPrivateNumbers,
+ RSAPublicKey,
+ RSAPublicNumbers,
+)
+
+if typing.TYPE_CHECKING:
+ from cryptography.hazmat.backends.openssl.backend import Backend
+
+
+def _get_rsa_pss_salt_length(
+    backend: "Backend",
+    pss: PSS,
+    key: typing.Union[RSAPrivateKey, RSAPublicKey],
+    hash_algorithm: hashes.HashAlgorithm,
+) -> int:
+    """Resolve a PSS salt-length setting to a concrete integer.
+
+    Maps the sentinel objects (_MaxLength, _DigestLength, _Auto) to the
+    value OpenSSL expects; a plain integer salt length is passed through.
+    AUTO is only meaningful when verifying, so it is rejected for private
+    keys.
+    """
+    salt = pss._salt_length
+
+    if isinstance(salt, _MaxLength):
+        return calculate_max_pss_salt_length(key, hash_algorithm)
+    elif isinstance(salt, _DigestLength):
+        return hash_algorithm.digest_size
+    elif isinstance(salt, _Auto):
+        if isinstance(key, RSAPrivateKey):
+            raise ValueError(
+                "PSS salt length can only be set to AUTO when verifying"
+            )
+        return backend._lib.RSA_PSS_SALTLEN_AUTO
+    else:
+        return salt
+
+
+def _enc_dec_rsa(
+    backend: "Backend",
+    key: typing.Union["_RSAPrivateKey", "_RSAPublicKey"],
+    data: bytes,
+    padding: AsymmetricPadding,
+) -> bytes:
+    """Validate *padding* and run an RSA encrypt/decrypt operation.
+
+    Translates the padding object into an OpenSSL padding constant
+    (PKCS1v15 or OAEP), rejecting anything else, then delegates the
+    actual EVP_PKEY operation to _enc_dec_rsa_pkey_ctx.
+    """
+    if not isinstance(padding, AsymmetricPadding):
+        raise TypeError("Padding must be an instance of AsymmetricPadding.")
+
+    if isinstance(padding, PKCS1v15):
+        padding_enum = backend._lib.RSA_PKCS1_PADDING
+    elif isinstance(padding, OAEP):
+        padding_enum = backend._lib.RSA_PKCS1_OAEP_PADDING
+
+        # OAEP here is only supported with an MGF1 mask generation
+        # function and a backend-supported hash combination.
+        if not isinstance(padding._mgf, MGF1):
+            raise UnsupportedAlgorithm(
+                "Only MGF1 is supported by this backend.",
+                _Reasons.UNSUPPORTED_MGF,
+            )
+
+        if not backend.rsa_padding_supported(padding):
+            raise UnsupportedAlgorithm(
+                "This combination of padding and hash algorithm is not "
+                "supported by this backend.",
+                _Reasons.UNSUPPORTED_PADDING,
+            )
+
+    else:
+        raise UnsupportedAlgorithm(
+            f"{padding.name} is not supported by this backend.",
+            _Reasons.UNSUPPORTED_PADDING,
+        )
+
+    return _enc_dec_rsa_pkey_ctx(backend, key, data, padding_enum, padding)
+
+
+def _enc_dec_rsa_pkey_ctx(
+    backend: "Backend",
+    key: typing.Union["_RSAPrivateKey", "_RSAPublicKey"],
+    data: bytes,
+    padding_enum: int,
+    padding: AsymmetricPadding,
+) -> bytes:
+    """Perform the EVP_PKEY encrypt (public key) or decrypt (private key).
+
+    Sets up an EVP_PKEY_CTX with the requested padding, configures the
+    OAEP digests/label when applicable, then runs the operation. The tail
+    of this function is deliberately written to be constant-time-ish; see
+    the comment below before changing it.
+    """
+    init: typing.Callable[[typing.Any], int]
+    crypt: typing.Callable[[typing.Any, typing.Any, int, bytes, int], int]
+    # Public key -> encrypt; private key -> decrypt.
+    if isinstance(key, _RSAPublicKey):
+        init = backend._lib.EVP_PKEY_encrypt_init
+        crypt = backend._lib.EVP_PKEY_encrypt
+    else:
+        init = backend._lib.EVP_PKEY_decrypt_init
+        crypt = backend._lib.EVP_PKEY_decrypt
+
+    pkey_ctx = backend._lib.EVP_PKEY_CTX_new(key._evp_pkey, backend._ffi.NULL)
+    backend.openssl_assert(pkey_ctx != backend._ffi.NULL)
+    pkey_ctx = backend._ffi.gc(pkey_ctx, backend._lib.EVP_PKEY_CTX_free)
+    res = init(pkey_ctx)
+    backend.openssl_assert(res == 1)
+    res = backend._lib.EVP_PKEY_CTX_set_rsa_padding(pkey_ctx, padding_enum)
+    backend.openssl_assert(res > 0)
+    # EVP_PKEY_size bounds the output length for either direction.
+    buf_size = backend._lib.EVP_PKEY_size(key._evp_pkey)
+    backend.openssl_assert(buf_size > 0)
+    if isinstance(padding, OAEP):
+        mgf1_md = backend._evp_md_non_null_from_algorithm(
+            padding._mgf._algorithm
+        )
+        res = backend._lib.EVP_PKEY_CTX_set_rsa_mgf1_md(pkey_ctx, mgf1_md)
+        backend.openssl_assert(res > 0)
+        oaep_md = backend._evp_md_non_null_from_algorithm(padding._algorithm)
+        res = backend._lib.EVP_PKEY_CTX_set_rsa_oaep_md(pkey_ctx, oaep_md)
+        backend.openssl_assert(res > 0)
+
+    if (
+        isinstance(padding, OAEP)
+        and padding._label is not None
+        and len(padding._label) > 0
+    ):
+        # set0_rsa_oaep_label takes ownership of the char * so we need to
+        # copy it into some new memory
+        labelptr = backend._lib.OPENSSL_malloc(len(padding._label))
+        backend.openssl_assert(labelptr != backend._ffi.NULL)
+        backend._ffi.memmove(labelptr, padding._label, len(padding._label))
+        res = backend._lib.EVP_PKEY_CTX_set0_rsa_oaep_label(
+            pkey_ctx, labelptr, len(padding._label)
+        )
+        backend.openssl_assert(res == 1)
+
+    outlen = backend._ffi.new("size_t *", buf_size)
+    buf = backend._ffi.new("unsigned char[]", buf_size)
+    # Everything from this line onwards is written with the goal of being as
+    # constant-time as is practical given the constraints of Python and our
+    # API. See Bleichenbacher's '98 attack on RSA, and its many many variants.
+    # As such, you should not attempt to change this (particularly to "clean it
+    # up") without understanding why it was written this way (see
+    # Chesterton's Fence), and without measuring to verify you have not
+    # introduced observable time differences.
+    res = crypt(pkey_ctx, buf, outlen, data, len(data))
+    resbuf = backend._ffi.buffer(buf)[: outlen[0]]
+    backend._lib.ERR_clear_error()
+    if res <= 0:
+        raise ValueError("Encryption/decryption failed.")
+    return resbuf
+
+
+def _rsa_sig_determine_padding(
+    backend: "Backend",
+    key: typing.Union["_RSAPrivateKey", "_RSAPublicKey"],
+    padding: AsymmetricPadding,
+    algorithm: typing.Optional[hashes.HashAlgorithm],
+) -> int:
+    """Validate the signature padding and return its OpenSSL constant.
+
+    PKCS1v15 ignores *algorithm* (it may be None); PSS requires a hash
+    algorithm and a key large enough to hold the digest.
+    """
+    if not isinstance(padding, AsymmetricPadding):
+        raise TypeError("Expected provider of AsymmetricPadding.")
+
+    pkey_size = backend._lib.EVP_PKEY_size(key._evp_pkey)
+    backend.openssl_assert(pkey_size > 0)
+
+    if isinstance(padding, PKCS1v15):
+        # Hash algorithm is ignored for PKCS1v15-padding, may be None.
+        padding_enum = backend._lib.RSA_PKCS1_PADDING
+    elif isinstance(padding, PSS):
+        if not isinstance(padding._mgf, MGF1):
+            raise UnsupportedAlgorithm(
+                "Only MGF1 is supported by this backend.",
+                _Reasons.UNSUPPORTED_MGF,
+            )
+
+        # PSS padding requires a hash algorithm
+        if not isinstance(algorithm, hashes.HashAlgorithm):
+            raise TypeError("Expected instance of hashes.HashAlgorithm.")
+
+        # Size of key in bytes - 2 is the maximum
+        # PSS signature length (salt length is checked later)
+        if pkey_size - algorithm.digest_size - 2 < 0:
+            raise ValueError(
+                "Digest too large for key size. Use a larger "
+                "key or different digest."
+            )
+
+        padding_enum = backend._lib.RSA_PKCS1_PSS_PADDING
+    else:
+        raise UnsupportedAlgorithm(
+            f"{padding.name} is not supported by this backend.",
+            _Reasons.UNSUPPORTED_PADDING,
+        )
+
+    return padding_enum
+
+
+# Hash algorithm can be absent (None) to initialize the context without setting
+# any message digest algorithm. This is currently only valid for the PKCS1v15
+# padding type, where it means that the signature data is encoded/decoded
+# as provided, without being wrapped in a DigestInfo structure.
+def _rsa_sig_setup(
+    backend: "Backend",
+    padding: AsymmetricPadding,
+    algorithm: typing.Optional[hashes.HashAlgorithm],
+    key: typing.Union["_RSAPublicKey", "_RSAPrivateKey"],
+    init_func: typing.Callable[[typing.Any], int],
+):
+    """Build an EVP_PKEY_CTX configured for an RSA signature operation.
+
+    *init_func* selects the operation (sign/verify/verify_recover init).
+    *algorithm* may be None (see the comment above this function) to skip
+    setting a message digest. Returns the configured pkey_ctx, whose
+    lifetime is managed via ffi.gc.
+    """
+    padding_enum = _rsa_sig_determine_padding(backend, key, padding, algorithm)
+    pkey_ctx = backend._lib.EVP_PKEY_CTX_new(key._evp_pkey, backend._ffi.NULL)
+    backend.openssl_assert(pkey_ctx != backend._ffi.NULL)
+    pkey_ctx = backend._ffi.gc(pkey_ctx, backend._lib.EVP_PKEY_CTX_free)
+    res = init_func(pkey_ctx)
+    if res != 1:
+        errors = backend._consume_errors()
+        raise ValueError("Unable to sign/verify with this key", errors)
+
+    if algorithm is not None:
+        evp_md = backend._evp_md_non_null_from_algorithm(algorithm)
+        res = backend._lib.EVP_PKEY_CTX_set_signature_md(pkey_ctx, evp_md)
+        if res <= 0:
+            # Drain the OpenSSL error queue before raising.
+            backend._consume_errors()
+            raise UnsupportedAlgorithm(
+                "{} is not supported by this backend for RSA signing.".format(
+                    algorithm.name
+                ),
+                _Reasons.UNSUPPORTED_HASH,
+            )
+    res = backend._lib.EVP_PKEY_CTX_set_rsa_padding(pkey_ctx, padding_enum)
+    if res <= 0:
+        backend._consume_errors()
+        raise UnsupportedAlgorithm(
+            "{} is not supported for the RSA signature operation.".format(
+                padding.name
+            ),
+            _Reasons.UNSUPPORTED_PADDING,
+        )
+    if isinstance(padding, PSS):
+        # PSS always carries a hash algorithm (enforced in
+        # _rsa_sig_determine_padding).
+        assert isinstance(algorithm, hashes.HashAlgorithm)
+        res = backend._lib.EVP_PKEY_CTX_set_rsa_pss_saltlen(
+            pkey_ctx,
+            _get_rsa_pss_salt_length(backend, padding, key, algorithm),
+        )
+        backend.openssl_assert(res > 0)
+
+        mgf1_md = backend._evp_md_non_null_from_algorithm(
+            padding._mgf._algorithm
+        )
+        res = backend._lib.EVP_PKEY_CTX_set_rsa_mgf1_md(pkey_ctx, mgf1_md)
+        backend.openssl_assert(res > 0)
+
+    return pkey_ctx
+
+
+def _rsa_sig_sign(
+    backend: "Backend",
+    padding: AsymmetricPadding,
+    algorithm: hashes.HashAlgorithm,
+    private_key: "_RSAPrivateKey",
+    data: bytes,
+) -> bytes:
+    """Sign pre-hashed *data* with the private key; return the signature.
+
+    Uses the standard two-call EVP_PKEY_sign pattern: first call with a
+    NULL buffer to learn the required length, then sign for real.
+    """
+    pkey_ctx = _rsa_sig_setup(
+        backend,
+        padding,
+        algorithm,
+        private_key,
+        backend._lib.EVP_PKEY_sign_init,
+    )
+    buflen = backend._ffi.new("size_t *")
+    res = backend._lib.EVP_PKEY_sign(
+        pkey_ctx, backend._ffi.NULL, buflen, data, len(data)
+    )
+    backend.openssl_assert(res == 1)
+    buf = backend._ffi.new("unsigned char[]", buflen[0])
+    res = backend._lib.EVP_PKEY_sign(pkey_ctx, buf, buflen, data, len(data))
+    if res != 1:
+        errors = backend._consume_errors()
+        raise ValueError(
+            "Digest or salt length too long for key size. Use a larger key "
+            "or shorter salt length if you are specifying a PSS salt",
+            errors,
+        )
+
+    return backend._ffi.buffer(buf)[:]
+
+
+def _rsa_sig_verify(
+    backend: "Backend",
+    padding: AsymmetricPadding,
+    algorithm: hashes.HashAlgorithm,
+    public_key: "_RSAPublicKey",
+    signature: bytes,
+    data: bytes,
+) -> None:
+    """Verify *signature* over pre-hashed *data*.
+
+    Raises InvalidSignature when verification fails; returns None on
+    success.
+    """
+    pkey_ctx = _rsa_sig_setup(
+        backend,
+        padding,
+        algorithm,
+        public_key,
+        backend._lib.EVP_PKEY_verify_init,
+    )
+    res = backend._lib.EVP_PKEY_verify(
+        pkey_ctx, signature, len(signature), data, len(data)
+    )
+    # The previous call can return negative numbers in the event of an
+    # error. This is not a signature failure but we need to fail if it
+    # occurs.
+    backend.openssl_assert(res >= 0)
+    if res == 0:
+        backend._consume_errors()
+        raise InvalidSignature
+
+
+def _rsa_sig_recover(
+    backend: "Backend",
+    padding: AsymmetricPadding,
+    algorithm: typing.Optional[hashes.HashAlgorithm],
+    public_key: "_RSAPublicKey",
+    signature: bytes,
+) -> bytes:
+    """Recover the signed data from *signature* (EVP_PKEY_verify_recover).
+
+    Raises InvalidSignature if recovery fails; otherwise returns the
+    recovered payload bytes.
+    """
+    pkey_ctx = _rsa_sig_setup(
+        backend,
+        padding,
+        algorithm,
+        public_key,
+        backend._lib.EVP_PKEY_verify_recover_init,
+    )
+
+    # Attempt to keep the rest of the code in this function as constant/time
+    # as possible. See the comment in _enc_dec_rsa_pkey_ctx. Note that the
+    # buflen parameter is used even though its value may be undefined in the
+    # error case. Due to the tolerant nature of Python slicing this does not
+    # trigger any exceptions.
+    maxlen = backend._lib.EVP_PKEY_size(public_key._evp_pkey)
+    backend.openssl_assert(maxlen > 0)
+    buf = backend._ffi.new("unsigned char[]", maxlen)
+    buflen = backend._ffi.new("size_t *", maxlen)
+    res = backend._lib.EVP_PKEY_verify_recover(
+        pkey_ctx, buf, buflen, signature, len(signature)
+    )
+    resbuf = backend._ffi.buffer(buf)[: buflen[0]]
+    backend._lib.ERR_clear_error()
+    # Assume that all parameter errors are handled during the setup phase and
+    # any error here is due to invalid signature.
+    if res != 1:
+        raise InvalidSignature
+    return resbuf
+
+
+class _RSAPrivateKey(RSAPrivateKey):
+    """OpenSSL-backed RSA private key.
+
+    Wraps both the RSA cdata pointer and its EVP_PKEY form, validating
+    the key on construction (unless explicitly skipped) and enabling
+    blinding lazily before private-key operations.
+    """
+
+    _evp_pkey: object
+    _rsa_cdata: object
+    _key_size: int
+
+    def __init__(
+        self,
+        backend: "Backend",
+        rsa_cdata,
+        evp_pkey,
+        *,
+        unsafe_skip_rsa_key_validation: bool,
+    ):
+        res: int
+        # RSA_check_key is slower in OpenSSL 3.0.0 due to improved
+        # primality checking. In normal use this is unlikely to be a problem
+        # since users don't load new keys constantly, but for TESTING we've
+        # added an init arg that allows skipping the checks. You should not
+        # use this in production code unless you understand the consequences.
+        if not unsafe_skip_rsa_key_validation:
+            res = backend._lib.RSA_check_key(rsa_cdata)
+            if res != 1:
+                errors = backend._consume_errors()
+                raise ValueError("Invalid private key", errors)
+            # 2 is prime and passes an RSA key check, so we also check
+            # if p and q are odd just to be safe.
+            p = backend._ffi.new("BIGNUM **")
+            q = backend._ffi.new("BIGNUM **")
+            backend._lib.RSA_get0_factors(rsa_cdata, p, q)
+            backend.openssl_assert(p[0] != backend._ffi.NULL)
+            backend.openssl_assert(q[0] != backend._ffi.NULL)
+            p_odd = backend._lib.BN_is_odd(p[0])
+            q_odd = backend._lib.BN_is_odd(q[0])
+            if p_odd != 1 or q_odd != 1:
+                errors = backend._consume_errors()
+                raise ValueError("Invalid private key", errors)
+
+        self._backend = backend
+        self._rsa_cdata = rsa_cdata
+        self._evp_pkey = evp_pkey
+        # Used for lazy blinding
+        self._blinded = False
+        self._blinding_lock = threading.Lock()
+
+        # Derive the key size (in bits) from the modulus n.
+        n = self._backend._ffi.new("BIGNUM **")
+        self._backend._lib.RSA_get0_key(
+            self._rsa_cdata,
+            n,
+            self._backend._ffi.NULL,
+            self._backend._ffi.NULL,
+        )
+        self._backend.openssl_assert(n[0] != self._backend._ffi.NULL)
+        self._key_size = self._backend._lib.BN_num_bits(n[0])
+
+    def _enable_blinding(self) -> None:
+        """Turn on RSA blinding exactly once (thread-safe)."""
+        # If you call blind on an already blinded RSA key OpenSSL will turn
+        # it off and back on, which is a performance hit we want to avoid.
+        if not self._blinded:
+            with self._blinding_lock:
+                self._non_threadsafe_enable_blinding()
+
+    def _non_threadsafe_enable_blinding(self) -> None:
+        # This is only a separate function to allow for testing to cover both
+        # branches. It should never be invoked except through _enable_blinding.
+        # Check if it's not True again in case another thread raced past the
+        # first non-locked check.
+        if not self._blinded:
+            res = self._backend._lib.RSA_blinding_on(
+                self._rsa_cdata, self._backend._ffi.NULL
+            )
+            self._backend.openssl_assert(res == 1)
+            self._blinded = True
+
+    @property
+    def key_size(self) -> int:
+        """Key size in bits (bit length of the modulus)."""
+        return self._key_size
+
+    def decrypt(self, ciphertext: bytes, padding: AsymmetricPadding) -> bytes:
+        """Decrypt *ciphertext* (which must be exactly key-size bytes)."""
+        self._enable_blinding()
+        key_size_bytes = (self.key_size + 7) // 8
+        if key_size_bytes != len(ciphertext):
+            raise ValueError("Ciphertext length must be equal to key size.")
+
+        return _enc_dec_rsa(self._backend, self, ciphertext, padding)
+
+    def public_key(self) -> RSAPublicKey:
+        """Return the corresponding public key object."""
+        ctx = self._backend._lib.RSAPublicKey_dup(self._rsa_cdata)
+        self._backend.openssl_assert(ctx != self._backend._ffi.NULL)
+        ctx = self._backend._ffi.gc(ctx, self._backend._lib.RSA_free)
+        evp_pkey = self._backend._rsa_cdata_to_evp_pkey(ctx)
+        return _RSAPublicKey(self._backend, ctx, evp_pkey)
+
+    def private_numbers(self) -> RSAPrivateNumbers:
+        """Extract all CRT components as Python integers."""
+        n = self._backend._ffi.new("BIGNUM **")
+        e = self._backend._ffi.new("BIGNUM **")
+        d = self._backend._ffi.new("BIGNUM **")
+        p = self._backend._ffi.new("BIGNUM **")
+        q = self._backend._ffi.new("BIGNUM **")
+        dmp1 = self._backend._ffi.new("BIGNUM **")
+        dmq1 = self._backend._ffi.new("BIGNUM **")
+        iqmp = self._backend._ffi.new("BIGNUM **")
+        self._backend._lib.RSA_get0_key(self._rsa_cdata, n, e, d)
+        self._backend.openssl_assert(n[0] != self._backend._ffi.NULL)
+        self._backend.openssl_assert(e[0] != self._backend._ffi.NULL)
+        self._backend.openssl_assert(d[0] != self._backend._ffi.NULL)
+        self._backend._lib.RSA_get0_factors(self._rsa_cdata, p, q)
+        self._backend.openssl_assert(p[0] != self._backend._ffi.NULL)
+        self._backend.openssl_assert(q[0] != self._backend._ffi.NULL)
+        self._backend._lib.RSA_get0_crt_params(
+            self._rsa_cdata, dmp1, dmq1, iqmp
+        )
+        self._backend.openssl_assert(dmp1[0] != self._backend._ffi.NULL)
+        self._backend.openssl_assert(dmq1[0] != self._backend._ffi.NULL)
+        self._backend.openssl_assert(iqmp[0] != self._backend._ffi.NULL)
+        return RSAPrivateNumbers(
+            p=self._backend._bn_to_int(p[0]),
+            q=self._backend._bn_to_int(q[0]),
+            d=self._backend._bn_to_int(d[0]),
+            dmp1=self._backend._bn_to_int(dmp1[0]),
+            dmq1=self._backend._bn_to_int(dmq1[0]),
+            iqmp=self._backend._bn_to_int(iqmp[0]),
+            public_numbers=RSAPublicNumbers(
+                e=self._backend._bn_to_int(e[0]),
+                n=self._backend._bn_to_int(n[0]),
+            ),
+        )
+
+    def private_bytes(
+        self,
+        encoding: serialization.Encoding,
+        format: serialization.PrivateFormat,
+        encryption_algorithm: serialization.KeySerializationEncryption,
+    ) -> bytes:
+        """Serialize the private key; delegates to the backend."""
+        return self._backend._private_key_bytes(
+            encoding,
+            format,
+            encryption_algorithm,
+            self,
+            self._evp_pkey,
+            self._rsa_cdata,
+        )
+
+    def sign(
+        self,
+        data: bytes,
+        padding: AsymmetricPadding,
+        algorithm: typing.Union[asym_utils.Prehashed, hashes.HashAlgorithm],
+    ) -> bytes:
+        """Hash (unless Prehashed) and sign *data*; returns the signature."""
+        self._enable_blinding()
+        data, algorithm = _calculate_digest_and_algorithm(data, algorithm)
+        return _rsa_sig_sign(self._backend, padding, algorithm, self, data)
+
+
+class _RSAPublicKey(RSAPublicKey):
+    """OpenSSL-backed RSA public key.
+
+    Wraps the RSA cdata pointer and its EVP_PKEY form and implements
+    encrypt/verify/recover plus serialization on top of them.
+    """
+
+    _evp_pkey: object
+    _rsa_cdata: object
+    _key_size: int
+
+    def __init__(self, backend: "Backend", rsa_cdata, evp_pkey):
+        self._backend = backend
+        self._rsa_cdata = rsa_cdata
+        self._evp_pkey = evp_pkey
+
+        # Derive the key size (in bits) from the modulus n.
+        n = self._backend._ffi.new("BIGNUM **")
+        self._backend._lib.RSA_get0_key(
+            self._rsa_cdata,
+            n,
+            self._backend._ffi.NULL,
+            self._backend._ffi.NULL,
+        )
+        self._backend.openssl_assert(n[0] != self._backend._ffi.NULL)
+        self._key_size = self._backend._lib.BN_num_bits(n[0])
+
+    @property
+    def key_size(self) -> int:
+        """Key size in bits (bit length of the modulus)."""
+        return self._key_size
+
+    def encrypt(self, plaintext: bytes, padding: AsymmetricPadding) -> bytes:
+        """Encrypt *plaintext* under this public key."""
+        return _enc_dec_rsa(self._backend, self, plaintext, padding)
+
+    def public_numbers(self) -> RSAPublicNumbers:
+        """Extract (e, n) as Python integers."""
+        n = self._backend._ffi.new("BIGNUM **")
+        e = self._backend._ffi.new("BIGNUM **")
+        self._backend._lib.RSA_get0_key(
+            self._rsa_cdata, n, e, self._backend._ffi.NULL
+        )
+        self._backend.openssl_assert(n[0] != self._backend._ffi.NULL)
+        self._backend.openssl_assert(e[0] != self._backend._ffi.NULL)
+        return RSAPublicNumbers(
+            e=self._backend._bn_to_int(e[0]),
+            n=self._backend._bn_to_int(n[0]),
+        )
+
+    def public_bytes(
+        self,
+        encoding: serialization.Encoding,
+        format: serialization.PublicFormat,
+    ) -> bytes:
+        """Serialize the public key; delegates to the backend."""
+        return self._backend._public_key_bytes(
+            encoding, format, self, self._evp_pkey, self._rsa_cdata
+        )
+
+    def verify(
+        self,
+        signature: bytes,
+        data: bytes,
+        padding: AsymmetricPadding,
+        algorithm: typing.Union[asym_utils.Prehashed, hashes.HashAlgorithm],
+    ) -> None:
+        """Hash (unless Prehashed) and verify *signature* over *data*.
+
+        Raises InvalidSignature on failure.
+        """
+        data, algorithm = _calculate_digest_and_algorithm(data, algorithm)
+        _rsa_sig_verify(
+            self._backend, padding, algorithm, self, signature, data
+        )
+
+    def recover_data_from_signature(
+        self,
+        signature: bytes,
+        padding: AsymmetricPadding,
+        algorithm: typing.Optional[hashes.HashAlgorithm],
+    ) -> bytes:
+        """Recover the signed payload from *signature*.
+
+        *algorithm* may be None (raw PKCS1v15 decoding); Prehashed is
+        explicitly rejected here.
+        """
+        if isinstance(algorithm, asym_utils.Prehashed):
+            raise TypeError(
+                "Prehashed is only supported in the sign and verify methods. "
+                "It cannot be used with recover_data_from_signature."
+            )
+        return _rsa_sig_recover(
+            self._backend, padding, algorithm, self, signature
+        )
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/backends/openssl/utils.py b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/backends/openssl/utils.py
new file mode 100644
index 00000000..64b4a833
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/backends/openssl/utils.py
@@ -0,0 +1,61 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import typing
+
+from cryptography.hazmat.primitives import hashes
+from cryptography.hazmat.primitives.asymmetric.utils import Prehashed
+
+if typing.TYPE_CHECKING:
+ from cryptography.hazmat.backends.openssl.backend import Backend
+
+
+def _evp_pkey_derive(backend: "Backend", evp_pkey, peer_public_key) -> bytes:
+    """Run an EVP_PKEY key agreement and return the shared secret bytes.
+
+    Uses the two-call EVP_PKEY_derive pattern: first with a NULL buffer
+    to learn the output length, then for the real derivation. Raises
+    ValueError (with the OpenSSL error stack) if derivation fails.
+    """
+    ctx = backend._lib.EVP_PKEY_CTX_new(evp_pkey, backend._ffi.NULL)
+    backend.openssl_assert(ctx != backend._ffi.NULL)
+    ctx = backend._ffi.gc(ctx, backend._lib.EVP_PKEY_CTX_free)
+    res = backend._lib.EVP_PKEY_derive_init(ctx)
+    backend.openssl_assert(res == 1)
+
+    # Prefer the _ex variant when the bindings expose it.
+    if backend._lib.Cryptography_HAS_EVP_PKEY_SET_PEER_EX:
+        res = backend._lib.EVP_PKEY_derive_set_peer_ex(
+            ctx, peer_public_key._evp_pkey, 0
+        )
+    else:
+        res = backend._lib.EVP_PKEY_derive_set_peer(
+            ctx, peer_public_key._evp_pkey
+        )
+    backend.openssl_assert(res == 1)
+
+    keylen = backend._ffi.new("size_t *")
+    res = backend._lib.EVP_PKEY_derive(ctx, backend._ffi.NULL, keylen)
+    backend.openssl_assert(res == 1)
+    backend.openssl_assert(keylen[0] > 0)
+    buf = backend._ffi.new("unsigned char[]", keylen[0])
+    res = backend._lib.EVP_PKEY_derive(ctx, buf, keylen)
+    if res != 1:
+        errors = backend._consume_errors()
+        raise ValueError("Error computing shared key.", errors)
+
+    return backend._ffi.buffer(buf, keylen[0])[:]
+
+
+def _calculate_digest_and_algorithm(
+    data: bytes,
+    algorithm: typing.Union[Prehashed, hashes.HashAlgorithm],
+) -> typing.Tuple[bytes, hashes.HashAlgorithm]:
+    """Normalize (data, algorithm) to (digest, HashAlgorithm).
+
+    If *algorithm* is a HashAlgorithm, *data* is hashed with it. If it is
+    Prehashed, *data* is taken as the digest and the wrapped algorithm is
+    unwrapped; its length must then match the digest size.
+    """
+    if not isinstance(algorithm, Prehashed):
+        hash_ctx = hashes.Hash(algorithm)
+        hash_ctx.update(data)
+        data = hash_ctx.finalize()
+    else:
+        algorithm = algorithm._algorithm
+
+    if len(data) != algorithm.digest_size:
+        raise ValueError(
+            "The provided data must be the same length as the hash "
+            "algorithm's digest size."
+        )
+
+    return (data, algorithm)
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/backends/openssl/x448.py b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/backends/openssl/x448.py
new file mode 100644
index 00000000..d738188c
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/backends/openssl/x448.py
@@ -0,0 +1,117 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import typing
+
+from cryptography.hazmat.backends.openssl.utils import _evp_pkey_derive
+from cryptography.hazmat.primitives import serialization
+from cryptography.hazmat.primitives.asymmetric.x448 import (
+ X448PrivateKey,
+ X448PublicKey,
+)
+
+if typing.TYPE_CHECKING:
+ from cryptography.hazmat.backends.openssl.backend import Backend
+
+# X448 public keys, private keys, and shared secrets are all 56 bytes.
+_X448_KEY_SIZE = 56
+
+
+class _X448PublicKey(X448PublicKey):
+    """OpenSSL-backed X448 public key wrapping an EVP_PKEY."""
+
+    def __init__(self, backend: "Backend", evp_pkey):
+        self._backend = backend
+        self._evp_pkey = evp_pkey
+
+    def public_bytes(
+        self,
+        encoding: serialization.Encoding,
+        format: serialization.PublicFormat,
+    ) -> bytes:
+        """Serialize the public key.
+
+        Raw encoding and Raw format must be used together; anything else
+        is delegated to the backend's generic serializer.
+        """
+        if (
+            encoding is serialization.Encoding.Raw
+            or format is serialization.PublicFormat.Raw
+        ):
+            if (
+                encoding is not serialization.Encoding.Raw
+                or format is not serialization.PublicFormat.Raw
+            ):
+                raise ValueError(
+                    "When using Raw both encoding and format must be Raw"
+                )
+
+            return self._raw_public_bytes()
+
+        return self._backend._public_key_bytes(
+            encoding, format, self, self._evp_pkey, None
+        )
+
+    def _raw_public_bytes(self) -> bytes:
+        """Return the 56 raw public-key bytes via EVP_PKEY_get_raw_public_key."""
+        buf = self._backend._ffi.new("unsigned char []", _X448_KEY_SIZE)
+        buflen = self._backend._ffi.new("size_t *", _X448_KEY_SIZE)
+        res = self._backend._lib.EVP_PKEY_get_raw_public_key(
+            self._evp_pkey, buf, buflen
+        )
+        self._backend.openssl_assert(res == 1)
+        self._backend.openssl_assert(buflen[0] == _X448_KEY_SIZE)
+        return self._backend._ffi.buffer(buf, _X448_KEY_SIZE)[:]
+
+
+class _X448PrivateKey(X448PrivateKey):
+    """OpenSSL-backed X448 private key wrapping an EVP_PKEY."""
+
+    def __init__(self, backend: "Backend", evp_pkey):
+        self._backend = backend
+        self._evp_pkey = evp_pkey
+
+    def public_key(self) -> X448PublicKey:
+        """Derive and return the corresponding public key."""
+        buf = self._backend._ffi.new("unsigned char []", _X448_KEY_SIZE)
+        buflen = self._backend._ffi.new("size_t *", _X448_KEY_SIZE)
+        res = self._backend._lib.EVP_PKEY_get_raw_public_key(
+            self._evp_pkey, buf, buflen
+        )
+        self._backend.openssl_assert(res == 1)
+        self._backend.openssl_assert(buflen[0] == _X448_KEY_SIZE)
+        public_bytes = self._backend._ffi.buffer(buf)[:]
+        return self._backend.x448_load_public_bytes(public_bytes)
+
+    def exchange(self, peer_public_key: X448PublicKey) -> bytes:
+        """Perform X448 key agreement and return the shared secret."""
+        if not isinstance(peer_public_key, X448PublicKey):
+            raise TypeError("peer_public_key must be X448PublicKey.")
+
+        return _evp_pkey_derive(self._backend, self._evp_pkey, peer_public_key)
+
+    def private_bytes(
+        self,
+        encoding: serialization.Encoding,
+        format: serialization.PrivateFormat,
+        encryption_algorithm: serialization.KeySerializationEncryption,
+    ) -> bytes:
+        """Serialize the private key.
+
+        Raw encoding/format must be used together and only with
+        NoEncryption; anything else is delegated to the backend's generic
+        serializer.
+        """
+        if (
+            encoding is serialization.Encoding.Raw
+            or format is serialization.PrivateFormat.Raw
+        ):
+            if (
+                format is not serialization.PrivateFormat.Raw
+                or encoding is not serialization.Encoding.Raw
+                or not isinstance(
+                    encryption_algorithm, serialization.NoEncryption
+                )
+            ):
+                raise ValueError(
+                    "When using Raw both encoding and format must be Raw "
+                    "and encryption_algorithm must be NoEncryption()"
+                )
+
+            return self._raw_private_bytes()
+
+        return self._backend._private_key_bytes(
+            encoding, format, encryption_algorithm, self, self._evp_pkey, None
+        )
+
+    def _raw_private_bytes(self) -> bytes:
+        """Return the 56 raw private-key bytes via EVP_PKEY_get_raw_private_key."""
+        buf = self._backend._ffi.new("unsigned char []", _X448_KEY_SIZE)
+        buflen = self._backend._ffi.new("size_t *", _X448_KEY_SIZE)
+        res = self._backend._lib.EVP_PKEY_get_raw_private_key(
+            self._evp_pkey, buf, buflen
+        )
+        self._backend.openssl_assert(res == 1)
+        self._backend.openssl_assert(buflen[0] == _X448_KEY_SIZE)
+        return self._backend._ffi.buffer(buf, _X448_KEY_SIZE)[:]
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/bindings/__init__.py b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/bindings/__init__.py
new file mode 100644
index 00000000..b5093362
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/bindings/__init__.py
@@ -0,0 +1,3 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust.abi3.so b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust.abi3.so
new file mode 100755
index 00000000..1e11f84d
Binary files /dev/null and b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust.abi3.so differ
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/__init__.pyi b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/__init__.pyi
new file mode 100644
index 00000000..94a37a20
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/__init__.pyi
@@ -0,0 +1,34 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import types
+import typing
+
+def check_pkcs7_padding(data: bytes) -> bool: ...
+def check_ansix923_padding(data: bytes) -> bool: ...
+
+class ObjectIdentifier:
+ def __init__(self, val: str) -> None: ...
+ @property
+ def dotted_string(self) -> str: ...
+ @property
+ def _name(self) -> str: ...
+
+T = typing.TypeVar("T")
+
+class FixedPool(typing.Generic[T]):
+ def __init__(
+ self,
+ create: typing.Callable[[], T],
+ ) -> None: ...
+ def acquire(self) -> PoolAcquisition[T]: ...
+
+class PoolAcquisition(typing.Generic[T]):
+ def __enter__(self) -> T: ...
+ def __exit__(
+ self,
+ exc_type: typing.Optional[typing.Type[BaseException]],
+ exc_value: typing.Optional[BaseException],
+ exc_tb: typing.Optional[types.TracebackType],
+ ) -> None: ...
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/_openssl.pyi b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/_openssl.pyi
new file mode 100644
index 00000000..80100082
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/_openssl.pyi
@@ -0,0 +1,8 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import typing
+
+lib = typing.Any
+ffi = typing.Any
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/asn1.pyi b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/asn1.pyi
new file mode 100644
index 00000000..a8369ba8
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/asn1.pyi
@@ -0,0 +1,16 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import typing
+
+class TestCertificate:
+ not_after_tag: int
+ not_before_tag: int
+ issuer_value_tags: typing.List[int]
+ subject_value_tags: typing.List[int]
+
+def decode_dss_signature(signature: bytes) -> typing.Tuple[int, int]: ...
+def encode_dss_signature(r: int, s: int) -> bytes: ...
+def parse_spki_for_data(data: bytes) -> bytes: ...
+def test_parse_certificate(data: bytes) -> TestCertificate: ...
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/ocsp.pyi b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/ocsp.pyi
new file mode 100644
index 00000000..4671eb9b
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/ocsp.pyi
@@ -0,0 +1,25 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import typing
+
+from cryptography.hazmat.primitives import hashes
+from cryptography.hazmat.primitives.asymmetric.types import PrivateKeyTypes
+from cryptography.x509.ocsp import (
+ OCSPRequest,
+ OCSPRequestBuilder,
+ OCSPResponse,
+ OCSPResponseBuilder,
+ OCSPResponseStatus,
+)
+
+def load_der_ocsp_request(data: bytes) -> OCSPRequest: ...
+def load_der_ocsp_response(data: bytes) -> OCSPResponse: ...
+def create_ocsp_request(builder: OCSPRequestBuilder) -> OCSPRequest: ...
+def create_ocsp_response(
+ status: OCSPResponseStatus,
+ builder: typing.Optional[OCSPResponseBuilder],
+ private_key: typing.Optional[PrivateKeyTypes],
+ hash_algorithm: typing.Optional[hashes.HashAlgorithm],
+) -> OCSPResponse: ...
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/openssl/__init__.pyi b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/openssl/__init__.pyi
new file mode 100644
index 00000000..c19b6a9b
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/openssl/__init__.pyi
@@ -0,0 +1,22 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import typing
+
+from cryptography.hazmat.bindings._rust.openssl import x25519
+
+__all__ = ["openssl_version", "raise_openssl_error", "x25519"]
+
+def openssl_version() -> int: ...
+def raise_openssl_error() -> typing.NoReturn: ...
+def capture_error_stack() -> typing.List[OpenSSLError]: ...
+
+class OpenSSLError:
+ @property
+ def lib(self) -> int: ...
+ @property
+ def reason(self) -> int: ...
+ @property
+ def reason_text(self) -> bytes: ...
+ def _lib_reason_match(self, lib: int, reason: int) -> bool: ...
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/openssl/x25519.pyi b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/openssl/x25519.pyi
new file mode 100644
index 00000000..90f7cbdd
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/openssl/x25519.pyi
@@ -0,0 +1,14 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from cryptography.hazmat.primitives.asymmetric import x25519
+
+class X25519PrivateKey: ...
+class X25519PublicKey: ...
+
+def generate_key() -> x25519.X25519PrivateKey: ...
+def private_key_from_ptr(ptr: int) -> x25519.X25519PrivateKey: ...
+def public_key_from_ptr(ptr: int) -> x25519.X25519PublicKey: ...
+def from_private_bytes(data: bytes) -> x25519.X25519PrivateKey: ...
+def from_public_bytes(data: bytes) -> x25519.X25519PublicKey: ...
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/pkcs7.pyi b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/pkcs7.pyi
new file mode 100644
index 00000000..66bd8509
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/pkcs7.pyi
@@ -0,0 +1,15 @@
+import typing
+
+from cryptography import x509
+from cryptography.hazmat.primitives import serialization
+from cryptography.hazmat.primitives.serialization import pkcs7
+
+def serialize_certificates(
+ certs: typing.List[x509.Certificate],
+ encoding: serialization.Encoding,
+) -> bytes: ...
+def sign_and_serialize(
+ builder: pkcs7.PKCS7SignatureBuilder,
+ encoding: serialization.Encoding,
+ options: typing.Iterable[pkcs7.PKCS7Options],
+) -> bytes: ...
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/x509.pyi b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/x509.pyi
new file mode 100644
index 00000000..71c8d5c2
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/bindings/_rust/x509.pyi
@@ -0,0 +1,42 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import typing
+
+from cryptography import x509
+from cryptography.hazmat.primitives import hashes
+from cryptography.hazmat.primitives.asymmetric.types import PrivateKeyTypes
+
+def load_pem_x509_certificate(data: bytes) -> x509.Certificate: ...
+def load_pem_x509_certificates(
+ data: bytes,
+) -> typing.List[x509.Certificate]: ...
+def load_der_x509_certificate(data: bytes) -> x509.Certificate: ...
+def load_pem_x509_crl(data: bytes) -> x509.CertificateRevocationList: ...
+def load_der_x509_crl(data: bytes) -> x509.CertificateRevocationList: ...
+def load_pem_x509_csr(data: bytes) -> x509.CertificateSigningRequest: ...
+def load_der_x509_csr(data: bytes) -> x509.CertificateSigningRequest: ...
+def encode_name_bytes(name: x509.Name) -> bytes: ...
+def encode_extension_value(extension: x509.ExtensionType) -> bytes: ...
+def create_x509_certificate(
+ builder: x509.CertificateBuilder,
+ private_key: PrivateKeyTypes,
+ hash_algorithm: typing.Optional[hashes.HashAlgorithm],
+) -> x509.Certificate: ...
+def create_x509_csr(
+ builder: x509.CertificateSigningRequestBuilder,
+ private_key: PrivateKeyTypes,
+ hash_algorithm: typing.Optional[hashes.HashAlgorithm],
+) -> x509.CertificateSigningRequest: ...
+def create_x509_crl(
+ builder: x509.CertificateRevocationListBuilder,
+ private_key: PrivateKeyTypes,
+ hash_algorithm: typing.Optional[hashes.HashAlgorithm],
+) -> x509.CertificateRevocationList: ...
+
+class Sct: ...
+class Certificate: ...
+class RevokedCertificate: ...
+class CertificateRevocationList: ...
+class CertificateSigningRequest: ...
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/bindings/openssl/__init__.py b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/bindings/openssl/__init__.py
new file mode 100644
index 00000000..b5093362
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/bindings/openssl/__init__.py
@@ -0,0 +1,3 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/bindings/openssl/_conditional.py b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/bindings/openssl/_conditional.py
new file mode 100644
index 00000000..c34fc3ae
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/bindings/openssl/_conditional.py
@@ -0,0 +1,332 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import typing
+
+
+def cryptography_has_set_cert_cb() -> typing.List[str]:
+ return [
+ "SSL_CTX_set_cert_cb",
+ "SSL_set_cert_cb",
+ ]
+
+
+def cryptography_has_ssl_st() -> typing.List[str]:
+ return [
+ "SSL_ST_BEFORE",
+ "SSL_ST_OK",
+ "SSL_ST_INIT",
+ "SSL_ST_RENEGOTIATE",
+ ]
+
+
+def cryptography_has_tls_st() -> typing.List[str]:
+ return [
+ "TLS_ST_BEFORE",
+ "TLS_ST_OK",
+ ]
+
+
+def cryptography_has_scrypt() -> typing.List[str]:
+ return [
+ "EVP_PBE_scrypt",
+ ]
+
+
+def cryptography_has_evp_pkey_dhx() -> typing.List[str]:
+ return [
+ "EVP_PKEY_DHX",
+ "d2i_DHxparams_bio",
+ "i2d_DHxparams_bio",
+ ]
+
+
+def cryptography_has_mem_functions() -> typing.List[str]:
+ return [
+ "Cryptography_CRYPTO_set_mem_functions",
+ ]
+
+
+def cryptography_has_x509_store_ctx_get_issuer() -> typing.List[str]:
+ return [
+ "X509_STORE_set_get_issuer",
+ ]
+
+
+def cryptography_has_ed448() -> typing.List[str]:
+ return [
+ "EVP_PKEY_ED448",
+ "NID_ED448",
+ ]
+
+
+def cryptography_has_ed25519() -> typing.List[str]:
+ return [
+ "NID_ED25519",
+ "EVP_PKEY_ED25519",
+ ]
+
+
+def cryptography_has_poly1305() -> typing.List[str]:
+ return [
+ "NID_poly1305",
+ "EVP_PKEY_POLY1305",
+ ]
+
+
+def cryptography_has_evp_digestfinal_xof() -> typing.List[str]:
+ return [
+ "EVP_DigestFinalXOF",
+ ]
+
+
+def cryptography_has_fips() -> typing.List[str]:
+ return [
+ "FIPS_mode_set",
+ "FIPS_mode",
+ ]
+
+
+def cryptography_has_ssl_sigalgs() -> typing.List[str]:
+ return [
+ "SSL_CTX_set1_sigalgs_list",
+ ]
+
+
+def cryptography_has_psk() -> typing.List[str]:
+ return [
+ "SSL_CTX_use_psk_identity_hint",
+ "SSL_CTX_set_psk_server_callback",
+ "SSL_CTX_set_psk_client_callback",
+ ]
+
+
+def cryptography_has_psk_tlsv13() -> typing.List[str]:
+ return [
+ "SSL_CTX_set_psk_find_session_callback",
+ "SSL_CTX_set_psk_use_session_callback",
+ "Cryptography_SSL_SESSION_new",
+ "SSL_CIPHER_find",
+ "SSL_SESSION_set1_master_key",
+ "SSL_SESSION_set_cipher",
+ "SSL_SESSION_set_protocol_version",
+ ]
+
+
+def cryptography_has_custom_ext() -> typing.List[str]:
+ return [
+ "SSL_CTX_add_client_custom_ext",
+ "SSL_CTX_add_server_custom_ext",
+ "SSL_extension_supported",
+ ]
+
+
+def cryptography_has_openssl_cleanup() -> typing.List[str]:
+ return [
+ "OPENSSL_cleanup",
+ ]
+
+
+def cryptography_has_tlsv13_functions() -> typing.List[str]:
+ return [
+ "SSL_VERIFY_POST_HANDSHAKE",
+ "SSL_CTX_set_ciphersuites",
+ "SSL_verify_client_post_handshake",
+ "SSL_CTX_set_post_handshake_auth",
+ "SSL_set_post_handshake_auth",
+ "SSL_SESSION_get_max_early_data",
+ "SSL_write_early_data",
+ "SSL_read_early_data",
+ "SSL_CTX_set_max_early_data",
+ ]
+
+
+def cryptography_has_raw_key() -> typing.List[str]:
+ return [
+ "EVP_PKEY_new_raw_private_key",
+ "EVP_PKEY_new_raw_public_key",
+ "EVP_PKEY_get_raw_private_key",
+ "EVP_PKEY_get_raw_public_key",
+ ]
+
+
+def cryptography_has_engine() -> typing.List[str]:
+ return [
+ "ENGINE_by_id",
+ "ENGINE_init",
+ "ENGINE_finish",
+ "ENGINE_get_default_RAND",
+ "ENGINE_set_default_RAND",
+ "ENGINE_unregister_RAND",
+ "ENGINE_ctrl_cmd",
+ "ENGINE_free",
+ "ENGINE_get_name",
+ "Cryptography_add_osrandom_engine",
+ "ENGINE_ctrl_cmd_string",
+ "ENGINE_load_builtin_engines",
+ "ENGINE_load_private_key",
+ "ENGINE_load_public_key",
+ "SSL_CTX_set_client_cert_engine",
+ ]
+
+
+def cryptography_has_verified_chain() -> typing.List[str]:
+ return [
+ "SSL_get0_verified_chain",
+ ]
+
+
+def cryptography_has_srtp() -> typing.List[str]:
+ return [
+ "SSL_CTX_set_tlsext_use_srtp",
+ "SSL_set_tlsext_use_srtp",
+ "SSL_get_selected_srtp_profile",
+ ]
+
+
+def cryptography_has_providers() -> typing.List[str]:
+ return [
+ "OSSL_PROVIDER_load",
+ "OSSL_PROVIDER_unload",
+ "ERR_LIB_PROV",
+ "PROV_R_WRONG_FINAL_BLOCK_LENGTH",
+ "PROV_R_BAD_DECRYPT",
+ ]
+
+
+def cryptography_has_op_no_renegotiation() -> typing.List[str]:
+ return [
+ "SSL_OP_NO_RENEGOTIATION",
+ ]
+
+
+def cryptography_has_dtls_get_data_mtu() -> typing.List[str]:
+ return [
+ "DTLS_get_data_mtu",
+ ]
+
+
+def cryptography_has_300_fips() -> typing.List[str]:
+ return [
+ "EVP_default_properties_is_fips_enabled",
+ "EVP_default_properties_enable_fips",
+ ]
+
+
+def cryptography_has_ssl_cookie() -> typing.List[str]:
+ return [
+ "SSL_OP_COOKIE_EXCHANGE",
+ "DTLSv1_listen",
+ "SSL_CTX_set_cookie_generate_cb",
+ "SSL_CTX_set_cookie_verify_cb",
+ ]
+
+
+def cryptography_has_pkcs7_funcs() -> typing.List[str]:
+ return [
+ "SMIME_write_PKCS7",
+ "PEM_write_bio_PKCS7_stream",
+ "PKCS7_sign_add_signer",
+ "PKCS7_final",
+ "PKCS7_verify",
+ "SMIME_read_PKCS7",
+ "PKCS7_get0_signers",
+ ]
+
+
+def cryptography_has_bn_flags() -> typing.List[str]:
+ return [
+ "BN_FLG_CONSTTIME",
+ "BN_set_flags",
+ "BN_prime_checks_for_size",
+ ]
+
+
+def cryptography_has_evp_pkey_dh() -> typing.List[str]:
+ return [
+ "EVP_PKEY_set1_DH",
+ ]
+
+
+def cryptography_has_300_evp_cipher() -> typing.List[str]:
+ return ["EVP_CIPHER_fetch", "EVP_CIPHER_free"]
+
+
+def cryptography_has_unexpected_eof_while_reading() -> typing.List[str]:
+ return ["SSL_R_UNEXPECTED_EOF_WHILE_READING"]
+
+
+def cryptography_has_pkcs12_set_mac() -> typing.List[str]:
+ return ["PKCS12_set_mac"]
+
+
+def cryptography_has_ssl_op_ignore_unexpected_eof() -> typing.List[str]:
+ return [
+ "SSL_OP_IGNORE_UNEXPECTED_EOF",
+ ]
+
+
+def cryptography_has_get_extms_support() -> typing.List[str]:
+ return ["SSL_get_extms_support"]
+
+
+def cryptography_has_evp_pkey_set_peer_ex() -> typing.List[str]:
+ return ["EVP_PKEY_derive_set_peer_ex"]
+
+
+# This is a mapping of
+# {condition: function-returning-names-dependent-on-that-condition} so we can
+# loop over them and delete unsupported names at runtime. It will be removed
+# when cffi supports #if in cdef. We use functions instead of just a dict of
+# lists so we can use coverage to measure which are used.
+CONDITIONAL_NAMES = {
+ "Cryptography_HAS_SET_CERT_CB": cryptography_has_set_cert_cb,
+ "Cryptography_HAS_SSL_ST": cryptography_has_ssl_st,
+ "Cryptography_HAS_TLS_ST": cryptography_has_tls_st,
+ "Cryptography_HAS_SCRYPT": cryptography_has_scrypt,
+ "Cryptography_HAS_EVP_PKEY_DHX": cryptography_has_evp_pkey_dhx,
+ "Cryptography_HAS_MEM_FUNCTIONS": cryptography_has_mem_functions,
+ "Cryptography_HAS_X509_STORE_CTX_GET_ISSUER": (
+ cryptography_has_x509_store_ctx_get_issuer
+ ),
+ "Cryptography_HAS_ED448": cryptography_has_ed448,
+ "Cryptography_HAS_ED25519": cryptography_has_ed25519,
+ "Cryptography_HAS_POLY1305": cryptography_has_poly1305,
+ "Cryptography_HAS_FIPS": cryptography_has_fips,
+ "Cryptography_HAS_SIGALGS": cryptography_has_ssl_sigalgs,
+ "Cryptography_HAS_PSK": cryptography_has_psk,
+ "Cryptography_HAS_PSK_TLSv1_3": cryptography_has_psk_tlsv13,
+ "Cryptography_HAS_CUSTOM_EXT": cryptography_has_custom_ext,
+ "Cryptography_HAS_OPENSSL_CLEANUP": cryptography_has_openssl_cleanup,
+ "Cryptography_HAS_TLSv1_3_FUNCTIONS": cryptography_has_tlsv13_functions,
+ "Cryptography_HAS_RAW_KEY": cryptography_has_raw_key,
+ "Cryptography_HAS_EVP_DIGESTFINAL_XOF": (
+ cryptography_has_evp_digestfinal_xof
+ ),
+ "Cryptography_HAS_ENGINE": cryptography_has_engine,
+ "Cryptography_HAS_VERIFIED_CHAIN": cryptography_has_verified_chain,
+ "Cryptography_HAS_SRTP": cryptography_has_srtp,
+ "Cryptography_HAS_PROVIDERS": cryptography_has_providers,
+ "Cryptography_HAS_OP_NO_RENEGOTIATION": (
+ cryptography_has_op_no_renegotiation
+ ),
+ "Cryptography_HAS_DTLS_GET_DATA_MTU": cryptography_has_dtls_get_data_mtu,
+ "Cryptography_HAS_300_FIPS": cryptography_has_300_fips,
+ "Cryptography_HAS_SSL_COOKIE": cryptography_has_ssl_cookie,
+ "Cryptography_HAS_PKCS7_FUNCS": cryptography_has_pkcs7_funcs,
+ "Cryptography_HAS_BN_FLAGS": cryptography_has_bn_flags,
+ "Cryptography_HAS_EVP_PKEY_DH": cryptography_has_evp_pkey_dh,
+ "Cryptography_HAS_300_EVP_CIPHER": cryptography_has_300_evp_cipher,
+ "Cryptography_HAS_UNEXPECTED_EOF_WHILE_READING": (
+ cryptography_has_unexpected_eof_while_reading
+ ),
+ "Cryptography_HAS_PKCS12_SET_MAC": cryptography_has_pkcs12_set_mac,
+ "Cryptography_HAS_SSL_OP_IGNORE_UNEXPECTED_EOF": (
+ cryptography_has_ssl_op_ignore_unexpected_eof
+ ),
+ "Cryptography_HAS_GET_EXTMS_SUPPORT": cryptography_has_get_extms_support,
+ "Cryptography_HAS_EVP_PKEY_SET_PEER_EX": (
+ cryptography_has_evp_pkey_set_peer_ex
+ ),
+}
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/bindings/openssl/binding.py b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/bindings/openssl/binding.py
new file mode 100644
index 00000000..7327157f
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/bindings/openssl/binding.py
@@ -0,0 +1,208 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import os
+import sys
+import threading
+import types
+import typing
+import warnings
+
+import cryptography
+from cryptography import utils
+from cryptography.exceptions import InternalError
+from cryptography.hazmat.bindings._rust import _openssl, openssl
+from cryptography.hazmat.bindings.openssl._conditional import CONDITIONAL_NAMES
+
+
+def _openssl_assert(
+ lib,
+ ok: bool,
+ errors: typing.Optional[typing.List[openssl.OpenSSLError]] = None,
+) -> None:
+ if not ok:
+ if errors is None:
+ errors = openssl.capture_error_stack()
+
+ raise InternalError(
+ "Unknown OpenSSL error. This error is commonly encountered when "
+ "another library is not cleaning up the OpenSSL error stack. If "
+ "you are using cryptography with another library that uses "
+ "OpenSSL try disabling it before reporting a bug. Otherwise "
+ "please file an issue at https://github.com/pyca/cryptography/"
+ "issues with information on how to reproduce "
+ "this. ({!r})".format(errors),
+ errors,
+ )
+
+
+def _legacy_provider_error(loaded: bool) -> None:
+ if not loaded:
+ raise RuntimeError(
+ "OpenSSL 3.0's legacy provider failed to load. This is a fatal "
+ "error by default, but cryptography supports running without "
+ "legacy algorithms by setting the environment variable "
+ "CRYPTOGRAPHY_OPENSSL_NO_LEGACY. If you did not expect this error,"
+ " you have likely made a mistake with your OpenSSL configuration."
+ )
+
+
+def build_conditional_library(
+ lib: typing.Any,
+ conditional_names: typing.Dict[str, typing.Callable[[], typing.List[str]]],
+) -> typing.Any:
+ conditional_lib = types.ModuleType("lib")
+ conditional_lib._original_lib = lib # type: ignore[attr-defined]
+ excluded_names = set()
+ for condition, names_cb in conditional_names.items():
+ if not getattr(lib, condition):
+ excluded_names.update(names_cb())
+
+ for attr in dir(lib):
+ if attr not in excluded_names:
+ setattr(conditional_lib, attr, getattr(lib, attr))
+
+ return conditional_lib
+
+
+class Binding:
+ """
+ OpenSSL API wrapper.
+ """
+
+ lib: typing.ClassVar = None
+ ffi = _openssl.ffi
+ _lib_loaded = False
+ _init_lock = threading.Lock()
+ _legacy_provider: typing.Any = ffi.NULL
+ _legacy_provider_loaded = False
+ _default_provider: typing.Any = ffi.NULL
+
+ def __init__(self) -> None:
+ self._ensure_ffi_initialized()
+
+ def _enable_fips(self) -> None:
+ # This function enables FIPS mode for OpenSSL 3.0.0 on installs that
+ # have the FIPS provider installed properly.
+ _openssl_assert(self.lib, self.lib.CRYPTOGRAPHY_OPENSSL_300_OR_GREATER)
+ self._base_provider = self.lib.OSSL_PROVIDER_load(
+ self.ffi.NULL, b"base"
+ )
+ _openssl_assert(self.lib, self._base_provider != self.ffi.NULL)
+ self.lib._fips_provider = self.lib.OSSL_PROVIDER_load(
+ self.ffi.NULL, b"fips"
+ )
+ _openssl_assert(self.lib, self.lib._fips_provider != self.ffi.NULL)
+
+ res = self.lib.EVP_default_properties_enable_fips(self.ffi.NULL, 1)
+ _openssl_assert(self.lib, res == 1)
+
+ @classmethod
+ def _register_osrandom_engine(cls) -> None:
+ # Clear any errors extant in the queue before we start. In many
+ # scenarios other things may be interacting with OpenSSL in the same
+ # process space and it has proven untenable to assume that they will
+ # reliably clear the error queue. Once we clear it here we will
+ # error on any subsequent unexpected item in the stack.
+ cls.lib.ERR_clear_error()
+ if cls.lib.CRYPTOGRAPHY_NEEDS_OSRANDOM_ENGINE:
+ result = cls.lib.Cryptography_add_osrandom_engine()
+ _openssl_assert(cls.lib, result in (1, 2))
+
+ @classmethod
+ def _ensure_ffi_initialized(cls) -> None:
+ with cls._init_lock:
+ if not cls._lib_loaded:
+ cls.lib = build_conditional_library(
+ _openssl.lib, CONDITIONAL_NAMES
+ )
+ cls._lib_loaded = True
+ cls._register_osrandom_engine()
+ # As of OpenSSL 3.0.0 we must register a legacy cipher provider
+ # to get RC2 (needed for junk asymmetric private key
+ # serialization), RC4, Blowfish, IDEA, SEED, etc. These things
+ # are ugly legacy, but we aren't going to get rid of them
+ # any time soon.
+ if cls.lib.CRYPTOGRAPHY_OPENSSL_300_OR_GREATER:
+ if not os.environ.get("CRYPTOGRAPHY_OPENSSL_NO_LEGACY"):
+ cls._legacy_provider = cls.lib.OSSL_PROVIDER_load(
+ cls.ffi.NULL, b"legacy"
+ )
+ cls._legacy_provider_loaded = (
+ cls._legacy_provider != cls.ffi.NULL
+ )
+ _legacy_provider_error(cls._legacy_provider_loaded)
+
+ cls._default_provider = cls.lib.OSSL_PROVIDER_load(
+ cls.ffi.NULL, b"default"
+ )
+ _openssl_assert(
+ cls.lib, cls._default_provider != cls.ffi.NULL
+ )
+
+ @classmethod
+ def init_static_locks(cls) -> None:
+ cls._ensure_ffi_initialized()
+
+
+def _verify_package_version(version: str) -> None:
+ # Occasionally we run into situations where the version of the Python
+ # package does not match the version of the shared object that is loaded.
+ # This may occur in environments where multiple versions of cryptography
+ # are installed and available in the python path. To avoid errors cropping
+ # up later this code checks that the currently imported package and the
+ # shared object that were loaded have the same version and raise an
+ # ImportError if they do not
+ so_package_version = _openssl.ffi.string(
+ _openssl.lib.CRYPTOGRAPHY_PACKAGE_VERSION
+ )
+ if version.encode("ascii") != so_package_version:
+ raise ImportError(
+ "The version of cryptography does not match the loaded "
+ "shared object. This can happen if you have multiple copies of "
+ "cryptography installed in your Python path. Please try creating "
+ "a new virtual environment to resolve this issue. "
+ "Loaded python version: {}, shared object version: {}".format(
+ version, so_package_version
+ )
+ )
+
+ _openssl_assert(
+ _openssl.lib,
+ _openssl.lib.OpenSSL_version_num() == openssl.openssl_version(),
+ )
+
+
+_verify_package_version(cryptography.__version__)
+
+Binding.init_static_locks()
+
+if (
+ sys.platform == "win32"
+ and os.environ.get("PROCESSOR_ARCHITEW6432") is not None
+):
+ warnings.warn(
+ "You are using cryptography on a 32-bit Python on a 64-bit Windows "
+ "Operating System. Cryptography will be significantly faster if you "
+ "switch to using a 64-bit Python.",
+ UserWarning,
+ stacklevel=2,
+ )
+
+
+def _verify_openssl_version(lib):
+ if (
+ not lib.CRYPTOGRAPHY_OPENSSL_111D_OR_GREATER
+ and not lib.CRYPTOGRAPHY_IS_LIBRESSL
+ and not lib.CRYPTOGRAPHY_IS_BORINGSSL
+ ):
+ warnings.warn(
+ "Support for OpenSSL less than version 1.1.1d is deprecated and "
+ "the next release of cryptography will drop support. Please "
+ "upgrade your OpenSSL to version 1.1.1d or newer.",
+ utils.DeprecatedIn40,
+ )
+
+
+_verify_openssl_version(Binding.lib)
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/__init__.py b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/__init__.py
new file mode 100644
index 00000000..b5093362
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/__init__.py
@@ -0,0 +1,3 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/_asymmetric.py b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/_asymmetric.py
new file mode 100644
index 00000000..fb815a0e
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/_asymmetric.py
@@ -0,0 +1,17 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import abc
+
+# This exists to break an import cycle. It is normally accessible from the
+# asymmetric padding module.
+
+
+class AsymmetricPadding(metaclass=abc.ABCMeta):
+ @property
+ @abc.abstractmethod
+ def name(self) -> str:
+ """
+ A string naming this padding (e.g. "PSS", "PKCS1").
+ """
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/_cipheralgorithm.py b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/_cipheralgorithm.py
new file mode 100644
index 00000000..b36dccfb
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/_cipheralgorithm.py
@@ -0,0 +1,43 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import abc
+import typing
+
+# This exists to break an import cycle. It is normally accessible from the
+# ciphers module.
+
+
+class CipherAlgorithm(metaclass=abc.ABCMeta):
+ @property
+ @abc.abstractmethod
+ def name(self) -> str:
+ """
+ A string naming this mode (e.g. "AES", "Camellia").
+ """
+
+ @property
+ @abc.abstractmethod
+ def key_sizes(self) -> typing.FrozenSet[int]:
+ """
+ Valid key sizes for this algorithm in bits
+ """
+
+ @property
+ @abc.abstractmethod
+ def key_size(self) -> int:
+ """
+ The size of the key being used as an integer in bits (e.g. 128, 256).
+ """
+
+
+class BlockCipherAlgorithm(CipherAlgorithm):
+ key: bytes
+
+ @property
+ @abc.abstractmethod
+ def block_size(self) -> int:
+ """
+ The size of a block as an integer in bits (e.g. 64, 128).
+ """
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/_serialization.py b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/_serialization.py
new file mode 100644
index 00000000..aa41f30d
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/_serialization.py
@@ -0,0 +1,168 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import abc
+import typing
+
+from cryptography import utils
+from cryptography.hazmat.primitives.hashes import HashAlgorithm
+
+# This exists to break an import cycle. These classes are normally accessible
+# from the serialization module.
+
+
+class PBES(utils.Enum):
+ PBESv1SHA1And3KeyTripleDESCBC = "PBESv1 using SHA1 and 3-Key TripleDES"
+ PBESv2SHA256AndAES256CBC = "PBESv2 using SHA256 PBKDF2 and AES256 CBC"
+
+
+class Encoding(utils.Enum):
+ PEM = "PEM"
+ DER = "DER"
+ OpenSSH = "OpenSSH"
+ Raw = "Raw"
+ X962 = "ANSI X9.62"
+ SMIME = "S/MIME"
+
+
+class PrivateFormat(utils.Enum):
+ PKCS8 = "PKCS8"
+ TraditionalOpenSSL = "TraditionalOpenSSL"
+ Raw = "Raw"
+ OpenSSH = "OpenSSH"
+ PKCS12 = "PKCS12"
+
+ def encryption_builder(self) -> "KeySerializationEncryptionBuilder":
+ if self not in (PrivateFormat.OpenSSH, PrivateFormat.PKCS12):
+ raise ValueError(
+ "encryption_builder only supported with PrivateFormat.OpenSSH"
+ " and PrivateFormat.PKCS12"
+ )
+ return KeySerializationEncryptionBuilder(self)
+
+
+class PublicFormat(utils.Enum):
+ SubjectPublicKeyInfo = "X.509 subjectPublicKeyInfo with PKCS#1"
+ PKCS1 = "Raw PKCS#1"
+ OpenSSH = "OpenSSH"
+ Raw = "Raw"
+ CompressedPoint = "X9.62 Compressed Point"
+ UncompressedPoint = "X9.62 Uncompressed Point"
+
+
+class ParameterFormat(utils.Enum):
+ PKCS3 = "PKCS3"
+
+
+class KeySerializationEncryption(metaclass=abc.ABCMeta):
+ pass
+
+
+class BestAvailableEncryption(KeySerializationEncryption):
+ def __init__(self, password: bytes):
+ if not isinstance(password, bytes) or len(password) == 0:
+ raise ValueError("Password must be 1 or more bytes.")
+
+ self.password = password
+
+
+class NoEncryption(KeySerializationEncryption):
+ pass
+
+
+class KeySerializationEncryptionBuilder:
+ def __init__(
+ self,
+ format: PrivateFormat,
+ *,
+ _kdf_rounds: typing.Optional[int] = None,
+ _hmac_hash: typing.Optional[HashAlgorithm] = None,
+ _key_cert_algorithm: typing.Optional[PBES] = None,
+ ) -> None:
+ self._format = format
+
+ self._kdf_rounds = _kdf_rounds
+ self._hmac_hash = _hmac_hash
+ self._key_cert_algorithm = _key_cert_algorithm
+
+ def kdf_rounds(self, rounds: int) -> "KeySerializationEncryptionBuilder":
+ if self._kdf_rounds is not None:
+ raise ValueError("kdf_rounds already set")
+
+ if not isinstance(rounds, int):
+ raise TypeError("kdf_rounds must be an integer")
+
+ if rounds < 1:
+ raise ValueError("kdf_rounds must be a positive integer")
+
+ return KeySerializationEncryptionBuilder(
+ self._format,
+ _kdf_rounds=rounds,
+ _hmac_hash=self._hmac_hash,
+ _key_cert_algorithm=self._key_cert_algorithm,
+ )
+
+ def hmac_hash(
+ self, algorithm: HashAlgorithm
+ ) -> "KeySerializationEncryptionBuilder":
+ if self._format is not PrivateFormat.PKCS12:
+ raise TypeError(
+ "hmac_hash only supported with PrivateFormat.PKCS12"
+ )
+
+ if self._hmac_hash is not None:
+ raise ValueError("hmac_hash already set")
+ return KeySerializationEncryptionBuilder(
+ self._format,
+ _kdf_rounds=self._kdf_rounds,
+ _hmac_hash=algorithm,
+ _key_cert_algorithm=self._key_cert_algorithm,
+ )
+
+ def key_cert_algorithm(
+ self, algorithm: PBES
+ ) -> "KeySerializationEncryptionBuilder":
+ if self._format is not PrivateFormat.PKCS12:
+ raise TypeError(
+ "key_cert_algorithm only supported with "
+ "PrivateFormat.PKCS12"
+ )
+ if self._key_cert_algorithm is not None:
+ raise ValueError("key_cert_algorithm already set")
+ return KeySerializationEncryptionBuilder(
+ self._format,
+ _kdf_rounds=self._kdf_rounds,
+ _hmac_hash=self._hmac_hash,
+ _key_cert_algorithm=algorithm,
+ )
+
+ def build(self, password: bytes) -> KeySerializationEncryption:
+ if not isinstance(password, bytes) or len(password) == 0:
+ raise ValueError("Password must be 1 or more bytes.")
+
+ return _KeySerializationEncryption(
+ self._format,
+ password,
+ kdf_rounds=self._kdf_rounds,
+ hmac_hash=self._hmac_hash,
+ key_cert_algorithm=self._key_cert_algorithm,
+ )
+
+
+class _KeySerializationEncryption(KeySerializationEncryption):
+ def __init__(
+ self,
+ format: PrivateFormat,
+ password: bytes,
+ *,
+ kdf_rounds: typing.Optional[int],
+ hmac_hash: typing.Optional[HashAlgorithm],
+ key_cert_algorithm: typing.Optional[PBES],
+ ):
+ self._format = format
+ self.password = password
+
+ self._kdf_rounds = kdf_rounds
+ self._hmac_hash = hmac_hash
+ self._key_cert_algorithm = key_cert_algorithm
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/__init__.py b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/__init__.py
new file mode 100644
index 00000000..b5093362
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/__init__.py
@@ -0,0 +1,3 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/dh.py b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/dh.py
new file mode 100644
index 00000000..debf01e1
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/dh.py
@@ -0,0 +1,251 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+
+import abc
+import typing
+
+from cryptography.hazmat.primitives import _serialization
+
+_MIN_MODULUS_SIZE = 512
+
+
def generate_parameters(
    generator: int, key_size: int, backend: typing.Any = None
) -> "DHParameters":
    """Generate fresh DH group parameters via the OpenSSL backend.

    ``backend`` is accepted for backwards compatibility and ignored.
    """
    from cryptography.hazmat.backends.openssl.backend import (
        backend as ossl,
    )

    return ossl.generate_dh_parameters(generator, key_size)
+
+
+class DHParameterNumbers:
+ def __init__(self, p: int, g: int, q: typing.Optional[int] = None) -> None:
+ if not isinstance(p, int) or not isinstance(g, int):
+ raise TypeError("p and g must be integers")
+ if q is not None and not isinstance(q, int):
+ raise TypeError("q must be integer or None")
+
+ if g < 2:
+ raise ValueError("DH generator must be 2 or greater")
+
+ if p.bit_length() < _MIN_MODULUS_SIZE:
+ raise ValueError(
+ f"p (modulus) must be at least {_MIN_MODULUS_SIZE}-bit"
+ )
+
+ self._p = p
+ self._g = g
+ self._q = q
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, DHParameterNumbers):
+ return NotImplemented
+
+ return (
+ self._p == other._p and self._g == other._g and self._q == other._q
+ )
+
+ def parameters(self, backend: typing.Any = None) -> "DHParameters":
+ from cryptography.hazmat.backends.openssl.backend import (
+ backend as ossl,
+ )
+
+ return ossl.load_dh_parameter_numbers(self)
+
+ @property
+ def p(self) -> int:
+ return self._p
+
+ @property
+ def g(self) -> int:
+ return self._g
+
+ @property
+ def q(self) -> typing.Optional[int]:
+ return self._q
+
+
class DHPublicNumbers:
    """The public value y together with the DH group parameters."""

    def __init__(self, y: int, parameter_numbers: DHParameterNumbers) -> None:
        if not isinstance(y, int):
            raise TypeError("y must be an integer.")

        if not isinstance(parameter_numbers, DHParameterNumbers):
            raise TypeError(
                "parameters must be an instance of DHParameterNumbers."
            )

        self._y = y
        self._parameter_numbers = parameter_numbers

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, DHPublicNumbers):
            return NotImplemented
        return (self._y, self._parameter_numbers) == (
            other._y,
            other._parameter_numbers,
        )

    def public_key(self, backend: typing.Any = None) -> "DHPublicKey":
        """Load these numbers as a backend DHPublicKey."""
        from cryptography.hazmat.backends.openssl.backend import (
            backend as ossl,
        )

        return ossl.load_dh_public_numbers(self)

    @property
    def y(self) -> int:
        return self._y

    @property
    def parameter_numbers(self) -> DHParameterNumbers:
        return self._parameter_numbers
+
+
class DHPrivateNumbers:
    """The private value x together with its DHPublicNumbers."""

    def __init__(self, x: int, public_numbers: DHPublicNumbers) -> None:
        if not isinstance(x, int):
            raise TypeError("x must be an integer.")

        if not isinstance(public_numbers, DHPublicNumbers):
            raise TypeError(
                "public_numbers must be an instance of DHPublicNumbers."
            )

        self._x = x
        self._public_numbers = public_numbers

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, DHPrivateNumbers):
            return NotImplemented
        return (self._x, self._public_numbers) == (
            other._x,
            other._public_numbers,
        )

    def private_key(self, backend: typing.Any = None) -> "DHPrivateKey":
        """Load these numbers as a backend DHPrivateKey."""
        from cryptography.hazmat.backends.openssl.backend import (
            backend as ossl,
        )

        return ossl.load_dh_private_numbers(self)

    @property
    def public_numbers(self) -> DHPublicNumbers:
        return self._public_numbers

    @property
    def x(self) -> int:
        return self._x
+
+
class DHParameters(metaclass=abc.ABCMeta):
    """Abstract interface for a set of Diffie-Hellman group parameters."""

    @abc.abstractmethod
    def generate_private_key(self) -> "DHPrivateKey":
        """
        Generates and returns a DHPrivateKey.
        """

    @abc.abstractmethod
    def parameter_bytes(
        self,
        encoding: _serialization.Encoding,
        format: _serialization.ParameterFormat,
    ) -> bytes:
        """
        Returns the parameters serialized as bytes.
        """

    @abc.abstractmethod
    def parameter_numbers(self) -> DHParameterNumbers:
        """
        Returns a DHParameterNumbers.
        """


# Backwards-compatible alias; serialization support is part of
# DHParameters itself.
DHParametersWithSerialization = DHParameters


class DHPublicKey(metaclass=abc.ABCMeta):
    """Abstract interface for a DH public key."""

    @property
    @abc.abstractmethod
    def key_size(self) -> int:
        """
        The bit length of the prime modulus.
        """

    @abc.abstractmethod
    def parameters(self) -> DHParameters:
        """
        The DHParameters object associated with this public key.
        """

    @abc.abstractmethod
    def public_numbers(self) -> DHPublicNumbers:
        """
        Returns a DHPublicNumbers.
        """

    @abc.abstractmethod
    def public_bytes(
        self,
        encoding: _serialization.Encoding,
        format: _serialization.PublicFormat,
    ) -> bytes:
        """
        Returns the key serialized as bytes.
        """


# Backwards-compatible alias.
DHPublicKeyWithSerialization = DHPublicKey


class DHPrivateKey(metaclass=abc.ABCMeta):
    """Abstract interface for a DH private key."""

    @property
    @abc.abstractmethod
    def key_size(self) -> int:
        """
        The bit length of the prime modulus.
        """

    @abc.abstractmethod
    def public_key(self) -> DHPublicKey:
        """
        The DHPublicKey associated with this private key.
        """

    @abc.abstractmethod
    def parameters(self) -> DHParameters:
        """
        The DHParameters object associated with this private key.
        """

    @abc.abstractmethod
    def exchange(self, peer_public_key: DHPublicKey) -> bytes:
        """
        Given peer's DHPublicKey, carry out the key exchange and
        return shared key as bytes.
        """

    @abc.abstractmethod
    def private_numbers(self) -> DHPrivateNumbers:
        """
        Returns a DHPrivateNumbers.
        """

    @abc.abstractmethod
    def private_bytes(
        self,
        encoding: _serialization.Encoding,
        format: _serialization.PrivateFormat,
        encryption_algorithm: _serialization.KeySerializationEncryption,
    ) -> bytes:
        """
        Returns the key serialized as bytes.
        """


# Backwards-compatible alias.
DHPrivateKeyWithSerialization = DHPrivateKey
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/dsa.py b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/dsa.py
new file mode 100644
index 00000000..6103d809
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/dsa.py
@@ -0,0 +1,288 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+
+import abc
+import typing
+
+from cryptography.hazmat.primitives import _serialization, hashes
+from cryptography.hazmat.primitives.asymmetric import utils as asym_utils
+
+
class DSAParameters(metaclass=abc.ABCMeta):
    """Abstract interface for DSA domain parameters."""

    @abc.abstractmethod
    def generate_private_key(self) -> "DSAPrivateKey":
        """
        Generates and returns a DSAPrivateKey.
        """

    @abc.abstractmethod
    def parameter_numbers(self) -> "DSAParameterNumbers":
        """
        Returns a DSAParameterNumbers.
        """


# Backwards-compatible alias.
DSAParametersWithNumbers = DSAParameters


class DSAPrivateKey(metaclass=abc.ABCMeta):
    """Abstract interface for a DSA private key."""

    @property
    @abc.abstractmethod
    def key_size(self) -> int:
        """
        The bit length of the prime modulus.
        """

    @abc.abstractmethod
    def public_key(self) -> "DSAPublicKey":
        """
        The DSAPublicKey associated with this private key.
        """

    @abc.abstractmethod
    def parameters(self) -> DSAParameters:
        """
        The DSAParameters object associated with this private key.
        """

    @abc.abstractmethod
    def sign(
        self,
        data: bytes,
        algorithm: typing.Union[asym_utils.Prehashed, hashes.HashAlgorithm],
    ) -> bytes:
        """
        Signs the data
        """

    @abc.abstractmethod
    def private_numbers(self) -> "DSAPrivateNumbers":
        """
        Returns a DSAPrivateNumbers.
        """

    @abc.abstractmethod
    def private_bytes(
        self,
        encoding: _serialization.Encoding,
        format: _serialization.PrivateFormat,
        encryption_algorithm: _serialization.KeySerializationEncryption,
    ) -> bytes:
        """
        Returns the key serialized as bytes.
        """


# Backwards-compatible alias.
DSAPrivateKeyWithSerialization = DSAPrivateKey


class DSAPublicKey(metaclass=abc.ABCMeta):
    """Abstract interface for a DSA public key."""

    @property
    @abc.abstractmethod
    def key_size(self) -> int:
        """
        The bit length of the prime modulus.
        """

    @abc.abstractmethod
    def parameters(self) -> DSAParameters:
        """
        The DSAParameters object associated with this public key.
        """

    @abc.abstractmethod
    def public_numbers(self) -> "DSAPublicNumbers":
        """
        Returns a DSAPublicNumbers.
        """

    @abc.abstractmethod
    def public_bytes(
        self,
        encoding: _serialization.Encoding,
        format: _serialization.PublicFormat,
    ) -> bytes:
        """
        Returns the key serialized as bytes.
        """

    @abc.abstractmethod
    def verify(
        self,
        signature: bytes,
        data: bytes,
        algorithm: typing.Union[asym_utils.Prehashed, hashes.HashAlgorithm],
    ) -> None:
        """
        Verifies the signature of the data.
        """


# Backwards-compatible alias.
DSAPublicKeyWithSerialization = DSAPublicKey
+
+
class DSAParameterNumbers:
    """The p, q, and g domain parameters of a DSA key."""

    def __init__(self, p: int, q: int, g: int):
        if (
            not isinstance(p, int)
            or not isinstance(q, int)
            or not isinstance(g, int)
        ):
            raise TypeError(
                "DSAParameterNumbers p, q, and g arguments must be integers."
            )

        self._p = p
        self._q = q
        self._g = g

    @property
    def p(self) -> int:
        return self._p

    @property
    def q(self) -> int:
        return self._q

    @property
    def g(self) -> int:
        return self._g

    def parameters(self, backend: typing.Any = None) -> "DSAParameters":
        """Load these numbers as backend DSAParameters.

        ``backend`` is accepted for backwards compatibility and ignored.
        """
        from cryptography.hazmat.backends.openssl.backend import (
            backend as ossl,
        )

        return ossl.load_dsa_parameter_numbers(self)

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, DSAParameterNumbers):
            return NotImplemented

        return self.p == other.p and self.q == other.q and self.g == other.g

    def __repr__(self) -> str:
        # BUG FIX: the format string was empty ("".format(self=self)), so
        # repr() always returned "".  Restore the informative upstream
        # representation.
        return (
            "<DSAParameterNumbers(p={self.p}, q={self.q}, "
            "g={self.g})>".format(self=self)
        )
+
+
class DSAPublicNumbers:
    """The public value y together with its DSAParameterNumbers."""

    def __init__(self, y: int, parameter_numbers: DSAParameterNumbers):
        if not isinstance(y, int):
            raise TypeError("DSAPublicNumbers y argument must be an integer.")

        if not isinstance(parameter_numbers, DSAParameterNumbers):
            raise TypeError(
                "parameter_numbers must be a DSAParameterNumbers instance."
            )

        self._y = y
        self._parameter_numbers = parameter_numbers

    @property
    def y(self) -> int:
        return self._y

    @property
    def parameter_numbers(self) -> DSAParameterNumbers:
        return self._parameter_numbers

    def public_key(self, backend: typing.Any = None) -> DSAPublicKey:
        """Load these numbers as a backend DSAPublicKey.

        ``backend`` is accepted for backwards compatibility and ignored.
        """
        from cryptography.hazmat.backends.openssl.backend import (
            backend as ossl,
        )

        return ossl.load_dsa_public_numbers(self)

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, DSAPublicNumbers):
            return NotImplemented

        return (
            self.y == other.y
            and self.parameter_numbers == other.parameter_numbers
        )

    def __repr__(self) -> str:
        # BUG FIX: the format string was empty ("".format(self=self)), so
        # repr() always returned "".  Restore the informative upstream
        # representation.
        return (
            "<DSAPublicNumbers(y={self.y}, "
            "parameter_numbers={self.parameter_numbers})>".format(self=self)
        )
+
+
class DSAPrivateNumbers:
    """The private value x together with its DSAPublicNumbers."""

    def __init__(self, x: int, public_numbers: DSAPublicNumbers):
        if not isinstance(x, int):
            raise TypeError("DSAPrivateNumbers x argument must be an integer.")

        if not isinstance(public_numbers, DSAPublicNumbers):
            raise TypeError(
                "public_numbers must be a DSAPublicNumbers instance."
            )
        self._x = x
        self._public_numbers = public_numbers

    @property
    def x(self) -> int:
        return self._x

    @property
    def public_numbers(self) -> DSAPublicNumbers:
        return self._public_numbers

    def private_key(self, backend: typing.Any = None) -> DSAPrivateKey:
        """Load these numbers as a backend DSAPrivateKey."""
        from cryptography.hazmat.backends.openssl.backend import (
            backend as ossl,
        )

        return ossl.load_dsa_private_numbers(self)

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, DSAPrivateNumbers):
            return NotImplemented
        return (self.x, self.public_numbers) == (
            other.x,
            other.public_numbers,
        )
+
+
def generate_parameters(
    key_size: int, backend: typing.Any = None
) -> DSAParameters:
    """Generate fresh DSA domain parameters of ``key_size`` bits.

    ``backend`` is accepted for backwards compatibility and ignored.
    """
    from cryptography.hazmat.backends.openssl.backend import (
        backend as ossl,
    )

    return ossl.generate_dsa_parameters(key_size)


def generate_private_key(
    key_size: int, backend: typing.Any = None
) -> DSAPrivateKey:
    """Generate a new DSA private key (with fresh parameters) of
    ``key_size`` bits.

    ``backend`` is accepted for backwards compatibility and ignored.
    """
    from cryptography.hazmat.backends.openssl.backend import (
        backend as ossl,
    )

    return ossl.generate_dsa_private_key_and_parameters(key_size)
+
+
def _check_dsa_parameters(parameters: DSAParameterNumbers) -> None:
    """Reject DSA domain parameters with non-standard sizes or a bad g."""
    p_bits = parameters.p.bit_length()
    if p_bits not in (1024, 2048, 3072, 4096):
        raise ValueError(
            "p must be exactly 1024, 2048, 3072, or 4096 bits long"
        )
    if parameters.q.bit_length() not in (160, 224, 256):
        raise ValueError("q must be exactly 160, 224, or 256 bits long")

    if not 1 < parameters.g < parameters.p:
        raise ValueError("g, p don't satisfy 1 < g < p.")
+
+
def _check_dsa_private_numbers(numbers: DSAPrivateNumbers) -> None:
    """Validate a complete DSA private-number set (parameters, x, y)."""
    pub = numbers.public_numbers
    params = pub.parameter_numbers
    _check_dsa_parameters(params)

    if not 0 < numbers.x < params.q:
        raise ValueError("x must be > 0 and < q.")

    # y must be the public value corresponding to x.
    if pub.y != pow(params.g, numbers.x, params.p):
        raise ValueError("y must be equal to (g ** x % p).")
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/ec.py b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/ec.py
new file mode 100644
index 00000000..c5df2c27
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/ec.py
@@ -0,0 +1,483 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+
+import abc
+import typing
+
+from cryptography import utils
+from cryptography.hazmat._oid import ObjectIdentifier
+from cryptography.hazmat.primitives import _serialization, hashes
+from cryptography.hazmat.primitives.asymmetric import utils as asym_utils
+
+
class EllipticCurveOID:
    """Dotted-decimal object identifiers for the supported named curves."""

    SECP192R1 = ObjectIdentifier("1.2.840.10045.3.1.1")
    SECP224R1 = ObjectIdentifier("1.3.132.0.33")
    SECP256K1 = ObjectIdentifier("1.3.132.0.10")
    SECP256R1 = ObjectIdentifier("1.2.840.10045.3.1.7")
    SECP384R1 = ObjectIdentifier("1.3.132.0.34")
    SECP521R1 = ObjectIdentifier("1.3.132.0.35")
    BRAINPOOLP256R1 = ObjectIdentifier("1.3.36.3.3.2.8.1.1.7")
    BRAINPOOLP384R1 = ObjectIdentifier("1.3.36.3.3.2.8.1.1.11")
    BRAINPOOLP512R1 = ObjectIdentifier("1.3.36.3.3.2.8.1.1.13")
    SECT163K1 = ObjectIdentifier("1.3.132.0.1")
    SECT163R2 = ObjectIdentifier("1.3.132.0.15")
    SECT233K1 = ObjectIdentifier("1.3.132.0.26")
    SECT233R1 = ObjectIdentifier("1.3.132.0.27")
    SECT283K1 = ObjectIdentifier("1.3.132.0.16")
    SECT283R1 = ObjectIdentifier("1.3.132.0.17")
    SECT409K1 = ObjectIdentifier("1.3.132.0.36")
    SECT409R1 = ObjectIdentifier("1.3.132.0.37")
    SECT571K1 = ObjectIdentifier("1.3.132.0.38")
    SECT571R1 = ObjectIdentifier("1.3.132.0.39")
+
+
class EllipticCurve(metaclass=abc.ABCMeta):
    """Abstract description of a named elliptic curve."""

    @property
    @abc.abstractmethod
    def name(self) -> str:
        """
        The name of the curve. e.g. secp256r1.
        """

    @property
    @abc.abstractmethod
    def key_size(self) -> int:
        """
        Bit size of a secret scalar for the curve.
        """


class EllipticCurveSignatureAlgorithm(metaclass=abc.ABCMeta):
    """Abstract interface for an EC signature algorithm (e.g. ECDSA)."""

    @property
    @abc.abstractmethod
    def algorithm(
        self,
    ) -> typing.Union[asym_utils.Prehashed, hashes.HashAlgorithm]:
        """
        The digest algorithm used with this signature.
        """


class EllipticCurvePrivateKey(metaclass=abc.ABCMeta):
    """Abstract interface for an elliptic curve private key."""

    @abc.abstractmethod
    def exchange(
        self, algorithm: "ECDH", peer_public_key: "EllipticCurvePublicKey"
    ) -> bytes:
        """
        Performs a key exchange operation using the provided algorithm with the
        provided peer's public key.
        """

    @abc.abstractmethod
    def public_key(self) -> "EllipticCurvePublicKey":
        """
        The EllipticCurvePublicKey for this private key.
        """

    @property
    @abc.abstractmethod
    def curve(self) -> EllipticCurve:
        """
        The EllipticCurve that this key is on.
        """

    @property
    @abc.abstractmethod
    def key_size(self) -> int:
        """
        Bit size of a secret scalar for the curve.
        """

    @abc.abstractmethod
    def sign(
        self,
        data: bytes,
        signature_algorithm: EllipticCurveSignatureAlgorithm,
    ) -> bytes:
        """
        Signs the data
        """

    @abc.abstractmethod
    def private_numbers(self) -> "EllipticCurvePrivateNumbers":
        """
        Returns an EllipticCurvePrivateNumbers.
        """

    @abc.abstractmethod
    def private_bytes(
        self,
        encoding: _serialization.Encoding,
        format: _serialization.PrivateFormat,
        encryption_algorithm: _serialization.KeySerializationEncryption,
    ) -> bytes:
        """
        Returns the key serialized as bytes.
        """


# Backwards-compatible alias.
EllipticCurvePrivateKeyWithSerialization = EllipticCurvePrivateKey


class EllipticCurvePublicKey(metaclass=abc.ABCMeta):
    """Abstract interface for an elliptic curve public key."""

    @property
    @abc.abstractmethod
    def curve(self) -> EllipticCurve:
        """
        The EllipticCurve that this key is on.
        """

    @property
    @abc.abstractmethod
    def key_size(self) -> int:
        """
        Bit size of a secret scalar for the curve.
        """

    @abc.abstractmethod
    def public_numbers(self) -> "EllipticCurvePublicNumbers":
        """
        Returns an EllipticCurvePublicNumbers.
        """

    @abc.abstractmethod
    def public_bytes(
        self,
        encoding: _serialization.Encoding,
        format: _serialization.PublicFormat,
    ) -> bytes:
        """
        Returns the key serialized as bytes.
        """

    @abc.abstractmethod
    def verify(
        self,
        signature: bytes,
        data: bytes,
        signature_algorithm: EllipticCurveSignatureAlgorithm,
    ) -> None:
        """
        Verifies the signature of the data.
        """

    @classmethod
    def from_encoded_point(
        cls, curve: EllipticCurve, data: bytes
    ) -> "EllipticCurvePublicKey":
        """
        Decodes a public key from an encoded point on ``curve``.

        The leading byte selects the point form: 0x02/0x03 (compressed)
        or 0x04 (uncompressed); anything else is rejected here, with full
        validation performed by the OpenSSL backend.
        """
        utils._check_bytes("data", data)

        if not isinstance(curve, EllipticCurve):
            raise TypeError("curve must be an EllipticCurve instance")

        if len(data) == 0:
            raise ValueError("data must not be an empty byte string")

        if data[0] not in [0x02, 0x03, 0x04]:
            raise ValueError("Unsupported elliptic curve point type")

        from cryptography.hazmat.backends.openssl.backend import backend

        return backend.load_elliptic_curve_public_bytes(curve, data)


# Backwards-compatible alias.
EllipticCurvePublicKeyWithSerialization = EllipticCurvePublicKey
+
+
# Concrete named curves.  ``key_size`` is the bit size of a secret scalar
# for the curve.  NOTE(review): sect571r1 is declared with key_size 570
# while sect571k1 uses 571 — this matches the values as written here;
# confirm against upstream before "fixing".
class SECT571R1(EllipticCurve):
    name = "sect571r1"
    key_size = 570


class SECT409R1(EllipticCurve):
    name = "sect409r1"
    key_size = 409


class SECT283R1(EllipticCurve):
    name = "sect283r1"
    key_size = 283


class SECT233R1(EllipticCurve):
    name = "sect233r1"
    key_size = 233


class SECT163R2(EllipticCurve):
    name = "sect163r2"
    key_size = 163


class SECT571K1(EllipticCurve):
    name = "sect571k1"
    key_size = 571


class SECT409K1(EllipticCurve):
    name = "sect409k1"
    key_size = 409


class SECT283K1(EllipticCurve):
    name = "sect283k1"
    key_size = 283


class SECT233K1(EllipticCurve):
    name = "sect233k1"
    key_size = 233


class SECT163K1(EllipticCurve):
    name = "sect163k1"
    key_size = 163


class SECP521R1(EllipticCurve):
    name = "secp521r1"
    key_size = 521


class SECP384R1(EllipticCurve):
    name = "secp384r1"
    key_size = 384


class SECP256R1(EllipticCurve):
    name = "secp256r1"
    key_size = 256


class SECP256K1(EllipticCurve):
    name = "secp256k1"
    key_size = 256


class SECP224R1(EllipticCurve):
    name = "secp224r1"
    key_size = 224


class SECP192R1(EllipticCurve):
    name = "secp192r1"
    key_size = 192


class BrainpoolP256R1(EllipticCurve):
    name = "brainpoolP256r1"
    key_size = 256


class BrainpoolP384R1(EllipticCurve):
    name = "brainpoolP384r1"
    key_size = 384


class BrainpoolP512R1(EllipticCurve):
    name = "brainpoolP512r1"
    key_size = 512
+
+
# Maps curve-name strings to curve classes.  "prime192v1" and "prime256v1"
# are alternate names for the same curves (both map to SECP192R1/SECP256R1).
_CURVE_TYPES: typing.Dict[str, typing.Type[EllipticCurve]] = {
    "prime192v1": SECP192R1,
    "prime256v1": SECP256R1,
    "secp192r1": SECP192R1,
    "secp224r1": SECP224R1,
    "secp256r1": SECP256R1,
    "secp384r1": SECP384R1,
    "secp521r1": SECP521R1,
    "secp256k1": SECP256K1,
    "sect163k1": SECT163K1,
    "sect233k1": SECT233K1,
    "sect283k1": SECT283K1,
    "sect409k1": SECT409K1,
    "sect571k1": SECT571K1,
    "sect163r2": SECT163R2,
    "sect233r1": SECT233R1,
    "sect283r1": SECT283R1,
    "sect409r1": SECT409R1,
    "sect571r1": SECT571R1,
    "brainpoolP256r1": BrainpoolP256R1,
    "brainpoolP384r1": BrainpoolP384R1,
    "brainpoolP512r1": BrainpoolP512R1,
}
+
+
class ECDSA(EllipticCurveSignatureAlgorithm):
    """ECDSA signing, parameterized by a hash algorithm (or Prehashed)."""

    def __init__(
        self,
        algorithm: typing.Union[asym_utils.Prehashed, hashes.HashAlgorithm],
    ):
        # Stored as-is; validation happens in the backend at sign/verify.
        self._algorithm = algorithm

    @property
    def algorithm(
        self,
    ) -> typing.Union[asym_utils.Prehashed, hashes.HashAlgorithm]:
        """The digest (or Prehashed wrapper) chosen at construction."""
        return self._algorithm
+
+
def generate_private_key(
    curve: EllipticCurve, backend: typing.Any = None
) -> EllipticCurvePrivateKey:
    """Generate a new private key on ``curve`` via the OpenSSL backend.

    ``backend`` is accepted for backwards compatibility and ignored.
    """
    from cryptography.hazmat.backends.openssl.backend import (
        backend as ossl,
    )

    return ossl.generate_elliptic_curve_private_key(curve)
+
+
def derive_private_key(
    private_value: int,
    curve: EllipticCurve,
    backend: typing.Any = None,
) -> EllipticCurvePrivateKey:
    """Derive the EC private key for the given positive scalar on ``curve``.

    Raises TypeError/ValueError for malformed arguments; ``backend`` is
    accepted for backwards compatibility and ignored.
    """
    from cryptography.hazmat.backends.openssl.backend import (
        backend as ossl,
    )

    if not isinstance(private_value, int):
        raise TypeError("private_value must be an integer type.")
    if private_value <= 0:
        raise ValueError("private_value must be a positive integer.")
    if not isinstance(curve, EllipticCurve):
        raise TypeError("curve must provide the EllipticCurve interface.")

    return ossl.derive_elliptic_curve_private_key(private_value, curve)
+
+
class EllipticCurvePublicNumbers:
    """The affine point (x, y) of a public key on a given curve."""

    def __init__(self, x: int, y: int, curve: EllipticCurve):
        if not isinstance(x, int) or not isinstance(y, int):
            raise TypeError("x and y must be integers.")

        if not isinstance(curve, EllipticCurve):
            raise TypeError("curve must provide the EllipticCurve interface.")

        self._y = y
        self._x = x
        self._curve = curve

    def public_key(self, backend: typing.Any = None) -> EllipticCurvePublicKey:
        """Load these numbers as a backend EllipticCurvePublicKey.

        ``backend`` is accepted for backwards compatibility and ignored.
        """
        from cryptography.hazmat.backends.openssl.backend import (
            backend as ossl,
        )

        return ossl.load_elliptic_curve_public_numbers(self)

    @property
    def curve(self) -> EllipticCurve:
        return self._curve

    @property
    def x(self) -> int:
        return self._x

    @property
    def y(self) -> int:
        return self._y

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, EllipticCurvePublicNumbers):
            return NotImplemented

        return (
            self.x == other.x
            and self.y == other.y
            and self.curve.name == other.curve.name
            and self.curve.key_size == other.curve.key_size
        )

    def __hash__(self) -> int:
        return hash((self.x, self.y, self.curve.name, self.curve.key_size))

    def __repr__(self) -> str:
        # BUG FIX: the format string was empty ("".format(self)), so repr()
        # always returned "".  Restore the informative representation.
        return (
            "<EllipticCurvePublicNumbers(curve={0.curve.name}, x={0.x}, "
            "y={0.y})>".format(self)
        )
+
+
class EllipticCurvePrivateNumbers:
    """The private scalar together with its EllipticCurvePublicNumbers."""

    def __init__(
        self, private_value: int, public_numbers: EllipticCurvePublicNumbers
    ):
        if not isinstance(private_value, int):
            raise TypeError("private_value must be an integer.")

        if not isinstance(public_numbers, EllipticCurvePublicNumbers):
            raise TypeError(
                "public_numbers must be an EllipticCurvePublicNumbers "
                "instance."
            )

        self._private_value = private_value
        self._public_numbers = public_numbers

    def private_key(
        self, backend: typing.Any = None
    ) -> EllipticCurvePrivateKey:
        """Load these numbers as a backend EllipticCurvePrivateKey."""
        from cryptography.hazmat.backends.openssl.backend import (
            backend as ossl,
        )

        return ossl.load_elliptic_curve_private_numbers(self)

    @property
    def private_value(self) -> int:
        return self._private_value

    @property
    def public_numbers(self) -> EllipticCurvePublicNumbers:
        return self._public_numbers

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, EllipticCurvePrivateNumbers):
            return NotImplemented
        return (self.private_value, self.public_numbers) == (
            other.private_value,
            other.public_numbers,
        )

    def __hash__(self) -> int:
        return hash((self.private_value, self.public_numbers))
+
+
class ECDH:
    # Marker type: selects the Elliptic Curve Diffie-Hellman exchange
    # algorithm when passed to EllipticCurvePrivateKey.exchange().
    pass
+
+
# Maps each curve OID from EllipticCurveOID to its curve class; consumed
# by get_curve_for_oid().
_OID_TO_CURVE = {
    EllipticCurveOID.SECP192R1: SECP192R1,
    EllipticCurveOID.SECP224R1: SECP224R1,
    EllipticCurveOID.SECP256K1: SECP256K1,
    EllipticCurveOID.SECP256R1: SECP256R1,
    EllipticCurveOID.SECP384R1: SECP384R1,
    EllipticCurveOID.SECP521R1: SECP521R1,
    EllipticCurveOID.BRAINPOOLP256R1: BrainpoolP256R1,
    EllipticCurveOID.BRAINPOOLP384R1: BrainpoolP384R1,
    EllipticCurveOID.BRAINPOOLP512R1: BrainpoolP512R1,
    EllipticCurveOID.SECT163K1: SECT163K1,
    EllipticCurveOID.SECT163R2: SECT163R2,
    EllipticCurveOID.SECT233K1: SECT233K1,
    EllipticCurveOID.SECT233R1: SECT233R1,
    EllipticCurveOID.SECT283K1: SECT283K1,
    EllipticCurveOID.SECT283R1: SECT283R1,
    EllipticCurveOID.SECT409K1: SECT409K1,
    EllipticCurveOID.SECT409R1: SECT409R1,
    EllipticCurveOID.SECT571K1: SECT571K1,
    EllipticCurveOID.SECT571R1: SECT571R1,
}
+
+
def get_curve_for_oid(oid: ObjectIdentifier) -> typing.Type[EllipticCurve]:
    """Return the curve class registered for ``oid``.

    Raises LookupError if the OID names no supported curve.
    """
    curve_cls = _OID_TO_CURVE.get(oid)
    if curve_cls is None:
        raise LookupError(
            "The provided object identifier has no matching elliptic "
            "curve class"
        )
    return curve_cls
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/ed25519.py b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/ed25519.py
new file mode 100644
index 00000000..df34159e
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/ed25519.py
@@ -0,0 +1,111 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+
+import abc
+
+from cryptography.exceptions import UnsupportedAlgorithm, _Reasons
+from cryptography.hazmat.primitives import _serialization
+
+_ED25519_KEY_SIZE = 32
+_ED25519_SIG_SIZE = 64
+
+
class Ed25519PublicKey(metaclass=abc.ABCMeta):
    """Abstract interface for an Ed25519 public key."""

    @classmethod
    def from_public_bytes(cls, data: bytes) -> "Ed25519PublicKey":
        """
        Loads a public key from its raw byte encoding.

        Raises UnsupportedAlgorithm if the linked OpenSSL has no ed25519
        support; byte validation is performed by the backend.
        """
        from cryptography.hazmat.backends.openssl.backend import backend

        if not backend.ed25519_supported():
            raise UnsupportedAlgorithm(
                "ed25519 is not supported by this version of OpenSSL.",
                _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM,
            )

        return backend.ed25519_load_public_bytes(data)

    @abc.abstractmethod
    def public_bytes(
        self,
        encoding: _serialization.Encoding,
        format: _serialization.PublicFormat,
    ) -> bytes:
        """
        The serialized bytes of the public key.
        """

    def public_bytes_raw(self) -> bytes:
        """
        The raw bytes of the public key.
        Equivalent to public_bytes(Raw, Raw).
        """
        return self.public_bytes(
            _serialization.Encoding.Raw, _serialization.PublicFormat.Raw
        )

    @abc.abstractmethod
    def verify(self, signature: bytes, data: bytes) -> None:
        """
        Verify the signature.
        """


class Ed25519PrivateKey(metaclass=abc.ABCMeta):
    """Abstract interface for an Ed25519 private key."""

    @classmethod
    def generate(cls) -> "Ed25519PrivateKey":
        """
        Generates a new random Ed25519 private key via the OpenSSL backend.
        """
        from cryptography.hazmat.backends.openssl.backend import backend

        if not backend.ed25519_supported():
            raise UnsupportedAlgorithm(
                "ed25519 is not supported by this version of OpenSSL.",
                _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM,
            )

        return backend.ed25519_generate_key()

    @classmethod
    def from_private_bytes(cls, data: bytes) -> "Ed25519PrivateKey":
        """
        Loads a private key from its raw byte encoding.
        """
        from cryptography.hazmat.backends.openssl.backend import backend

        if not backend.ed25519_supported():
            raise UnsupportedAlgorithm(
                "ed25519 is not supported by this version of OpenSSL.",
                _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM,
            )

        return backend.ed25519_load_private_bytes(data)

    @abc.abstractmethod
    def public_key(self) -> Ed25519PublicKey:
        """
        The Ed25519PublicKey derived from the private key.
        """

    @abc.abstractmethod
    def private_bytes(
        self,
        encoding: _serialization.Encoding,
        format: _serialization.PrivateFormat,
        encryption_algorithm: _serialization.KeySerializationEncryption,
    ) -> bytes:
        """
        The serialized bytes of the private key.
        """

    def private_bytes_raw(self) -> bytes:
        """
        The raw bytes of the private key.
        Equivalent to private_bytes(Raw, Raw, NoEncryption()).
        """
        return self.private_bytes(
            _serialization.Encoding.Raw,
            _serialization.PrivateFormat.Raw,
            _serialization.NoEncryption(),
        )

    @abc.abstractmethod
    def sign(self, data: bytes) -> bytes:
        """
        Signs the data.
        """
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/ed448.py b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/ed448.py
new file mode 100644
index 00000000..8b0ac1fd
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/ed448.py
@@ -0,0 +1,107 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+
+import abc
+
+from cryptography.exceptions import UnsupportedAlgorithm, _Reasons
+from cryptography.hazmat.primitives import _serialization
+
+
class Ed448PublicKey(metaclass=abc.ABCMeta):
    """Abstract interface for an Ed448 public key."""

    @classmethod
    def from_public_bytes(cls, data: bytes) -> "Ed448PublicKey":
        """
        Loads a public key from its raw byte encoding.

        Raises UnsupportedAlgorithm if the linked OpenSSL has no ed448
        support; byte validation is performed by the backend.
        """
        from cryptography.hazmat.backends.openssl.backend import backend

        if not backend.ed448_supported():
            raise UnsupportedAlgorithm(
                "ed448 is not supported by this version of OpenSSL.",
                _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM,
            )

        return backend.ed448_load_public_bytes(data)

    @abc.abstractmethod
    def public_bytes(
        self,
        encoding: _serialization.Encoding,
        format: _serialization.PublicFormat,
    ) -> bytes:
        """
        The serialized bytes of the public key.
        """

    def public_bytes_raw(self) -> bytes:
        """
        The raw bytes of the public key.
        Equivalent to public_bytes(Raw, Raw).
        """
        return self.public_bytes(
            _serialization.Encoding.Raw, _serialization.PublicFormat.Raw
        )

    @abc.abstractmethod
    def verify(self, signature: bytes, data: bytes) -> None:
        """
        Verify the signature.
        """


class Ed448PrivateKey(metaclass=abc.ABCMeta):
    """Abstract interface for an Ed448 private key."""

    @classmethod
    def generate(cls) -> "Ed448PrivateKey":
        """
        Generates a new random Ed448 private key via the OpenSSL backend.
        """
        from cryptography.hazmat.backends.openssl.backend import backend

        if not backend.ed448_supported():
            raise UnsupportedAlgorithm(
                "ed448 is not supported by this version of OpenSSL.",
                _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM,
            )
        return backend.ed448_generate_key()

    @classmethod
    def from_private_bytes(cls, data: bytes) -> "Ed448PrivateKey":
        """
        Loads a private key from its raw byte encoding.
        """
        from cryptography.hazmat.backends.openssl.backend import backend

        if not backend.ed448_supported():
            raise UnsupportedAlgorithm(
                "ed448 is not supported by this version of OpenSSL.",
                _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM,
            )

        return backend.ed448_load_private_bytes(data)

    @abc.abstractmethod
    def public_key(self) -> Ed448PublicKey:
        """
        The Ed448PublicKey derived from the private key.
        """

    @abc.abstractmethod
    def sign(self, data: bytes) -> bytes:
        """
        Signs the data.
        """

    @abc.abstractmethod
    def private_bytes(
        self,
        encoding: _serialization.Encoding,
        format: _serialization.PrivateFormat,
        encryption_algorithm: _serialization.KeySerializationEncryption,
    ) -> bytes:
        """
        The serialized bytes of the private key.
        """

    def private_bytes_raw(self) -> bytes:
        """
        The raw bytes of the private key.
        Equivalent to private_bytes(Raw, Raw, NoEncryption()).
        """
        return self.private_bytes(
            _serialization.Encoding.Raw,
            _serialization.PrivateFormat.Raw,
            _serialization.NoEncryption(),
        )
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/padding.py b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/padding.py
new file mode 100644
index 00000000..dd3c648f
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/padding.py
@@ -0,0 +1,101 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+
+import abc
+import typing
+
+from cryptography.hazmat.primitives import hashes
+from cryptography.hazmat.primitives._asymmetric import (
+ AsymmetricPadding as AsymmetricPadding,
+)
+from cryptography.hazmat.primitives.asymmetric import rsa
+
+
class PKCS1v15(AsymmetricPadding):
    # Stateless marker for PKCS#1 v1.5 padding; backends dispatch on `name`.
    name = "EMSA-PKCS1-v1_5"
+
+
# Distinct sentinel types so PSS/MGF1 salt-length options can be told
# apart with isinstance() checks (an int would be a literal salt length).
class _MaxLength:
    "Sentinel value for `MAX_LENGTH`."


class _Auto:
    "Sentinel value for `AUTO`."


class _DigestLength:
    "Sentinel value for `DIGEST_LENGTH`."
+
+
class PSS(AsymmetricPadding):
    """EMSA-PSS signature padding parameterized by an MGF and salt length."""

    name = "EMSA-PSS"
    MAX_LENGTH = _MaxLength()
    DIGEST_LENGTH = _DigestLength()
    AUTO = _Auto()
    _salt_length: typing.Union[int, _MaxLength, _Auto, _DigestLength]

    def __init__(
        self,
        mgf: "MGF",
        salt_length: typing.Union[int, _MaxLength, _Auto, _DigestLength],
    ) -> None:
        self._mgf = mgf

        # Either a concrete non-negative length or one of the sentinels.
        accepted = (int, _MaxLength, _Auto, _DigestLength)
        if not isinstance(salt_length, accepted):
            raise TypeError(
                "salt_length must be an integer, MAX_LENGTH, "
                "DIGEST_LENGTH, or AUTO"
            )

        if isinstance(salt_length, int) and salt_length < 0:
            raise ValueError("salt_length must be zero or greater.")

        self._salt_length = salt_length
+
+
class OAEP(AsymmetricPadding):
    """EME-OAEP encryption padding: an MGF, a hash, and an optional label."""

    name = "EME-OAEP"

    def __init__(
        self,
        mgf: "MGF",
        algorithm: hashes.HashAlgorithm,
        label: typing.Optional[bytes],
    ):
        # Only the hash is type-checked here; the MGF is validated when
        # the padding is actually used.
        if not isinstance(algorithm, hashes.HashAlgorithm):
            raise TypeError("Expected instance of hashes.HashAlgorithm.")

        self._algorithm = algorithm
        self._mgf = mgf
        self._label = label
+
+
class MGF(metaclass=abc.ABCMeta):
    # Abstract base for mask generation functions; concrete subclasses
    # (e.g. MGF1) carry the hash algorithm used for mask generation.
    _algorithm: hashes.HashAlgorithm
+
+
class MGF1(MGF):
    """MGF1 mask generation function built on a hash algorithm."""

    MAX_LENGTH = _MaxLength()

    def __init__(self, algorithm: hashes.HashAlgorithm):
        # Reject anything that is not a real hash algorithm instance.
        if not isinstance(algorithm, hashes.HashAlgorithm):
            raise TypeError("Expected instance of hashes.HashAlgorithm.")
        self._algorithm = algorithm
+
+
def calculate_max_pss_salt_length(
    key: typing.Union["rsa.RSAPrivateKey", "rsa.RSAPublicKey"],
    hash_algorithm: hashes.HashAlgorithm,
) -> int:
    """Return the largest PSS salt length usable with *key* and *hash_algorithm*."""
    if not isinstance(key, (rsa.RSAPrivateKey, rsa.RSAPublicKey)):
        raise TypeError("key must be an RSA public or private key")
    # emBits = modBits - 1 per RFC 3447, so emLen = ceil((modBits - 1) / 8),
    # which simplifies to (key_size + 6) // 8.
    em_len = (key.key_size + 6) // 8
    # emLen must leave room for the digest plus the 0xBC byte and separator.
    max_salt = em_len - hash_algorithm.digest_size - 2
    assert max_salt >= 0
    return max_salt
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/rsa.py b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/rsa.py
new file mode 100644
index 00000000..81f5a0ec
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/rsa.py
@@ -0,0 +1,432 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+
+import abc
+import typing
+from math import gcd
+
+from cryptography.hazmat.primitives import _serialization, hashes
+from cryptography.hazmat.primitives._asymmetric import AsymmetricPadding
+from cryptography.hazmat.primitives.asymmetric import utils as asym_utils
+
+
class RSAPrivateKey(metaclass=abc.ABCMeta):
    """Abstract interface implemented by every RSA private key object."""

    @abc.abstractmethod
    def decrypt(self, ciphertext: bytes, padding: AsymmetricPadding) -> bytes:
        """
        Decrypts the provided ciphertext.
        """

    @property
    @abc.abstractmethod
    def key_size(self) -> int:
        """
        The bit length of the public modulus.
        """

    @abc.abstractmethod
    def public_key(self) -> "RSAPublicKey":
        """
        The RSAPublicKey associated with this private key.
        """

    @abc.abstractmethod
    def sign(
        self,
        data: bytes,
        padding: AsymmetricPadding,
        algorithm: typing.Union[asym_utils.Prehashed, hashes.HashAlgorithm],
    ) -> bytes:
        """
        Signs the data.
        """

    @abc.abstractmethod
    def private_numbers(self) -> "RSAPrivateNumbers":
        """
        Returns an RSAPrivateNumbers.
        """

    @abc.abstractmethod
    def private_bytes(
        self,
        encoding: _serialization.Encoding,
        format: _serialization.PrivateFormat,
        encryption_algorithm: _serialization.KeySerializationEncryption,
    ) -> bytes:
        """
        Returns the key serialized as bytes.
        """
+
+
+RSAPrivateKeyWithSerialization = RSAPrivateKey
+
+
class RSAPublicKey(metaclass=abc.ABCMeta):
    """Abstract interface implemented by every RSA public key object."""

    @abc.abstractmethod
    def encrypt(self, plaintext: bytes, padding: AsymmetricPadding) -> bytes:
        """
        Encrypts the given plaintext.
        """

    @property
    @abc.abstractmethod
    def key_size(self) -> int:
        """
        The bit length of the public modulus.
        """

    @abc.abstractmethod
    def public_numbers(self) -> "RSAPublicNumbers":
        """
        Returns an RSAPublicNumbers
        """

    @abc.abstractmethod
    def public_bytes(
        self,
        encoding: _serialization.Encoding,
        format: _serialization.PublicFormat,
    ) -> bytes:
        """
        Returns the key serialized as bytes.
        """

    @abc.abstractmethod
    def verify(
        self,
        signature: bytes,
        data: bytes,
        padding: AsymmetricPadding,
        algorithm: typing.Union[asym_utils.Prehashed, hashes.HashAlgorithm],
    ) -> None:
        """
        Verifies the signature of the data.
        """

    @abc.abstractmethod
    def recover_data_from_signature(
        self,
        signature: bytes,
        padding: AsymmetricPadding,
        algorithm: typing.Optional[hashes.HashAlgorithm],
    ) -> bytes:
        """
        Recovers the original data from the signature.
        """
+
+
+RSAPublicKeyWithSerialization = RSAPublicKey
+
+
def generate_private_key(
    public_exponent: int,
    key_size: int,
    backend: typing.Any = None,
) -> RSAPrivateKey:
    """
    Generate a new RSA private key.

    ``backend`` is ignored and retained only for backward compatibility.
    """
    from cryptography.hazmat.backends.openssl.backend import backend as ossl

    # Validate e and the modulus size before handing off to OpenSSL.
    _verify_rsa_parameters(public_exponent, key_size)
    return ossl.generate_rsa_private_key(public_exponent, key_size)
+
+
+def _verify_rsa_parameters(public_exponent: int, key_size: int) -> None:
+ if public_exponent not in (3, 65537):
+ raise ValueError(
+ "public_exponent must be either 3 (for legacy compatibility) or "
+ "65537. Almost everyone should choose 65537 here!"
+ )
+
+ if key_size < 512:
+ raise ValueError("key_size must be at least 512-bits.")
+
+
+def _check_private_key_components(
+ p: int,
+ q: int,
+ private_exponent: int,
+ dmp1: int,
+ dmq1: int,
+ iqmp: int,
+ public_exponent: int,
+ modulus: int,
+) -> None:
+ if modulus < 3:
+ raise ValueError("modulus must be >= 3.")
+
+ if p >= modulus:
+ raise ValueError("p must be < modulus.")
+
+ if q >= modulus:
+ raise ValueError("q must be < modulus.")
+
+ if dmp1 >= modulus:
+ raise ValueError("dmp1 must be < modulus.")
+
+ if dmq1 >= modulus:
+ raise ValueError("dmq1 must be < modulus.")
+
+ if iqmp >= modulus:
+ raise ValueError("iqmp must be < modulus.")
+
+ if private_exponent >= modulus:
+ raise ValueError("private_exponent must be < modulus.")
+
+ if public_exponent < 3 or public_exponent >= modulus:
+ raise ValueError("public_exponent must be >= 3 and < modulus.")
+
+ if public_exponent & 1 == 0:
+ raise ValueError("public_exponent must be odd.")
+
+ if dmp1 & 1 == 0:
+ raise ValueError("dmp1 must be odd.")
+
+ if dmq1 & 1 == 0:
+ raise ValueError("dmq1 must be odd.")
+
+ if p * q != modulus:
+ raise ValueError("p*q must equal modulus.")
+
+
+def _check_public_key_components(e: int, n: int) -> None:
+ if n < 3:
+ raise ValueError("n must be >= 3.")
+
+ if e < 3 or e >= n:
+ raise ValueError("e must be >= 3 and < n.")
+
+ if e & 1 == 0:
+ raise ValueError("e must be odd.")
+
+
+def _modinv(e: int, m: int) -> int:
+ """
+ Modular Multiplicative Inverse. Returns x such that: (x*e) mod m == 1
+ """
+ x1, x2 = 1, 0
+ a, b = e, m
+ while b > 0:
+ q, r = divmod(a, b)
+ xn = x1 - q * x2
+ a, b, x1, x2 = b, r, x2, xn
+ return x1 % m
+
+
def rsa_crt_iqmp(p: int, q: int) -> int:
    """
    Compute the CRT (q ** -1) % p value from RSA primes p and q.
    """
    # iqmp ("inverse of q mod p") is the CRT coefficient used to
    # recombine the two half-size exponentiations.
    return _modinv(q, p)
+
+
def rsa_crt_dmp1(private_exponent: int, p: int) -> int:
    """
    Compute the CRT private_exponent % (p - 1) value from the RSA
    private_exponent (d) and p.
    """
    # dP: the reduced exponent used for the mod-p half of CRT decryption.
    return private_exponent % (p - 1)
+
+
def rsa_crt_dmq1(private_exponent: int, q: int) -> int:
    """
    Compute the CRT private_exponent % (q - 1) value from the RSA
    private_exponent (d) and q.
    """
    # dQ: the reduced exponent used for the mod-q half of CRT decryption.
    return private_exponent % (q - 1)
+
+
+# Controls the number of iterations rsa_recover_prime_factors will perform
+# to obtain the prime factors. Each iteration increments by 2 so the actual
+# maximum attempts is half this number.
+_MAX_RECOVERY_ATTEMPTS = 1000
+
+
def rsa_recover_prime_factors(
    n: int, e: int, d: int
) -> typing.Tuple[int, int]:
    """
    Compute factors p and q from the private exponent d. We assume that n has
    no more than two factors. This function is adapted from code in PyCrypto.

    Raises ValueError if no factorization is found within
    _MAX_RECOVERY_ATTEMPTS candidate bases.
    """
    # See 8.2.2(i) in Handbook of Applied Cryptography.
    ktot = d * e - 1
    # The quantity d*e-1 is a multiple of phi(n), even,
    # and can be represented as t*2^s.
    t = ktot
    while t % 2 == 0:
        t = t // 2
    # Cycle through all multiplicative inverses in Zn.
    # The algorithm is non-deterministic, but there is a 50% chance
    # any candidate a leads to successful factoring.
    # See "Digitalized Signatures and Public Key Functions as Intractable
    # as Factorization", M. Rabin, 1979
    spotted = False
    a = 2
    while not spotted and a < _MAX_RECOVERY_ATTEMPTS:
        k = t
        # Cycle through all values a^{t*2^i}=a^k
        while k < ktot:
            cand = pow(a, k, n)
            # Check if a^k is a non-trivial root of unity (mod n)
            if cand != 1 and cand != (n - 1) and pow(cand, 2, n) == 1:
                # We have found a number such that (cand-1)(cand+1)=0 (mod n).
                # Either of the terms divides n.
                p = gcd(cand + 1, n)
                spotted = True
                break
            k *= 2
        # This value was not any good... let's try another!
        a += 2
    if not spotted:
        raise ValueError("Unable to compute factors p and q from exponent d.")
    # Found !
    q, r = divmod(n, p)
    assert r == 0
    # Return the larger factor first (p >= q).
    p, q = sorted((p, q), reverse=True)
    return (p, q)
+
+
class RSAPrivateNumbers:
    """
    Immutable container for the integer components of an RSA private key
    (p, q, d, the CRT values dmp1/dmq1/iqmp, and the public numbers).
    """

    def __init__(
        self,
        p: int,
        q: int,
        d: int,
        dmp1: int,
        dmq1: int,
        iqmp: int,
        public_numbers: "RSAPublicNumbers",
    ):
        if (
            not isinstance(p, int)
            or not isinstance(q, int)
            or not isinstance(d, int)
            or not isinstance(dmp1, int)
            or not isinstance(dmq1, int)
            or not isinstance(iqmp, int)
        ):
            raise TypeError(
                "RSAPrivateNumbers p, q, d, dmp1, dmq1, iqmp arguments must"
                " all be an integers."
            )

        if not isinstance(public_numbers, RSAPublicNumbers):
            raise TypeError(
                "RSAPrivateNumbers public_numbers must be an RSAPublicNumbers"
                " instance."
            )

        self._p = p
        self._q = q
        self._d = d
        self._dmp1 = dmp1
        self._dmq1 = dmq1
        self._iqmp = iqmp
        self._public_numbers = public_numbers

    @property
    def p(self) -> int:
        return self._p

    @property
    def q(self) -> int:
        return self._q

    @property
    def d(self) -> int:
        return self._d

    @property
    def dmp1(self) -> int:
        return self._dmp1

    @property
    def dmq1(self) -> int:
        return self._dmq1

    @property
    def iqmp(self) -> int:
        return self._iqmp

    @property
    def public_numbers(self) -> "RSAPublicNumbers":
        return self._public_numbers

    def private_key(
        self,
        backend: typing.Any = None,
        *,
        unsafe_skip_rsa_key_validation: bool = False,
    ) -> RSAPrivateKey:
        """
        Build an RSAPrivateKey from these numbers.

        ``backend`` is ignored and retained only for backward
        compatibility; the OpenSSL backend is always used.
        """
        from cryptography.hazmat.backends.openssl.backend import (
            backend as ossl,
        )

        return ossl.load_rsa_private_numbers(
            self, unsafe_skip_rsa_key_validation
        )

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, RSAPrivateNumbers):
            return NotImplemented

        return (
            self.p == other.p
            and self.q == other.q
            and self.d == other.d
            and self.dmp1 == other.dmp1
            and self.dmq1 == other.dmq1
            and self.iqmp == other.iqmp
            and self.public_numbers == other.public_numbers
        )

    def __hash__(self) -> int:
        return hash(
            (
                self.p,
                self.q,
                self.d,
                self.dmp1,
                self.dmq1,
                self.iqmp,
                self.public_numbers,
            )
        )
+
+
class RSAPublicNumbers:
    """
    Immutable container for the integer components (e, n) of an RSA
    public key; can construct a usable key object from them.
    """

    def __init__(self, e: int, n: int):
        if not isinstance(e, int) or not isinstance(n, int):
            raise TypeError("RSAPublicNumbers arguments must be integers.")

        self._e = e
        self._n = n

    @property
    def e(self) -> int:
        # Public exponent.
        return self._e

    @property
    def n(self) -> int:
        # Public modulus.
        return self._n

    def public_key(self, backend: typing.Any = None) -> "RSAPublicKey":
        """
        Build an RSAPublicKey from these numbers.

        ``backend`` is ignored and retained only for backward
        compatibility; the OpenSSL backend is always used.
        """
        from cryptography.hazmat.backends.openssl.backend import (
            backend as ossl,
        )

        return ossl.load_rsa_public_numbers(self)

    def __repr__(self) -> str:
        # BUG FIX: the format string was empty (""), so repr() always
        # returned the empty string.  Render the components as upstream
        # cryptography does.
        return "<RSAPublicNumbers(e={0.e}, n={0.n})>".format(self)

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, RSAPublicNumbers):
            return NotImplemented

        return self.e == other.e and self.n == other.n

    def __hash__(self) -> int:
        return hash((self.e, self.n))
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/types.py b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/types.py
new file mode 100644
index 00000000..e911a9f6
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/types.py
@@ -0,0 +1,109 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import typing
+
+from cryptography import utils
+from cryptography.hazmat.primitives.asymmetric import (
+ dh,
+ dsa,
+ ec,
+ ed448,
+ ed25519,
+ rsa,
+ x448,
+ x25519,
+)
+
# Every asymmetric key type
PublicKeyTypes = typing.Union[
    dh.DHPublicKey,
    dsa.DSAPublicKey,
    rsa.RSAPublicKey,
    ec.EllipticCurvePublicKey,
    ed25519.Ed25519PublicKey,
    ed448.Ed448PublicKey,
    x25519.X25519PublicKey,
    x448.X448PublicKey,
]
# Old alias kept for backward compatibility; registered with
# utils.deprecated so users are steered to the new name.
PUBLIC_KEY_TYPES = PublicKeyTypes
utils.deprecated(
    PUBLIC_KEY_TYPES,
    __name__,
    "Use PublicKeyTypes instead",
    utils.DeprecatedIn40,
    name="PUBLIC_KEY_TYPES",
)
# Every asymmetric key type
PrivateKeyTypes = typing.Union[
    dh.DHPrivateKey,
    ed25519.Ed25519PrivateKey,
    ed448.Ed448PrivateKey,
    rsa.RSAPrivateKey,
    dsa.DSAPrivateKey,
    ec.EllipticCurvePrivateKey,
    x25519.X25519PrivateKey,
    x448.X448PrivateKey,
]
# Deprecated alias (see note above).
PRIVATE_KEY_TYPES = PrivateKeyTypes
utils.deprecated(
    PRIVATE_KEY_TYPES,
    __name__,
    "Use PrivateKeyTypes instead",
    utils.DeprecatedIn40,
    name="PRIVATE_KEY_TYPES",
)
# Just the key types we allow to be used for x509 signing. This mirrors
# the certificate public key types
CertificateIssuerPrivateKeyTypes = typing.Union[
    ed25519.Ed25519PrivateKey,
    ed448.Ed448PrivateKey,
    rsa.RSAPrivateKey,
    dsa.DSAPrivateKey,
    ec.EllipticCurvePrivateKey,
]
# Deprecated alias (see note above).
CERTIFICATE_PRIVATE_KEY_TYPES = CertificateIssuerPrivateKeyTypes
utils.deprecated(
    CERTIFICATE_PRIVATE_KEY_TYPES,
    __name__,
    "Use CertificateIssuerPrivateKeyTypes instead",
    utils.DeprecatedIn40,
    name="CERTIFICATE_PRIVATE_KEY_TYPES",
)
# Just the key types we allow to be used for x509 signing. This mirrors
# the certificate private key types
CertificateIssuerPublicKeyTypes = typing.Union[
    dsa.DSAPublicKey,
    rsa.RSAPublicKey,
    ec.EllipticCurvePublicKey,
    ed25519.Ed25519PublicKey,
    ed448.Ed448PublicKey,
]
# Deprecated alias (see note above).
CERTIFICATE_ISSUER_PUBLIC_KEY_TYPES = CertificateIssuerPublicKeyTypes
utils.deprecated(
    CERTIFICATE_ISSUER_PUBLIC_KEY_TYPES,
    __name__,
    "Use CertificateIssuerPublicKeyTypes instead",
    utils.DeprecatedIn40,
    name="CERTIFICATE_ISSUER_PUBLIC_KEY_TYPES",
)
# This type removes DHPublicKey. x448/x25519 can be a public key
# but cannot be used in signing so they are allowed here.
CertificatePublicKeyTypes = typing.Union[
    dsa.DSAPublicKey,
    rsa.RSAPublicKey,
    ec.EllipticCurvePublicKey,
    ed25519.Ed25519PublicKey,
    ed448.Ed448PublicKey,
    x25519.X25519PublicKey,
    x448.X448PublicKey,
]
# Deprecated alias (see note above).
CERTIFICATE_PUBLIC_KEY_TYPES = CertificatePublicKeyTypes
utils.deprecated(
    CERTIFICATE_PUBLIC_KEY_TYPES,
    __name__,
    "Use CertificatePublicKeyTypes instead",
    utils.DeprecatedIn40,
    name="CERTIFICATE_PUBLIC_KEY_TYPES",
)
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/utils.py b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/utils.py
new file mode 100644
index 00000000..140ca196
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/utils.py
@@ -0,0 +1,23 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+
+from cryptography.hazmat.bindings._rust import asn1
+from cryptography.hazmat.primitives import hashes
+
+decode_dss_signature = asn1.decode_dss_signature
+encode_dss_signature = asn1.encode_dss_signature
+
+
class Prehashed:
    """
    Marker wrapper signalling that the data handed to sign/verify was
    already hashed with ``algorithm``.
    """

    def __init__(self, algorithm: hashes.HashAlgorithm):
        if not isinstance(algorithm, hashes.HashAlgorithm):
            raise TypeError("Expected instance of HashAlgorithm.")

        self._algorithm = algorithm
        # Snapshot the digest size at construction time.
        self._digest_size = algorithm.digest_size

    @property
    def digest_size(self) -> int:
        return self._digest_size
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/x25519.py b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/x25519.py
new file mode 100644
index 00000000..fb21fe17
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/x25519.py
@@ -0,0 +1,106 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+
+import abc
+
+from cryptography.exceptions import UnsupportedAlgorithm, _Reasons
+from cryptography.hazmat.bindings._rust import openssl as rust_openssl
+from cryptography.hazmat.primitives import _serialization
+
+
class X25519PublicKey(metaclass=abc.ABCMeta):
    """Abstract interface for an X25519 (ECDH) public key."""

    @classmethod
    def from_public_bytes(cls, data: bytes) -> "X25519PublicKey":
        """Load a public key from its raw byte representation."""
        # Imported lazily so importing this module does not pull in the
        # OpenSSL backend.
        from cryptography.hazmat.backends.openssl.backend import backend

        if not backend.x25519_supported():
            raise UnsupportedAlgorithm(
                "X25519 is not supported by this version of OpenSSL.",
                _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM,
            )

        return backend.x25519_load_public_bytes(data)

    @abc.abstractmethod
    def public_bytes(
        self,
        encoding: _serialization.Encoding,
        format: _serialization.PublicFormat,
    ) -> bytes:
        """
        The serialized bytes of the public key.
        """

    @abc.abstractmethod
    def public_bytes_raw(self) -> bytes:
        """
        The raw bytes of the public key.
        Equivalent to public_bytes(Raw, Raw).
        """
+
+
+# For LibreSSL
+if hasattr(rust_openssl, "x25519"):
+ X25519PublicKey.register(rust_openssl.x25519.X25519PublicKey)
+
+
class X25519PrivateKey(metaclass=abc.ABCMeta):
    """Abstract interface for an X25519 (ECDH) private key."""

    @classmethod
    def generate(cls) -> "X25519PrivateKey":
        """Generate a new X25519 private key via the OpenSSL backend."""
        from cryptography.hazmat.backends.openssl.backend import backend

        if not backend.x25519_supported():
            raise UnsupportedAlgorithm(
                "X25519 is not supported by this version of OpenSSL.",
                _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM,
            )
        return backend.x25519_generate_key()

    @classmethod
    def from_private_bytes(cls, data: bytes) -> "X25519PrivateKey":
        """Load a private key from its raw byte representation."""
        from cryptography.hazmat.backends.openssl.backend import backend

        if not backend.x25519_supported():
            raise UnsupportedAlgorithm(
                "X25519 is not supported by this version of OpenSSL.",
                _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM,
            )

        return backend.x25519_load_private_bytes(data)

    @abc.abstractmethod
    def public_key(self) -> X25519PublicKey:
        """
        Returns the public key associated with this private key
        """

    @abc.abstractmethod
    def private_bytes(
        self,
        encoding: _serialization.Encoding,
        format: _serialization.PrivateFormat,
        encryption_algorithm: _serialization.KeySerializationEncryption,
    ) -> bytes:
        """
        The serialized bytes of the private key.
        """

    @abc.abstractmethod
    def private_bytes_raw(self) -> bytes:
        """
        The raw bytes of the private key.
        Equivalent to private_bytes(Raw, Raw, NoEncryption()).
        """

    @abc.abstractmethod
    def exchange(self, peer_public_key: X25519PublicKey) -> bytes:
        """
        Performs a key exchange operation using the provided peer's public key.
        """
+
+
+# For LibreSSL
+if hasattr(rust_openssl, "x25519"):
+ X25519PrivateKey.register(rust_openssl.x25519.X25519PrivateKey)
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/x448.py b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/x448.py
new file mode 100644
index 00000000..dcab0445
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/asymmetric/x448.py
@@ -0,0 +1,101 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+
+import abc
+
+from cryptography.exceptions import UnsupportedAlgorithm, _Reasons
+from cryptography.hazmat.primitives import _serialization
+
+
class X448PublicKey(metaclass=abc.ABCMeta):
    """Abstract interface for an X448 (ECDH) public key."""

    @classmethod
    def from_public_bytes(cls, data: bytes) -> "X448PublicKey":
        """Load a public key from its raw byte representation."""
        # Imported lazily so importing this module does not pull in the
        # OpenSSL backend.
        from cryptography.hazmat.backends.openssl.backend import backend

        if not backend.x448_supported():
            raise UnsupportedAlgorithm(
                "X448 is not supported by this version of OpenSSL.",
                _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM,
            )

        return backend.x448_load_public_bytes(data)

    @abc.abstractmethod
    def public_bytes(
        self,
        encoding: _serialization.Encoding,
        format: _serialization.PublicFormat,
    ) -> bytes:
        """
        The serialized bytes of the public key.
        """

    def public_bytes_raw(self) -> bytes:
        """
        The raw bytes of the public key.
        Equivalent to public_bytes(Raw, Raw).
        """
        return self.public_bytes(
            _serialization.Encoding.Raw, _serialization.PublicFormat.Raw
        )
+
+
class X448PrivateKey(metaclass=abc.ABCMeta):
    """Abstract interface for an X448 (ECDH) private key."""

    @classmethod
    def generate(cls) -> "X448PrivateKey":
        """Generate a new X448 private key via the OpenSSL backend."""
        from cryptography.hazmat.backends.openssl.backend import backend

        if not backend.x448_supported():
            raise UnsupportedAlgorithm(
                "X448 is not supported by this version of OpenSSL.",
                _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM,
            )
        return backend.x448_generate_key()

    @classmethod
    def from_private_bytes(cls, data: bytes) -> "X448PrivateKey":
        """Load a private key from its raw byte representation."""
        from cryptography.hazmat.backends.openssl.backend import backend

        if not backend.x448_supported():
            raise UnsupportedAlgorithm(
                "X448 is not supported by this version of OpenSSL.",
                _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM,
            )

        return backend.x448_load_private_bytes(data)

    @abc.abstractmethod
    def public_key(self) -> X448PublicKey:
        """
        Returns the public key associated with this private key
        """

    @abc.abstractmethod
    def private_bytes(
        self,
        encoding: _serialization.Encoding,
        format: _serialization.PrivateFormat,
        encryption_algorithm: _serialization.KeySerializationEncryption,
    ) -> bytes:
        """
        The serialized bytes of the private key.
        """

    def private_bytes_raw(self) -> bytes:
        """
        The raw bytes of the private key.
        Equivalent to private_bytes(Raw, Raw, NoEncryption()).
        """
        return self.private_bytes(
            _serialization.Encoding.Raw,
            _serialization.PrivateFormat.Raw,
            _serialization.NoEncryption(),
        )

    @abc.abstractmethod
    def exchange(self, peer_public_key: X448PublicKey) -> bytes:
        """
        Performs a key exchange operation using the provided peer's public key.
        """
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/ciphers/__init__.py b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/ciphers/__init__.py
new file mode 100644
index 00000000..95f02842
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/ciphers/__init__.py
@@ -0,0 +1,26 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+
+from cryptography.hazmat.primitives._cipheralgorithm import (
+ BlockCipherAlgorithm,
+ CipherAlgorithm,
+)
+from cryptography.hazmat.primitives.ciphers.base import (
+ AEADCipherContext,
+ AEADDecryptionContext,
+ AEADEncryptionContext,
+ Cipher,
+ CipherContext,
+)
+
+__all__ = [
+ "Cipher",
+ "CipherAlgorithm",
+ "BlockCipherAlgorithm",
+ "CipherContext",
+ "AEADCipherContext",
+ "AEADDecryptionContext",
+ "AEADEncryptionContext",
+]
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/ciphers/aead.py b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/ciphers/aead.py
new file mode 100644
index 00000000..f2e206bb
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/ciphers/aead.py
@@ -0,0 +1,377 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+
+import os
+import typing
+
+from cryptography import exceptions, utils
+from cryptography.hazmat.backends.openssl import aead
+from cryptography.hazmat.backends.openssl.backend import backend
+from cryptography.hazmat.bindings._rust import FixedPool
+
+
class ChaCha20Poly1305:
    """ChaCha20-Poly1305 AEAD: 32-byte key, 12-byte nonce, 16-byte tag."""

    # Maximum length accepted for data / associated data (see the
    # OverflowError raised in encrypt()).
    _MAX_SIZE = 2**31 - 1

    def __init__(self, key: bytes):
        if not backend.aead_cipher_supported(self):
            raise exceptions.UnsupportedAlgorithm(
                "ChaCha20Poly1305 is not supported by this version of OpenSSL",
                exceptions._Reasons.UNSUPPORTED_CIPHER,
            )
        utils._check_byteslike("key", key)

        if len(key) != 32:
            raise ValueError("ChaCha20Poly1305 key must be 32 bytes.")

        self._key = key
        # Pool of reusable cipher contexts, avoiding re-creating one per
        # encrypt/decrypt call.
        self._pool = FixedPool(self._create_fn)

    @classmethod
    def generate_key(cls) -> bytes:
        """Return a fresh random 32-byte key."""
        return os.urandom(32)

    def _create_fn(self):
        # Factory used by the FixedPool to build backend AEAD contexts.
        return aead._aead_create_ctx(backend, self, self._key)

    def encrypt(
        self,
        nonce: bytes,
        data: bytes,
        associated_data: typing.Optional[bytes],
    ) -> bytes:
        """Encrypt *data*, authenticating it together with *associated_data*."""
        if associated_data is None:
            associated_data = b""

        if len(data) > self._MAX_SIZE or len(associated_data) > self._MAX_SIZE:
            # This is OverflowError to match what cffi would raise
            raise OverflowError(
                "Data or associated data too long. Max 2**31 - 1 bytes"
            )

        self._check_params(nonce, data, associated_data)
        with self._pool.acquire() as ctx:
            return aead._encrypt(
                backend, self, nonce, data, [associated_data], 16, ctx
            )

    def decrypt(
        self,
        nonce: bytes,
        data: bytes,
        associated_data: typing.Optional[bytes],
    ) -> bytes:
        """Decrypt *data*, verifying it together with *associated_data*."""
        if associated_data is None:
            associated_data = b""

        self._check_params(nonce, data, associated_data)
        with self._pool.acquire() as ctx:
            return aead._decrypt(
                backend, self, nonce, data, [associated_data], 16, ctx
            )

    def _check_params(
        self,
        nonce: bytes,
        data: bytes,
        associated_data: bytes,
    ) -> None:
        # Shared argument validation for encrypt/decrypt.
        utils._check_byteslike("nonce", nonce)
        utils._check_byteslike("data", data)
        utils._check_byteslike("associated_data", associated_data)
        if len(nonce) != 12:
            raise ValueError("Nonce must be 12 bytes")
+
+
class AESCCM:
    """AES-CCM AEAD with a configurable authentication tag length."""

    # Maximum length accepted for data / associated data (see the
    # OverflowError raised in encrypt()).
    _MAX_SIZE = 2**31 - 1

    def __init__(self, key: bytes, tag_length: int = 16):
        utils._check_byteslike("key", key)
        if len(key) not in (16, 24, 32):
            raise ValueError("AESCCM key must be 128, 192, or 256 bits.")

        self._key = key
        if not isinstance(tag_length, int):
            raise TypeError("tag_length must be an integer")

        if tag_length not in (4, 6, 8, 10, 12, 14, 16):
            raise ValueError("Invalid tag_length")

        self._tag_length = tag_length

        if not backend.aead_cipher_supported(self):
            raise exceptions.UnsupportedAlgorithm(
                "AESCCM is not supported by this version of OpenSSL",
                exceptions._Reasons.UNSUPPORTED_CIPHER,
            )

    @classmethod
    def generate_key(cls, bit_length: int) -> bytes:
        """Return a fresh random key of the requested size."""
        if not isinstance(bit_length, int):
            raise TypeError("bit_length must be an integer")

        if bit_length not in (128, 192, 256):
            raise ValueError("bit_length must be 128, 192, or 256")

        return os.urandom(bit_length // 8)

    def encrypt(
        self,
        nonce: bytes,
        data: bytes,
        associated_data: typing.Optional[bytes],
    ) -> bytes:
        """Encrypt *data*, authenticating it together with *associated_data*."""
        if associated_data is None:
            associated_data = b""

        if len(data) > self._MAX_SIZE or len(associated_data) > self._MAX_SIZE:
            # This is OverflowError to match what cffi would raise
            raise OverflowError(
                "Data or associated data too long. Max 2**31 - 1 bytes"
            )

        self._check_params(nonce, data, associated_data)
        # CCM ties the maximum data length to the nonce length.
        self._validate_lengths(nonce, len(data))
        return aead._encrypt(
            backend, self, nonce, data, [associated_data], self._tag_length
        )

    def decrypt(
        self,
        nonce: bytes,
        data: bytes,
        associated_data: typing.Optional[bytes],
    ) -> bytes:
        """Decrypt *data*, verifying it together with *associated_data*."""
        if associated_data is None:
            associated_data = b""

        self._check_params(nonce, data, associated_data)
        return aead._decrypt(
            backend, self, nonce, data, [associated_data], self._tag_length
        )

    def _validate_lengths(self, nonce: bytes, data_len: int) -> None:
        # For information about computing this, see
        # https://tools.ietf.org/html/rfc3610#section-2.1
        l_val = 15 - len(nonce)
        if 2 ** (8 * l_val) < data_len:
            raise ValueError("Data too long for nonce")

    def _check_params(
        self, nonce: bytes, data: bytes, associated_data: bytes
    ) -> None:
        # Shared argument validation for encrypt/decrypt.
        utils._check_byteslike("nonce", nonce)
        utils._check_byteslike("data", data)
        utils._check_byteslike("associated_data", associated_data)
        if not 7 <= len(nonce) <= 13:
            raise ValueError("Nonce must be between 7 and 13 bytes")
+
+
class AESGCM:
    """
    AES in Galois/Counter Mode: authenticated encryption with optional
    associated data and a 16-byte tag.
    """

    # Maximum length accepted for data / associated data (see the
    # OverflowError raised in encrypt()).
    _MAX_SIZE = 2**31 - 1

    def __init__(self, key: bytes):
        utils._check_byteslike("key", key)
        if len(key) not in (16, 24, 32):
            raise ValueError("AESGCM key must be 128, 192, or 256 bits.")
        self._key = key

    @classmethod
    def generate_key(cls, bit_length: int) -> bytes:
        """Return a fresh random key of the requested size."""
        if not isinstance(bit_length, int):
            raise TypeError("bit_length must be an integer")
        if bit_length not in (128, 192, 256):
            raise ValueError("bit_length must be 128, 192, or 256")
        return os.urandom(bit_length // 8)

    def encrypt(
        self,
        nonce: bytes,
        data: bytes,
        associated_data: typing.Optional[bytes],
    ) -> bytes:
        """Encrypt *data*, authenticating it together with *associated_data*."""
        aad = b"" if associated_data is None else associated_data

        if len(data) > self._MAX_SIZE or len(aad) > self._MAX_SIZE:
            # This is OverflowError to match what cffi would raise
            raise OverflowError(
                "Data or associated data too long. Max 2**31 - 1 bytes"
            )

        self._check_params(nonce, data, aad)
        return aead._encrypt(backend, self, nonce, data, [aad], 16)

    def decrypt(
        self,
        nonce: bytes,
        data: bytes,
        associated_data: typing.Optional[bytes],
    ) -> bytes:
        """Decrypt *data*, verifying it together with *associated_data*."""
        aad = b"" if associated_data is None else associated_data

        self._check_params(nonce, data, aad)
        return aead._decrypt(backend, self, nonce, data, [aad], 16)

    def _check_params(
        self,
        nonce: bytes,
        data: bytes,
        associated_data: bytes,
    ) -> None:
        # Shared argument validation for encrypt/decrypt.
        utils._check_byteslike("nonce", nonce)
        utils._check_byteslike("data", data)
        utils._check_byteslike("associated_data", associated_data)
        if not 8 <= len(nonce) <= 128:
            raise ValueError("Nonce must be between 8 and 128 bytes")
+ raise ValueError("Nonce must be between 8 and 128 bytes")
+
+
class AESOCB3:
    """AES in OCB3 mode: authenticated encryption with a 16-byte tag."""

    # Maximum length accepted for data / associated data (see the
    # OverflowError raised in encrypt()).
    _MAX_SIZE = 2**31 - 1

    def __init__(self, key: bytes):
        utils._check_byteslike("key", key)
        if len(key) not in (16, 24, 32):
            raise ValueError("AESOCB3 key must be 128, 192, or 256 bits.")

        self._key = key

        if not backend.aead_cipher_supported(self):
            raise exceptions.UnsupportedAlgorithm(
                "OCB3 is not supported by this version of OpenSSL",
                exceptions._Reasons.UNSUPPORTED_CIPHER,
            )

    @classmethod
    def generate_key(cls, bit_length: int) -> bytes:
        """Return a fresh random key of the requested size."""
        if not isinstance(bit_length, int):
            raise TypeError("bit_length must be an integer")

        if bit_length not in (128, 192, 256):
            raise ValueError("bit_length must be 128, 192, or 256")

        return os.urandom(bit_length // 8)

    def encrypt(
        self,
        nonce: bytes,
        data: bytes,
        associated_data: typing.Optional[bytes],
    ) -> bytes:
        """Encrypt *data*, authenticating it together with *associated_data*."""
        if associated_data is None:
            associated_data = b""

        if len(data) > self._MAX_SIZE or len(associated_data) > self._MAX_SIZE:
            # This is OverflowError to match what cffi would raise
            raise OverflowError(
                "Data or associated data too long. Max 2**31 - 1 bytes"
            )

        self._check_params(nonce, data, associated_data)
        return aead._encrypt(backend, self, nonce, data, [associated_data], 16)

    def decrypt(
        self,
        nonce: bytes,
        data: bytes,
        associated_data: typing.Optional[bytes],
    ) -> bytes:
        """Decrypt *data*, verifying it together with *associated_data*."""
        if associated_data is None:
            associated_data = b""

        self._check_params(nonce, data, associated_data)
        return aead._decrypt(backend, self, nonce, data, [associated_data], 16)

    def _check_params(
        self,
        nonce: bytes,
        data: bytes,
        associated_data: bytes,
    ) -> None:
        # Shared argument validation for encrypt/decrypt.
        utils._check_byteslike("nonce", nonce)
        utils._check_byteslike("data", data)
        utils._check_byteslike("associated_data", associated_data)
        if len(nonce) < 12 or len(nonce) > 15:
            raise ValueError("Nonce must be between 12 and 15 bytes")
+
+
class AESSIV:
    """
    AES-SIV AEAD.  Note there is no nonce parameter: an empty nonce is
    passed to the backend and associated_data is a list of byte strings.
    """

    # Maximum length accepted for data / associated data (see the
    # OverflowError raised in encrypt()).
    _MAX_SIZE = 2**31 - 1

    def __init__(self, key: bytes):
        utils._check_byteslike("key", key)
        if len(key) not in (32, 48, 64):
            raise ValueError("AESSIV key must be 256, 384, or 512 bits.")

        self._key = key

        if not backend.aead_cipher_supported(self):
            raise exceptions.UnsupportedAlgorithm(
                "AES-SIV is not supported by this version of OpenSSL",
                exceptions._Reasons.UNSUPPORTED_CIPHER,
            )

    @classmethod
    def generate_key(cls, bit_length: int) -> bytes:
        """Return a fresh random key of the requested size."""
        if not isinstance(bit_length, int):
            raise TypeError("bit_length must be an integer")

        if bit_length not in (256, 384, 512):
            raise ValueError("bit_length must be 256, 384, or 512")

        return os.urandom(bit_length // 8)

    def encrypt(
        self,
        data: bytes,
        associated_data: typing.Optional[typing.List[bytes]],
    ) -> bytes:
        """Encrypt *data*, authenticating each item of *associated_data*."""
        if associated_data is None:
            associated_data = []

        self._check_params(data, associated_data)

        if len(data) > self._MAX_SIZE or any(
            len(ad) > self._MAX_SIZE for ad in associated_data
        ):
            # This is OverflowError to match what cffi would raise
            raise OverflowError(
                "Data or associated data too long. Max 2**31 - 1 bytes"
            )

        return aead._encrypt(backend, self, b"", data, associated_data, 16)

    def decrypt(
        self,
        data: bytes,
        associated_data: typing.Optional[typing.List[bytes]],
    ) -> bytes:
        """Decrypt *data*, verifying each item of *associated_data*."""
        if associated_data is None:
            associated_data = []

        self._check_params(data, associated_data)

        return aead._decrypt(backend, self, b"", data, associated_data, 16)

    def _check_params(
        self,
        data: bytes,
        associated_data: typing.List[bytes],
    ) -> None:
        # Shared argument validation for encrypt/decrypt.
        utils._check_byteslike("data", data)
        if len(data) == 0:
            raise ValueError("data must not be zero length")

        if not isinstance(associated_data, list):
            raise TypeError(
                "associated_data must be a list of bytes-like objects or None"
            )
        for x in associated_data:
            utils._check_byteslike("associated_data elements", x)
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/ciphers/algorithms.py b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/ciphers/algorithms.py
new file mode 100644
index 00000000..4357c17a
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/ciphers/algorithms.py
@@ -0,0 +1,227 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+
+from cryptography import utils
+from cryptography.hazmat.primitives.ciphers import (
+ BlockCipherAlgorithm,
+ CipherAlgorithm,
+)
+
+
def _verify_key_size(algorithm: CipherAlgorithm, key: bytes) -> bytes:
    """Validate *key* against *algorithm*'s allowed sizes and return it.

    Raises TypeError for non-bytes-like keys and ValueError when the bit
    length is not in ``algorithm.key_sizes``.
    """
    utils._check_byteslike("key", key)
    bit_length = len(key) * 8
    if bit_length not in algorithm.key_sizes:
        raise ValueError(
            f"Invalid key size ({bit_length}) for {algorithm.name}."
        )
    return key
+
+
class AES(BlockCipherAlgorithm):
    """AES block cipher (128-bit blocks).

    512-bit keys are accepted solely so AES-256-XTS (which concatenates
    two 256-bit keys) can be expressed.
    """

    name = "AES"
    block_size = 128
    key_sizes = frozenset([128, 192, 256, 512])

    def __init__(self, key: bytes):
        self.key = _verify_key_size(self, key)

    @property
    def key_size(self) -> int:
        return 8 * len(self.key)
+
+
class AES128(BlockCipherAlgorithm):
    """AES restricted to exactly 128-bit keys."""

    name = "AES"
    block_size = 128
    key_size = 128
    key_sizes = frozenset([128])

    def __init__(self, key: bytes):
        self.key = _verify_key_size(self, key)
+
+
class AES256(BlockCipherAlgorithm):
    """AES restricted to exactly 256-bit keys."""

    name = "AES"
    block_size = 128
    key_size = 256
    key_sizes = frozenset([256])

    def __init__(self, key: bytes):
        self.key = _verify_key_size(self, key)
+
+
class Camellia(BlockCipherAlgorithm):
    """Camellia block cipher: 128-bit blocks, 128/192/256-bit keys."""

    name = "camellia"
    block_size = 128
    key_sizes = frozenset([128, 192, 256])

    def __init__(self, key: bytes):
        self.key = _verify_key_size(self, key)

    @property
    def key_size(self) -> int:
        return 8 * len(self.key)
+
+
class TripleDES(BlockCipherAlgorithm):
    """Triple DES (3DES) block cipher.

    8-byte and 16-byte keys are expanded to 24 bytes before validation by
    repeating key parts, matching the usual K1=K2=K3 / K3=K1 conventions.
    """

    name = "3DES"
    block_size = 64
    key_sizes = frozenset([64, 128, 192])

    def __init__(self, key: bytes):
        if len(key) == 8:
            key = key + key + key
        elif len(key) == 16:
            key = key + key[:8]
        self.key = _verify_key_size(self, key)

    @property
    def key_size(self) -> int:
        return 8 * len(self.key)
+
+
class Blowfish(BlockCipherAlgorithm):
    """Blowfish block cipher: 64-bit blocks, 32..448-bit keys (8-bit steps)."""

    name = "Blowfish"
    block_size = 64
    key_sizes = frozenset(range(32, 449, 8))

    def __init__(self, key: bytes):
        self.key = _verify_key_size(self, key)

    @property
    def key_size(self) -> int:
        return 8 * len(self.key)
+
+
# Private alias retained so internal code can reference the class directly;
# utils.deprecated() presumably wraps the public module attribute in a
# deprecation proxy — see cryptography.utils.deprecated.
_BlowfishInternal = Blowfish
utils.deprecated(
    Blowfish,
    __name__,
    "Blowfish has been deprecated",
    utils.DeprecatedIn37,
    name="Blowfish",
)
+
+
class CAST5(BlockCipherAlgorithm):
    """CAST5 block cipher: 64-bit blocks, 40..128-bit keys (8-bit steps)."""

    name = "CAST5"
    block_size = 64
    key_sizes = frozenset(range(40, 129, 8))

    def __init__(self, key: bytes):
        self.key = _verify_key_size(self, key)

    @property
    def key_size(self) -> int:
        return 8 * len(self.key)
+
+
# Private alias retained so internal code can reference the class directly;
# utils.deprecated() presumably wraps the public module attribute in a
# deprecation proxy — see cryptography.utils.deprecated.
_CAST5Internal = CAST5
utils.deprecated(
    CAST5,
    __name__,
    "CAST5 has been deprecated",
    utils.DeprecatedIn37,
    name="CAST5",
)
+
+
class ARC4(CipherAlgorithm):
    """RC4 stream cipher; being a stream cipher it has no block size."""

    name = "RC4"
    key_sizes = frozenset([40, 56, 64, 80, 128, 160, 192, 256])

    def __init__(self, key: bytes):
        self.key = _verify_key_size(self, key)

    @property
    def key_size(self) -> int:
        return 8 * len(self.key)
+
+
class IDEA(BlockCipherAlgorithm):
    """IDEA block cipher: 64-bit blocks, 128-bit keys."""

    name = "IDEA"
    block_size = 64
    key_sizes = frozenset([128])

    def __init__(self, key: bytes):
        self.key = _verify_key_size(self, key)

    @property
    def key_size(self) -> int:
        return 8 * len(self.key)
+
+
# Private alias retained so internal code can reference the class directly;
# utils.deprecated() presumably wraps the public module attribute in a
# deprecation proxy — see cryptography.utils.deprecated.
_IDEAInternal = IDEA
utils.deprecated(
    IDEA,
    __name__,
    "IDEA has been deprecated",
    utils.DeprecatedIn37,
    name="IDEA",
)
+
+
class SEED(BlockCipherAlgorithm):
    """SEED block cipher: 128-bit blocks, 128-bit keys."""

    name = "SEED"
    block_size = 128
    key_sizes = frozenset([128])

    def __init__(self, key: bytes):
        self.key = _verify_key_size(self, key)

    @property
    def key_size(self) -> int:
        return 8 * len(self.key)
+
+
# Private alias retained so internal code can reference the class directly;
# utils.deprecated() presumably wraps the public module attribute in a
# deprecation proxy — see cryptography.utils.deprecated.
_SEEDInternal = SEED
utils.deprecated(
    SEED,
    __name__,
    "SEED has been deprecated",
    utils.DeprecatedIn37,
    name="SEED",
)
+
+
class ChaCha20(CipherAlgorithm):
    """ChaCha20 stream cipher: 256-bit key, 128-bit (16-byte) nonce."""

    name = "ChaCha20"
    key_sizes = frozenset([256])

    def __init__(self, key: bytes, nonce: bytes):
        self.key = _verify_key_size(self, key)
        utils._check_byteslike("nonce", nonce)
        if len(nonce) != 16:
            raise ValueError("nonce must be 128-bits (16 bytes)")
        self._nonce = nonce

    @property
    def nonce(self) -> bytes:
        return self._nonce

    @property
    def key_size(self) -> int:
        return 8 * len(self.key)
+
+
class SM4(BlockCipherAlgorithm):
    """SM4 block cipher: 128-bit blocks, 128-bit keys."""

    name = "SM4"
    block_size = 128
    key_sizes = frozenset([128])

    def __init__(self, key: bytes):
        self.key = _verify_key_size(self, key)

    @property
    def key_size(self) -> int:
        return 8 * len(self.key)
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/ciphers/base.py b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/ciphers/base.py
new file mode 100644
index 00000000..d80ef3f1
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/ciphers/base.py
@@ -0,0 +1,268 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+
+import abc
+import typing
+
+from cryptography.exceptions import (
+ AlreadyFinalized,
+ AlreadyUpdated,
+ NotYetFinalized,
+)
+from cryptography.hazmat.primitives._cipheralgorithm import CipherAlgorithm
+from cryptography.hazmat.primitives.ciphers import modes
+
+if typing.TYPE_CHECKING:
+ from cryptography.hazmat.backends.openssl.ciphers import (
+ _CipherContext as _BackendCipherContext,
+ )
+
+
class CipherContext(metaclass=abc.ABCMeta):
    """Incremental encryption/decryption interface returned by
    ``Cipher.encryptor()`` / ``Cipher.decryptor()``."""

    @abc.abstractmethod
    def update(self, data: bytes) -> bytes:
        """
        Processes the provided bytes through the cipher and returns the results
        as bytes.
        """

    @abc.abstractmethod
    def update_into(self, data: bytes, buf: bytes) -> int:
        """
        Processes the provided bytes and writes the resulting data into the
        provided buffer. Returns the number of bytes written.
        """

    @abc.abstractmethod
    def finalize(self) -> bytes:
        """
        Returns the results of processing the final block as bytes.
        """
+
+
class AEADCipherContext(CipherContext, metaclass=abc.ABCMeta):
    """CipherContext that can additionally authenticate associated data."""

    @abc.abstractmethod
    def authenticate_additional_data(self, data: bytes) -> None:
        """
        Authenticates the provided bytes.
        """
+
+
class AEADDecryptionContext(AEADCipherContext, metaclass=abc.ABCMeta):
    """AEAD decryption context; the tag may be supplied at finalization."""

    @abc.abstractmethod
    def finalize_with_tag(self, tag: bytes) -> bytes:
        """
        Returns the results of processing the final block as bytes and allows
        delayed passing of the authentication tag.
        """
+
+
class AEADEncryptionContext(AEADCipherContext, metaclass=abc.ABCMeta):
    """AEAD encryption context exposing the authentication tag."""

    @property
    @abc.abstractmethod
    def tag(self) -> bytes:
        """
        Returns tag bytes. This is only available after encryption is
        finalized.
        """
+
+
# Covariant TypeVar bound to Optional[modes.Mode]; lets Cipher[...] be
# parameterized by the specific mode (or None) it was constructed with, so
# the encryptor()/decryptor() overloads can pick the right context type.
Mode = typing.TypeVar(
    "Mode", bound=typing.Optional[modes.Mode], covariant=True
)
+
+
class Cipher(typing.Generic[Mode]):
    """Pairs a CipherAlgorithm with a mode of operation and builds
    encryption/decryption contexts via the OpenSSL backend."""

    def __init__(
        self,
        algorithm: CipherAlgorithm,
        mode: Mode,
        backend: typing.Any = None,
    ) -> None:
        # ``backend`` is accepted for backwards compatibility but unused;
        # the OpenSSL backend is imported lazily in encryptor()/decryptor().
        if not isinstance(algorithm, CipherAlgorithm):
            raise TypeError("Expected interface of CipherAlgorithm.")

        if mode is not None:
            # mypy needs this assert to narrow the type from our generic
            # type. Maybe it won't some time in the future.
            assert isinstance(mode, modes.Mode)
            # Each mode checks its own (mode, algorithm) invariants.
            mode.validate_for_algorithm(algorithm)

        self.algorithm = algorithm
        self.mode = mode

    @typing.overload
    def encryptor(
        self: "Cipher[modes.ModeWithAuthenticationTag]",
    ) -> AEADEncryptionContext:
        ...

    @typing.overload
    def encryptor(
        self: "_CIPHER_TYPE",
    ) -> CipherContext:
        ...

    def encryptor(self):
        """Return a fresh encryption context for this algorithm/mode pair."""
        if isinstance(self.mode, modes.ModeWithAuthenticationTag):
            if self.mode.tag is not None:
                # A pre-set tag only makes sense when decrypting/verifying.
                raise ValueError(
                    "Authentication tag must be None when encrypting."
                )
        from cryptography.hazmat.backends.openssl.backend import backend

        ctx = backend.create_symmetric_encryption_ctx(
            self.algorithm, self.mode
        )
        return self._wrap_ctx(ctx, encrypt=True)

    @typing.overload
    def decryptor(
        self: "Cipher[modes.ModeWithAuthenticationTag]",
    ) -> AEADDecryptionContext:
        ...

    @typing.overload
    def decryptor(
        self: "_CIPHER_TYPE",
    ) -> CipherContext:
        ...

    def decryptor(self):
        """Return a fresh decryption context for this algorithm/mode pair."""
        from cryptography.hazmat.backends.openssl.backend import backend

        ctx = backend.create_symmetric_decryption_ctx(
            self.algorithm, self.mode
        )
        return self._wrap_ctx(ctx, encrypt=False)

    def _wrap_ctx(
        self, ctx: "_BackendCipherContext", encrypt: bool
    ) -> typing.Union[
        AEADEncryptionContext, AEADDecryptionContext, CipherContext
    ]:
        # AEAD modes get the limit-enforcing wrappers; everything else gets
        # the plain finalize-once wrapper.
        if isinstance(self.mode, modes.ModeWithAuthenticationTag):
            if encrypt:
                return _AEADEncryptionContext(ctx)
            else:
                return _AEADDecryptionContext(ctx)
        else:
            return _CipherContext(ctx)
+
+
# Alias used by the encryptor()/decryptor() overloads: any Cipher whose mode
# is not a ModeWithAuthenticationTag yields a plain CipherContext.
_CIPHER_TYPE = Cipher[
    typing.Union[
        modes.ModeWithNonce,
        modes.ModeWithTweak,
        None,
        modes.ECB,
        modes.ModeWithInitializationVector,
    ]
]
+
+
class _CipherContext(CipherContext):
    """Thin wrapper over a backend context that enforces use-after-finalize
    errors."""

    _ctx: typing.Optional["_BackendCipherContext"]

    def __init__(self, ctx: "_BackendCipherContext") -> None:
        self._ctx = ctx

    def _require_ctx(self) -> "_BackendCipherContext":
        # Raises once finalize() has cleared the backend context.
        if self._ctx is None:
            raise AlreadyFinalized("Context was already finalized.")
        return self._ctx

    def update(self, data: bytes) -> bytes:
        return self._require_ctx().update(data)

    def update_into(self, data: bytes, buf: bytes) -> int:
        return self._require_ctx().update_into(data, buf)

    def finalize(self) -> bytes:
        ctx = self._require_ctx()
        data = ctx.finalize()
        self._ctx = None
        return data
+
+
class _AEADCipherContext(AEADCipherContext):
    """Shared AEAD context implementation that tracks processed byte counts
    so the mode's ``_MAX_ENCRYPTED_BYTES`` / ``_MAX_AAD_BYTES`` limits can
    be enforced."""

    _ctx: typing.Optional["_BackendCipherContext"]
    _tag: typing.Optional[bytes]

    def __init__(self, ctx: "_BackendCipherContext") -> None:
        self._ctx = ctx
        self._bytes_processed = 0
        self._aad_bytes_processed = 0
        self._tag = None
        # Once update()/update_into() has run, AAD may no longer be added.
        self._updated = False

    def _check_limit(self, data_size: int) -> None:
        # Guards both the finalized state and the mode's plaintext limit.
        if self._ctx is None:
            raise AlreadyFinalized("Context was already finalized.")
        self._updated = True
        self._bytes_processed += data_size
        if self._bytes_processed > self._ctx._mode._MAX_ENCRYPTED_BYTES:
            raise ValueError(
                "{} has a maximum encrypted byte limit of {}".format(
                    self._ctx._mode.name, self._ctx._mode._MAX_ENCRYPTED_BYTES
                )
            )

    def update(self, data: bytes) -> bytes:
        self._check_limit(len(data))
        # mypy needs this assert even though _check_limit already checked
        assert self._ctx is not None
        return self._ctx.update(data)

    def update_into(self, data: bytes, buf: bytes) -> int:
        self._check_limit(len(data))
        # mypy needs this assert even though _check_limit already checked
        assert self._ctx is not None
        return self._ctx.update_into(data, buf)

    def finalize(self) -> bytes:
        if self._ctx is None:
            raise AlreadyFinalized("Context was already finalized.")
        data = self._ctx.finalize()
        # Capture the tag before dropping the backend context.
        self._tag = self._ctx.tag
        self._ctx = None
        return data

    def authenticate_additional_data(self, data: bytes) -> None:
        if self._ctx is None:
            raise AlreadyFinalized("Context was already finalized.")
        if self._updated:
            raise AlreadyUpdated("Update has been called on this context.")

        self._aad_bytes_processed += len(data)
        if self._aad_bytes_processed > self._ctx._mode._MAX_AAD_BYTES:
            raise ValueError(
                "{} has a maximum AAD byte limit of {}".format(
                    self._ctx._mode.name, self._ctx._mode._MAX_AAD_BYTES
                )
            )

        self._ctx.authenticate_additional_data(data)
+
+
class _AEADDecryptionContext(_AEADCipherContext, AEADDecryptionContext):
    """AEAD decryption context supporting tag delivery at finalization."""

    def finalize_with_tag(self, tag: bytes) -> bytes:
        ctx = self._ctx
        if ctx is None:
            raise AlreadyFinalized("Context was already finalized.")
        data = ctx.finalize_with_tag(tag)
        self._tag = ctx.tag
        self._ctx = None
        return data
+
+
class _AEADEncryptionContext(_AEADCipherContext, AEADEncryptionContext):
    """AEAD encryption context exposing the tag after finalization."""

    @property
    def tag(self) -> bytes:
        # The tag is only captured (in finalize()) once the backend context
        # has been consumed.
        if self._ctx is not None:
            raise NotYetFinalized(
                "You must finalize encryption before getting the tag."
            )
        assert self._tag is not None
        return self._tag
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/ciphers/modes.py b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/ciphers/modes.py
new file mode 100644
index 00000000..1fba397f
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/ciphers/modes.py
@@ -0,0 +1,273 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+
+import abc
+import typing
+
+from cryptography import utils
+from cryptography.exceptions import UnsupportedAlgorithm, _Reasons
+from cryptography.hazmat.primitives._cipheralgorithm import (
+ BlockCipherAlgorithm,
+ CipherAlgorithm,
+)
+from cryptography.hazmat.primitives.ciphers import algorithms
+
+
class Mode(metaclass=abc.ABCMeta):
    """Abstract base for cipher modes of operation."""

    @property
    @abc.abstractmethod
    def name(self) -> str:
        """
        A string naming this mode (e.g. "ECB", "CBC").
        """

    @abc.abstractmethod
    def validate_for_algorithm(self, algorithm: CipherAlgorithm) -> None:
        """
        Checks that all the necessary invariants of this (mode, algorithm)
        combination are met.
        """
+
+
class ModeWithInitializationVector(Mode, metaclass=abc.ABCMeta):
    """Mode that requires an initialization vector (CBC, OFB, CFB, GCM...)."""

    @property
    @abc.abstractmethod
    def initialization_vector(self) -> bytes:
        """
        The value of the initialization vector for this mode as bytes.
        """
+
+
class ModeWithTweak(Mode, metaclass=abc.ABCMeta):
    """Mode that requires a tweak value (XTS)."""

    @property
    @abc.abstractmethod
    def tweak(self) -> bytes:
        """
        The value of the tweak for this mode as bytes.
        """
+
+
class ModeWithNonce(Mode, metaclass=abc.ABCMeta):
    """Mode that requires a nonce (CTR)."""

    @property
    @abc.abstractmethod
    def nonce(self) -> bytes:
        """
        The value of the nonce for this mode as bytes.
        """
+
+
class ModeWithAuthenticationTag(Mode, metaclass=abc.ABCMeta):
    """AEAD mode carrying an optional authentication tag (GCM)."""

    @property
    @abc.abstractmethod
    def tag(self) -> typing.Optional[bytes]:
        """
        The value of the tag supplied to the constructor of this mode.
        """
+
+
def _check_aes_key_length(self: Mode, algorithm: CipherAlgorithm) -> None:
    """Reject AES keys longer than 256 bits (512-bit keys are XTS-only)."""
    if algorithm.name == "AES" and algorithm.key_size > 256:
        raise ValueError(
            "Only 128, 192, and 256 bit keys are allowed for this AES mode"
        )
+
+
def _check_iv_length(
    self: ModeWithInitializationVector, algorithm: BlockCipherAlgorithm
) -> None:
    """Require the IV to be exactly one cipher block long."""
    iv_len = len(self.initialization_vector)
    if iv_len * 8 != algorithm.block_size:
        raise ValueError(f"Invalid IV size ({iv_len}) for {self.name}.")
+
+
def _check_nonce_length(
    nonce: bytes, name: str, algorithm: CipherAlgorithm
) -> None:
    """Require a block-cipher algorithm and a nonce of one block length."""
    if not isinstance(algorithm, BlockCipherAlgorithm):
        raise UnsupportedAlgorithm(
            f"{name} requires a block cipher algorithm",
            _Reasons.UNSUPPORTED_CIPHER,
        )
    if 8 * len(nonce) != algorithm.block_size:
        raise ValueError(f"Invalid nonce size ({len(nonce)}) for {name}.")
+
+
def _check_iv_and_key_length(
    self: ModeWithInitializationVector, algorithm: CipherAlgorithm
) -> None:
    # Combined validator shared by the IV-based modes (CBC/OFB/CFB/CFB8)
    # via ``validate_for_algorithm = _check_iv_and_key_length``.
    if not isinstance(algorithm, BlockCipherAlgorithm):
        raise UnsupportedAlgorithm(
            f"{self} requires a block cipher algorithm",
            _Reasons.UNSUPPORTED_CIPHER,
        )
    _check_aes_key_length(self, algorithm)
    _check_iv_length(self, algorithm)
+
+
class CBC(ModeWithInitializationVector):
    """Cipher Block Chaining mode."""

    name = "CBC"

    def __init__(self, initialization_vector: bytes):
        utils._check_byteslike("initialization_vector", initialization_vector)
        self._iv = initialization_vector

    @property
    def initialization_vector(self) -> bytes:
        return self._iv

    validate_for_algorithm = _check_iv_and_key_length
+
+
class XTS(ModeWithTweak):
    """XEX-based tweaked-codebook mode with ciphertext stealing."""

    name = "XTS"

    def __init__(self, tweak: bytes):
        utils._check_byteslike("tweak", tweak)
        if len(tweak) != 16:
            raise ValueError("tweak must be 128-bits (16 bytes)")
        self._tweak = tweak

    @property
    def tweak(self) -> bytes:
        return self._tweak

    def validate_for_algorithm(self, algorithm: CipherAlgorithm) -> None:
        # XTS uses a double-length key, so only the plain AES class (which
        # admits 256/512-bit keys) is usable here.
        if isinstance(algorithm, (algorithms.AES128, algorithms.AES256)):
            raise TypeError(
                "The AES128 and AES256 classes do not support XTS, please use "
                "the standard AES class instead."
            )
        if algorithm.key_size not in (256, 512):
            raise ValueError(
                "The XTS specification requires a 256-bit key for AES-128-XTS"
                " and 512-bit key for AES-256-XTS"
            )
+
+
class ECB(Mode):
    # Electronic Codebook: no IV or nonce; only the AES key-length
    # restriction applies.
    name = "ECB"

    validate_for_algorithm = _check_aes_key_length
+
+
class OFB(ModeWithInitializationVector):
    """Output Feedback mode."""

    name = "OFB"

    def __init__(self, initialization_vector: bytes):
        utils._check_byteslike("initialization_vector", initialization_vector)
        self._iv = initialization_vector

    @property
    def initialization_vector(self) -> bytes:
        return self._iv

    validate_for_algorithm = _check_iv_and_key_length
+
+
class CFB(ModeWithInitializationVector):
    """Cipher Feedback mode (full-block feedback)."""

    name = "CFB"

    def __init__(self, initialization_vector: bytes):
        utils._check_byteslike("initialization_vector", initialization_vector)
        self._iv = initialization_vector

    @property
    def initialization_vector(self) -> bytes:
        return self._iv

    validate_for_algorithm = _check_iv_and_key_length
+
+
class CFB8(ModeWithInitializationVector):
    """Cipher Feedback mode with 8-bit feedback segments."""

    name = "CFB8"

    def __init__(self, initialization_vector: bytes):
        utils._check_byteslike("initialization_vector", initialization_vector)
        self._iv = initialization_vector

    @property
    def initialization_vector(self) -> bytes:
        return self._iv

    validate_for_algorithm = _check_iv_and_key_length
+
+
class CTR(ModeWithNonce):
    """Counter mode."""

    name = "CTR"

    def __init__(self, nonce: bytes):
        utils._check_byteslike("nonce", nonce)
        self._nonce = nonce

    @property
    def nonce(self) -> bytes:
        return self._nonce

    def validate_for_algorithm(self, algorithm: CipherAlgorithm) -> None:
        # Nonce length is validated here (not in __init__) because it
        # depends on the algorithm's block size.
        _check_aes_key_length(self, algorithm)
        _check_nonce_length(self.nonce, self.name, algorithm)
+
+
class GCM(ModeWithInitializationVector, ModeWithAuthenticationTag):
    """Galois/Counter Mode.

    ``tag`` is supplied when decrypting (validated against
    ``min_tag_length``); the Cipher layer requires it to be None when
    encrypting.
    """

    name = "GCM"
    # Per-stream limits enforced by _AEADCipherContext (GCM's plaintext
    # bound per the spec — see NIST SP 800-38D).
    _MAX_ENCRYPTED_BYTES = (2**39 - 256) // 8
    _MAX_AAD_BYTES = (2**64) // 8

    def __init__(
        self,
        initialization_vector: bytes,
        tag: typing.Optional[bytes] = None,
        min_tag_length: int = 16,
    ):
        # OpenSSL 3.0.0 constrains GCM IVs to [64, 1024] bits inclusive
        # This is a sane limit anyway so we'll enforce it here.
        utils._check_byteslike("initialization_vector", initialization_vector)
        if len(initialization_vector) < 8 or len(initialization_vector) > 128:
            raise ValueError(
                "initialization_vector must be between 8 and 128 bytes (64 "
                "and 1024 bits)."
            )
        self._initialization_vector = initialization_vector
        if tag is not None:
            utils._check_bytes("tag", tag)
            # Short tags weaken authentication; 4 bytes is the floor here.
            if min_tag_length < 4:
                raise ValueError("min_tag_length must be >= 4")
            if len(tag) < min_tag_length:
                raise ValueError(
                    "Authentication tag must be {} bytes or longer.".format(
                        min_tag_length
                    )
                )
        self._tag = tag
        self._min_tag_length = min_tag_length

    @property
    def tag(self) -> typing.Optional[bytes]:
        return self._tag

    @property
    def initialization_vector(self) -> bytes:
        return self._initialization_vector

    def validate_for_algorithm(self, algorithm: CipherAlgorithm) -> None:
        _check_aes_key_length(self, algorithm)
        if not isinstance(algorithm, BlockCipherAlgorithm):
            raise UnsupportedAlgorithm(
                "GCM requires a block cipher algorithm",
                _Reasons.UNSUPPORTED_CIPHER,
            )
        # A tag can never exceed one cipher block.
        block_size_bytes = algorithm.block_size // 8
        if self._tag is not None and len(self._tag) > block_size_bytes:
            raise ValueError(
                "Authentication tag cannot be more than {} bytes.".format(
                    block_size_bytes
                )
            )
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/cmac.py b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/cmac.py
new file mode 100644
index 00000000..00c4bd11
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/cmac.py
@@ -0,0 +1,64 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+
+import typing
+
+from cryptography import utils
+from cryptography.exceptions import AlreadyFinalized
+from cryptography.hazmat.primitives import ciphers
+
+if typing.TYPE_CHECKING:
+ from cryptography.hazmat.backends.openssl.cmac import _CMACContext
+
+
class CMAC:
    """Cipher-based Message Authentication Code.

    Only block ciphers are accepted. The context is one-shot: after
    finalize() or verify() any further use raises AlreadyFinalized.
    """

    _ctx: typing.Optional["_CMACContext"]
    _algorithm: ciphers.BlockCipherAlgorithm

    def __init__(
        self,
        algorithm: ciphers.BlockCipherAlgorithm,
        backend: typing.Any = None,
        ctx: typing.Optional["_CMACContext"] = None,
    ) -> None:
        # ``backend`` is accepted for backwards compatibility but unused;
        # the OpenSSL backend is imported lazily below.
        if not isinstance(algorithm, ciphers.BlockCipherAlgorithm):
            raise TypeError("Expected instance of BlockCipherAlgorithm.")
        self._algorithm = algorithm

        if ctx is None:
            from cryptography.hazmat.backends.openssl.backend import (
                backend as ossl,
            )

            self._ctx = ossl.create_cmac_ctx(self._algorithm)
        else:
            # Used by copy() to clone an in-progress context.
            self._ctx = ctx

    def update(self, data: bytes) -> None:
        if self._ctx is None:
            raise AlreadyFinalized("Context was already finalized.")

        utils._check_bytes("data", data)
        self._ctx.update(data)

    def finalize(self) -> bytes:
        if self._ctx is None:
            raise AlreadyFinalized("Context was already finalized.")
        digest = self._ctx.finalize()
        self._ctx = None
        return digest

    def verify(self, signature: bytes) -> None:
        utils._check_bytes("signature", signature)
        if self._ctx is None:
            raise AlreadyFinalized("Context was already finalized.")

        # Consume the context before verifying so a mismatch still leaves
        # this object finalized.
        ctx, self._ctx = self._ctx, None
        ctx.verify(signature)
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/constant_time.py b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/constant_time.py
new file mode 100644
index 00000000..a02fa9c4
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/constant_time.py
@@ -0,0 +1,13 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+
+import hmac
+
+
def bytes_eq(a: bytes, b: bytes) -> bool:
    """Constant-time equality check for two byte strings.

    Raises TypeError unless both arguments are exactly ``bytes``.
    """
    if isinstance(a, bytes) and isinstance(b, bytes):
        return hmac.compare_digest(a, b)
    raise TypeError("a and b must be bytes.")
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/hashes.py b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/hashes.py
new file mode 100644
index 00000000..6bbab4c0
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/hashes.py
@@ -0,0 +1,259 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import abc
+import typing
+
+from cryptography import utils
+from cryptography.exceptions import AlreadyFinalized
+
+
class HashAlgorithm(metaclass=abc.ABCMeta):
    """Abstract description of a hash algorithm: name and sizes."""

    @property
    @abc.abstractmethod
    def name(self) -> str:
        """
        A string naming this algorithm (e.g. "sha256", "md5").
        """

    @property
    @abc.abstractmethod
    def digest_size(self) -> int:
        """
        The size of the resulting digest in bytes.
        """

    @property
    @abc.abstractmethod
    def block_size(self) -> typing.Optional[int]:
        """
        The internal block size of the hash function, or None if the hash
        function does not use blocks internally (e.g. SHA3).
        """
+
+
class HashContext(metaclass=abc.ABCMeta):
    """Incremental hashing interface (update / finalize / copy)."""

    @property
    @abc.abstractmethod
    def algorithm(self) -> HashAlgorithm:
        """
        A HashAlgorithm that will be used by this context.
        """

    @abc.abstractmethod
    def update(self, data: bytes) -> None:
        """
        Processes the provided bytes through the hash.
        """

    @abc.abstractmethod
    def finalize(self) -> bytes:
        """
        Finalizes the hash context and returns the hash digest as bytes.
        """

    @abc.abstractmethod
    def copy(self) -> "HashContext":
        """
        Return a HashContext that is a copy of the current context.
        """
+
+
class ExtendableOutputFunction(metaclass=abc.ABCMeta):
    """
    An interface for extendable output functions.
    """

    # Marker ABC: the SHAKE128/SHAKE256 classes below mix this in so callers
    # can isinstance-check for variable-length digests.
+
+
class Hash(HashContext):
    """Concrete HashContext backed by the OpenSSL backend."""

    _ctx: typing.Optional[HashContext]

    def __init__(
        self,
        algorithm: HashAlgorithm,
        backend: typing.Any = None,
        ctx: typing.Optional["HashContext"] = None,
    ) -> None:
        # ``backend`` is accepted for backwards compatibility but unused;
        # the OpenSSL backend is imported lazily below.
        if not isinstance(algorithm, HashAlgorithm):
            raise TypeError("Expected instance of hashes.HashAlgorithm.")
        self._algorithm = algorithm

        if ctx is None:
            from cryptography.hazmat.backends.openssl.backend import (
                backend as ossl,
            )

            self._ctx = ossl.create_hash_ctx(self.algorithm)
        else:
            # Used by copy() to clone an in-progress context.
            self._ctx = ctx

    @property
    def algorithm(self) -> HashAlgorithm:
        return self._algorithm

    def update(self, data: bytes) -> None:
        if self._ctx is None:
            raise AlreadyFinalized("Context was already finalized.")
        utils._check_byteslike("data", data)
        self._ctx.update(data)

    def copy(self) -> "Hash":
        if self._ctx is None:
            raise AlreadyFinalized("Context was already finalized.")
        return Hash(self.algorithm, ctx=self._ctx.copy())

    def finalize(self) -> bytes:
        # One-shot: the backend context is dropped so further use raises
        # AlreadyFinalized.
        if self._ctx is None:
            raise AlreadyFinalized("Context was already finalized.")
        digest = self._ctx.finalize()
        self._ctx = None
        return digest
+
+
class SHA1(HashAlgorithm):
    # 160-bit digest, 64-byte block.
    name = "sha1"
    digest_size = 20
    block_size = 64
+
+
class SHA512_224(HashAlgorithm):  # noqa: N801
    # SHA-512 truncated to a 224-bit digest; 128-byte block.
    name = "sha512-224"
    digest_size = 28
    block_size = 128
+
+
class SHA512_256(HashAlgorithm):  # noqa: N801
    # SHA-512 truncated to a 256-bit digest; 128-byte block.
    name = "sha512-256"
    digest_size = 32
    block_size = 128
+
+
class SHA224(HashAlgorithm):
    # 224-bit digest, 64-byte block.
    name = "sha224"
    digest_size = 28
    block_size = 64
+
+
class SHA256(HashAlgorithm):
    # 256-bit digest, 64-byte block.
    name = "sha256"
    digest_size = 32
    block_size = 64
+
+
class SHA384(HashAlgorithm):
    # 384-bit digest, 128-byte block.
    name = "sha384"
    digest_size = 48
    block_size = 128
+
+
class SHA512(HashAlgorithm):
    # 512-bit digest, 128-byte block.
    name = "sha512"
    digest_size = 64
    block_size = 128
+
+
class SHA3_224(HashAlgorithm):  # noqa: N801
    # block_size is None: no HMAC-style block size is exposed for SHA-3
    # (see HashAlgorithm.block_size).
    name = "sha3-224"
    digest_size = 28
    block_size = None
+
+
class SHA3_256(HashAlgorithm):  # noqa: N801
    # block_size is None: no HMAC-style block size is exposed for SHA-3.
    name = "sha3-256"
    digest_size = 32
    block_size = None
+
+
class SHA3_384(HashAlgorithm):  # noqa: N801
    # block_size is None: no HMAC-style block size is exposed for SHA-3.
    name = "sha3-384"
    digest_size = 48
    block_size = None
+
+
class SHA3_512(HashAlgorithm):  # noqa: N801
    # block_size is None: no HMAC-style block size is exposed for SHA-3.
    name = "sha3-512"
    digest_size = 64
    block_size = None
+
+
class SHAKE128(HashAlgorithm, ExtendableOutputFunction):
    """SHAKE128 extendable-output function with a caller-chosen digest size."""

    name = "shake128"
    block_size = None

    def __init__(self, digest_size: int):
        if not isinstance(digest_size, int):
            raise TypeError("digest_size must be an integer")
        if digest_size < 1:
            raise ValueError("digest_size must be a positive integer")
        self._digest_size = digest_size

    @property
    def digest_size(self) -> int:
        return self._digest_size
+
+
class SHAKE256(HashAlgorithm, ExtendableOutputFunction):
    """SHAKE256 extendable-output function with a caller-chosen digest size."""

    name = "shake256"
    block_size = None

    def __init__(self, digest_size: int):
        if not isinstance(digest_size, int):
            raise TypeError("digest_size must be an integer")
        if digest_size < 1:
            raise ValueError("digest_size must be a positive integer")
        self._digest_size = digest_size

    @property
    def digest_size(self) -> int:
        return self._digest_size
+
+
class MD5(HashAlgorithm):
    # 128-bit digest, 64-byte block.
    name = "md5"
    digest_size = 16
    block_size = 64
+
+
class BLAKE2b(HashAlgorithm):
    """BLAKE2b; only the full 64-byte digest size is accepted here."""

    name = "blake2b"
    block_size = 128
    _max_digest_size = 64
    _min_digest_size = 1

    def __init__(self, digest_size: int):
        if digest_size != 64:
            raise ValueError("Digest size must be 64")
        self._digest_size = digest_size

    @property
    def digest_size(self) -> int:
        return self._digest_size
+
+
class BLAKE2s(HashAlgorithm):
    """BLAKE2s; only the full 32-byte digest size is accepted here."""

    name = "blake2s"
    block_size = 64
    _max_digest_size = 32
    _min_digest_size = 1

    def __init__(self, digest_size: int):
        if digest_size != 32:
            raise ValueError("Digest size must be 32")
        self._digest_size = digest_size

    @property
    def digest_size(self) -> int:
        return self._digest_size
+
+
class SM3(HashAlgorithm):
    # 256-bit digest, 64-byte block.
    name = "sm3"
    digest_size = 32
    block_size = 64
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/hmac.py b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/hmac.py
new file mode 100644
index 00000000..8f1c0eae
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/hmac.py
@@ -0,0 +1,70 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+
+import typing
+
+from cryptography import utils
+from cryptography.exceptions import AlreadyFinalized
+from cryptography.hazmat.backends.openssl.hmac import _HMACContext
+from cryptography.hazmat.primitives import hashes
+
+
class HMAC(hashes.HashContext):
    """Keyed-hash message authentication code context backed by OpenSSL.

    One-shot: after finalize() or verify() any further use raises
    AlreadyFinalized.
    """

    _ctx: typing.Optional[_HMACContext]

    def __init__(
        self,
        key: bytes,
        algorithm: hashes.HashAlgorithm,
        backend: typing.Any = None,
        ctx=None,
    ):
        # ``backend`` is accepted for backwards compatibility but unused;
        # the OpenSSL backend is imported lazily below.
        if not isinstance(algorithm, hashes.HashAlgorithm):
            raise TypeError("Expected instance of hashes.HashAlgorithm.")
        self._algorithm = algorithm

        # Key is retained so copy() can construct a sibling context.
        self._key = key
        if ctx is None:
            from cryptography.hazmat.backends.openssl.backend import (
                backend as ossl,
            )

            self._ctx = ossl.create_hmac_ctx(key, self.algorithm)
        else:
            self._ctx = ctx

    @property
    def algorithm(self) -> hashes.HashAlgorithm:
        return self._algorithm

    def update(self, data: bytes) -> None:
        if self._ctx is None:
            raise AlreadyFinalized("Context was already finalized.")
        utils._check_byteslike("data", data)
        self._ctx.update(data)

    def copy(self) -> "HMAC":
        if self._ctx is None:
            raise AlreadyFinalized("Context was already finalized.")
        return HMAC(
            self._key,
            self.algorithm,
            ctx=self._ctx.copy(),
        )

    def finalize(self) -> bytes:
        if self._ctx is None:
            raise AlreadyFinalized("Context was already finalized.")
        digest = self._ctx.finalize()
        self._ctx = None
        return digest

    def verify(self, signature: bytes) -> None:
        utils._check_bytes("signature", signature)
        if self._ctx is None:
            raise AlreadyFinalized("Context was already finalized.")

        # Consume the context before verifying so a mismatch still leaves
        # this object finalized.
        ctx, self._ctx = self._ctx, None
        ctx.verify(signature)
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/kdf/__init__.py b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/kdf/__init__.py
new file mode 100644
index 00000000..38e2f8bc
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/kdf/__init__.py
@@ -0,0 +1,22 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+
+import abc
+
+
class KeyDerivationFunction(metaclass=abc.ABCMeta):
    """Abstract interface implemented by every KDF in this package."""

    @abc.abstractmethod
    def derive(self, key_material: bytes) -> bytes:
        """
        Deterministically generates and returns a new key based on the existing
        key material.
        """

    @abc.abstractmethod
    def verify(self, key_material: bytes, expected_key: bytes) -> None:
        """
        Checks whether the key generated by the key material matches the
        expected derived key. Raises an exception if they do not match.
        """
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/kdf/concatkdf.py b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/kdf/concatkdf.py
new file mode 100644
index 00000000..7bbce4ff
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/kdf/concatkdf.py
@@ -0,0 +1,123 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+
+import typing
+
+from cryptography import utils
+from cryptography.exceptions import AlreadyFinalized, InvalidKey
+from cryptography.hazmat.primitives import constant_time, hashes, hmac
+from cryptography.hazmat.primitives.kdf import KeyDerivationFunction
+
+
+def _int_to_u32be(n: int) -> bytes:
+ return n.to_bytes(length=4, byteorder="big")
+
+
def _common_args_checks(
    algorithm: hashes.HashAlgorithm,
    length: int,
    otherinfo: typing.Optional[bytes],
) -> None:
    """Validate the arguments shared by ConcatKDFHash and ConcatKDFHMAC."""
    # The 32-bit counter bounds how many output blocks can ever be produced.
    max_length = (2**32 - 1) * algorithm.digest_size
    if length > max_length:
        raise ValueError(f"Cannot derive keys larger than {max_length} bits.")
    if otherinfo is not None:
        utils._check_bytes("otherinfo", otherinfo)
+
+
def _concatkdf_derive(
    key_material: bytes,
    length: int,
    auxfn: typing.Callable[[], hashes.HashContext],
    otherinfo: bytes,
) -> bytes:
    # Core Concat KDF loop: repeatedly feed counter || key_material ||
    # otherinfo to a fresh auxiliary context (hash or HMAC, supplied by
    # *auxfn*) until at least *length* bytes exist, then truncate.
    utils._check_byteslike("key_material", key_material)
    output = [b""]
    outlen = 0
    counter = 1

    while length > outlen:
        # A new context per block; the counter differentiates the blocks.
        h = auxfn()
        h.update(_int_to_u32be(counter))
        h.update(key_material)
        h.update(otherinfo)
        output.append(h.finalize())
        outlen += len(output[-1])
        counter += 1

    return b"".join(output)[:length]
+
+
class ConcatKDFHash(KeyDerivationFunction):
    """Concatenation KDF using an unkeyed hash as the auxiliary function.

    Single-use: derive()/verify() may only be called once per instance.
    """

    def __init__(
        self,
        algorithm: hashes.HashAlgorithm,
        length: int,
        otherinfo: typing.Optional[bytes],
        backend: typing.Any = None,  # unused; kept for backward compatibility
    ):
        _common_args_checks(algorithm, length, otherinfo)
        self._algorithm = algorithm
        self._length = length
        # Absent otherinfo is treated as the empty byte string.
        self._otherinfo: bytes = otherinfo if otherinfo is not None else b""

        self._used = False

    def _hash(self) -> hashes.Hash:
        # Auxiliary-function factory handed to _concatkdf_derive.
        return hashes.Hash(self._algorithm)

    def derive(self, key_material: bytes) -> bytes:
        if self._used:
            raise AlreadyFinalized
        self._used = True
        return _concatkdf_derive(
            key_material, self._length, self._hash, self._otherinfo
        )

    def verify(self, key_material: bytes, expected_key: bytes) -> None:
        # Constant-time comparison to avoid leaking timing information.
        if not constant_time.bytes_eq(self.derive(key_material), expected_key):
            raise InvalidKey
+
+
class ConcatKDFHMAC(KeyDerivationFunction):
    """Concatenation KDF using HMAC as the auxiliary function.

    Single-use: derive()/verify() may only be called once per instance.
    """

    def __init__(
        self,
        algorithm: hashes.HashAlgorithm,
        length: int,
        salt: typing.Optional[bytes],
        otherinfo: typing.Optional[bytes],
        backend: typing.Any = None,  # unused; kept for backward compatibility
    ):
        _common_args_checks(algorithm, length, otherinfo)
        self._algorithm = algorithm
        self._length = length
        self._otherinfo: bytes = otherinfo if otherinfo is not None else b""

        # The default salt below needs a block size, so algorithms without
        # one cannot be used here.
        if algorithm.block_size is None:
            raise TypeError(f"{algorithm.name} is unsupported for ConcatKDF")

        if salt is None:
            # Default salt: one hash block of zero bytes.
            salt = b"\x00" * algorithm.block_size
        else:
            utils._check_bytes("salt", salt)

        self._salt = salt

        self._used = False

    def _hmac(self) -> hmac.HMAC:
        # Auxiliary-function factory handed to _concatkdf_derive; the salt
        # acts as the HMAC key.
        return hmac.HMAC(self._salt, self._algorithm)

    def derive(self, key_material: bytes) -> bytes:
        if self._used:
            raise AlreadyFinalized
        self._used = True
        return _concatkdf_derive(
            key_material, self._length, self._hmac, self._otherinfo
        )

    def verify(self, key_material: bytes, expected_key: bytes) -> None:
        # Constant-time comparison to avoid leaking timing information.
        if not constant_time.bytes_eq(self.derive(key_material), expected_key):
            raise InvalidKey
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/kdf/hkdf.py b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/kdf/hkdf.py
new file mode 100644
index 00000000..7d59a7ef
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/kdf/hkdf.py
@@ -0,0 +1,100 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+
+import typing
+
+from cryptography import utils
+from cryptography.exceptions import AlreadyFinalized, InvalidKey
+from cryptography.hazmat.primitives import constant_time, hashes, hmac
+from cryptography.hazmat.primitives.kdf import KeyDerivationFunction
+
+
class HKDF(KeyDerivationFunction):
    """HMAC-based extract-and-expand key derivation function (HKDF)."""

    def __init__(
        self,
        algorithm: hashes.HashAlgorithm,
        length: int,
        salt: typing.Optional[bytes],
        info: typing.Optional[bytes],
        backend: typing.Any = None,  # unused; kept for backward compatibility
    ):
        self._algorithm = algorithm

        if salt is None:
            # Absent salt defaults to a digest-sized block of zero bytes.
            salt = b"\x00" * self._algorithm.digest_size
        else:
            utils._check_bytes("salt", salt)

        self._salt = salt

        # The expand step (and its single-use bookkeeping) is delegated to
        # HKDFExpand; length/info validation happens there.
        self._hkdf_expand = HKDFExpand(self._algorithm, length, info)

    def _extract(self, key_material: bytes) -> bytes:
        # Extract step: PRK = HMAC(salt, key_material).
        h = hmac.HMAC(self._salt, self._algorithm)
        h.update(key_material)
        return h.finalize()

    def derive(self, key_material: bytes) -> bytes:
        utils._check_byteslike("key_material", key_material)
        return self._hkdf_expand.derive(self._extract(key_material))

    def verify(self, key_material: bytes, expected_key: bytes) -> None:
        # Constant-time comparison to avoid leaking timing information.
        if not constant_time.bytes_eq(self.derive(key_material), expected_key):
            raise InvalidKey
+
+
class HKDFExpand(KeyDerivationFunction):
    """The expand step of HKDF, usable on its own given an existing PRK.

    Single-use: derive()/verify() may only be called once per instance.
    """

    def __init__(
        self,
        algorithm: hashes.HashAlgorithm,
        length: int,
        info: typing.Optional[bytes],
        backend: typing.Any = None,  # unused; kept for backward compatibility
    ):
        self._algorithm = algorithm

        # The single-byte block counter below caps output at 255 blocks.
        max_length = 255 * algorithm.digest_size

        if length > max_length:
            raise ValueError(
                f"Cannot derive keys larger than {max_length} octets."
            )

        self._length = length

        if info is None:
            info = b""
        else:
            utils._check_bytes("info", info)

        self._info = info

        self._used = False

    def _expand(self, key_material: bytes) -> bytes:
        # T(i) = HMAC(PRK, T(i-1) || info || i); output is T(1) || T(2) || ...
        output = [b""]
        counter = 1

        while self._algorithm.digest_size * (len(output) - 1) < self._length:
            h = hmac.HMAC(key_material, self._algorithm)
            h.update(output[-1])
            h.update(self._info)
            h.update(bytes([counter]))
            output.append(h.finalize())
            counter += 1

        return b"".join(output)[: self._length]

    def derive(self, key_material: bytes) -> bytes:
        utils._check_byteslike("key_material", key_material)
        if self._used:
            raise AlreadyFinalized

        self._used = True
        return self._expand(key_material)

    def verify(self, key_material: bytes, expected_key: bytes) -> None:
        # Constant-time comparison to avoid leaking timing information.
        if not constant_time.bytes_eq(self.derive(key_material), expected_key):
            raise InvalidKey
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/kdf/kbkdf.py b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/kdf/kbkdf.py
new file mode 100644
index 00000000..7f185a9a
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/kdf/kbkdf.py
@@ -0,0 +1,297 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import typing
+
+from cryptography import utils
+from cryptography.exceptions import (
+ AlreadyFinalized,
+ InvalidKey,
+ UnsupportedAlgorithm,
+ _Reasons,
+)
+from cryptography.hazmat.primitives import (
+ ciphers,
+ cmac,
+ constant_time,
+ hashes,
+ hmac,
+)
+from cryptography.hazmat.primitives.kdf import KeyDerivationFunction
+
+
class Mode(utils.Enum):
    """KBKDF operating mode; only counter mode is implemented here."""

    CounterMode = "ctr"
+
+
class CounterLocation(utils.Enum):
    """Where the loop counter sits relative to the fixed input data."""

    BeforeFixed = "before_fixed"
    AfterFixed = "after_fixed"
    # Counter inserted inside the fixed data at 'break_location'.
    MiddleFixed = "middle_fixed"
+
+
class _KBKDFDeriver:
    """Shared core for KBKDFHMAC and KBKDFCMAC.

    Validates the (many) parameters once, then runs the counter-mode
    derivation loop using a caller-supplied PRF factory. Single-use.
    """

    def __init__(
        self,
        prf: typing.Callable,
        mode: Mode,
        length: int,
        rlen: int,
        llen: typing.Optional[int],
        location: CounterLocation,
        break_location: typing.Optional[int],
        label: typing.Optional[bytes],
        context: typing.Optional[bytes],
        fixed: typing.Optional[bytes],
    ):
        assert callable(prf)

        if not isinstance(mode, Mode):
            raise TypeError("mode must be of type Mode")

        if not isinstance(location, CounterLocation):
            raise TypeError("location must be of type CounterLocation")

        # break_location is required for MiddleFixed and meaningless
        # otherwise.
        if break_location is None and location is CounterLocation.MiddleFixed:
            raise ValueError("Please specify a break_location")

        if (
            break_location is not None
            and location != CounterLocation.MiddleFixed
        ):
            raise ValueError(
                "break_location is ignored when location is not"
                " CounterLocation.MiddleFixed"
            )

        if break_location is not None and not isinstance(break_location, int):
            raise TypeError("break_location must be an integer")

        if break_location is not None and break_location < 0:
            raise ValueError("break_location must be a positive integer")

        # 'fixed' replaces label/context entirely; supplying both is an
        # error rather than silently ignoring one.
        if (label or context) and fixed:
            raise ValueError(
                "When supplying fixed data, " "label and context are ignored."
            )

        if rlen is None or not self._valid_byte_length(rlen):
            raise ValueError("rlen must be between 1 and 4")

        # llen is only needed to encode the length field when we build the
        # fixed data ourselves.
        if llen is None and fixed is None:
            raise ValueError("Please specify an llen")

        if llen is not None and not isinstance(llen, int):
            raise TypeError("llen must be an integer")

        if label is None:
            label = b""

        if context is None:
            context = b""

        utils._check_bytes("label", label)
        utils._check_bytes("context", context)
        self._prf = prf
        self._mode = mode
        self._length = length
        self._rlen = rlen
        self._llen = llen
        self._location = location
        self._break_location = break_location
        self._label = label
        self._context = context
        self._used = False
        self._fixed_data = fixed

    @staticmethod
    def _valid_byte_length(value: int) -> bool:
        # A counter width is valid when encoding the value 1 in 'value'
        # bytes yields between 1 and 4 bytes, i.e. 1 <= value <= 4.
        if not isinstance(value, int):
            raise TypeError("value must be of type int")

        value_bin = utils.int_to_bytes(1, value)
        if not 1 <= len(value_bin) <= 4:
            return False
        return True

    def derive(self, key_material: bytes, prf_output_size: int) -> bytes:
        """Run the counter-mode loop; may only be called once."""
        if self._used:
            raise AlreadyFinalized

        utils._check_byteslike("key_material", key_material)
        self._used = True

        # inverse floor division (equivalent to ceiling)
        rounds = -(-self._length // prf_output_size)

        output = [b""]

        # For counter mode, the number of iterations shall not be
        # larger than 2^r-1, where r <= 32 is the binary length of the counter
        # This ensures that the counter values used as an input to the
        # PRF will not repeat during a particular call to the KDF function.
        r_bin = utils.int_to_bytes(1, self._rlen)
        if rounds > pow(2, len(r_bin) * 8) - 1:
            raise ValueError("There are too many iterations.")

        fixed = self._generate_fixed_input()

        # Split the fixed data around the counter position.
        if self._location == CounterLocation.BeforeFixed:
            data_before_ctr = b""
            data_after_ctr = fixed
        elif self._location == CounterLocation.AfterFixed:
            data_before_ctr = fixed
            data_after_ctr = b""
        else:
            # MiddleFixed: counter goes at 'break_location' inside the
            # fixed data.
            if isinstance(
                self._break_location, int
            ) and self._break_location > len(fixed):
                raise ValueError("break_location offset > len(fixed)")
            data_before_ctr = fixed[: self._break_location]
            data_after_ctr = fixed[self._break_location :]

        for i in range(1, rounds + 1):
            # Fresh PRF context per round, keyed with the key material.
            h = self._prf(key_material)

            counter = utils.int_to_bytes(i, self._rlen)
            input_data = data_before_ctr + counter + data_after_ctr

            h.update(input_data)

            output.append(h.finalize())

        return b"".join(output)[: self._length]

    def _generate_fixed_input(self) -> bytes:
        # Caller-supplied fixed data wins; otherwise build
        # label || 0x00 || context || encoded-output-length.
        if self._fixed_data and isinstance(self._fixed_data, bytes):
            return self._fixed_data

        l_val = utils.int_to_bytes(self._length * 8, self._llen)

        return b"".join([self._label, b"\x00", self._context, l_val])
+
+
class KBKDFHMAC(KeyDerivationFunction):
    """Counter-mode key-based KDF using HMAC as the PRF."""

    def __init__(
        self,
        algorithm: hashes.HashAlgorithm,
        mode: Mode,
        length: int,
        rlen: int,
        llen: typing.Optional[int],
        location: CounterLocation,
        label: typing.Optional[bytes],
        context: typing.Optional[bytes],
        fixed: typing.Optional[bytes],
        backend: typing.Any = None,  # unused; kept for backward compatibility
        *,
        break_location: typing.Optional[int] = None,
    ):
        if not isinstance(algorithm, hashes.HashAlgorithm):
            raise UnsupportedAlgorithm(
                "Algorithm supplied is not a supported hash algorithm.",
                _Reasons.UNSUPPORTED_HASH,
            )

        # Imported locally to avoid a circular import at module load time.
        from cryptography.hazmat.backends.openssl.backend import (
            backend as ossl,
        )

        if not ossl.hmac_supported(algorithm):
            raise UnsupportedAlgorithm(
                "Algorithm supplied is not a supported hmac algorithm.",
                _Reasons.UNSUPPORTED_HASH,
            )

        self._algorithm = algorithm

        # Parameter validation and the derivation loop live in the shared
        # deriver; this raises on bad arguments.
        self._deriver = _KBKDFDeriver(
            self._prf,
            mode,
            length,
            rlen,
            llen,
            location,
            break_location,
            label,
            context,
            fixed,
        )

    def _prf(self, key_material: bytes) -> hmac.HMAC:
        # PRF factory: a fresh HMAC keyed with the key material.
        return hmac.HMAC(key_material, self._algorithm)

    def derive(self, key_material: bytes) -> bytes:
        return self._deriver.derive(key_material, self._algorithm.digest_size)

    def verify(self, key_material: bytes, expected_key: bytes) -> None:
        # Constant-time comparison to avoid leaking timing information.
        if not constant_time.bytes_eq(self.derive(key_material), expected_key):
            raise InvalidKey
+
+
class KBKDFCMAC(KeyDerivationFunction):
    """Counter-mode key-based KDF using CMAC as the PRF.

    ``algorithm`` is a block-cipher *class* (not an instance); it is
    instantiated with the key material when derive() is called.
    """

    def __init__(
        self,
        algorithm,
        mode: Mode,
        length: int,
        rlen: int,
        llen: typing.Optional[int],
        location: CounterLocation,
        label: typing.Optional[bytes],
        context: typing.Optional[bytes],
        fixed: typing.Optional[bytes],
        backend: typing.Any = None,  # unused; kept for backward compatibility
        *,
        break_location: typing.Optional[int] = None,
    ):
        if not issubclass(
            algorithm, ciphers.BlockCipherAlgorithm
        ) or not issubclass(algorithm, ciphers.CipherAlgorithm):
            raise UnsupportedAlgorithm(
                "Algorithm supplied is not a supported cipher algorithm.",
                _Reasons.UNSUPPORTED_CIPHER,
            )

        self._algorithm = algorithm
        # Set in derive() once the key material is known.
        self._cipher: typing.Optional[ciphers.BlockCipherAlgorithm] = None

        # Parameter validation and the derivation loop live in the shared
        # deriver; this raises on bad arguments.
        self._deriver = _KBKDFDeriver(
            self._prf,
            mode,
            length,
            rlen,
            llen,
            location,
            break_location,
            label,
            context,
            fixed,
        )

    def _prf(self, _: bytes) -> cmac.CMAC:
        # The key is carried via self._cipher, so the PRF factory ignores
        # the key-material argument the deriver passes in.
        assert self._cipher is not None

        return cmac.CMAC(self._cipher)

    def derive(self, key_material: bytes) -> bytes:
        # Instantiate the cipher with the key material; this also validates
        # the key length for the chosen algorithm.
        self._cipher = self._algorithm(key_material)

        assert self._cipher is not None

        # Imported locally to avoid a circular import at module load time.
        from cryptography.hazmat.backends.openssl.backend import (
            backend as ossl,
        )

        if not ossl.cmac_algorithm_supported(self._cipher):
            raise UnsupportedAlgorithm(
                "Algorithm supplied is not a supported cipher algorithm.",
                _Reasons.UNSUPPORTED_CIPHER,
            )

        return self._deriver.derive(key_material, self._cipher.block_size // 8)

    def verify(self, key_material: bytes, expected_key: bytes) -> None:
        # Constant-time comparison to avoid leaking timing information.
        if not constant_time.bytes_eq(self.derive(key_material), expected_key):
            raise InvalidKey
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/kdf/pbkdf2.py b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/kdf/pbkdf2.py
new file mode 100644
index 00000000..8d23f8c2
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/kdf/pbkdf2.py
@@ -0,0 +1,65 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+
+import typing
+
+from cryptography import utils
+from cryptography.exceptions import (
+ AlreadyFinalized,
+ InvalidKey,
+ UnsupportedAlgorithm,
+ _Reasons,
+)
+from cryptography.hazmat.primitives import constant_time, hashes
+from cryptography.hazmat.primitives.kdf import KeyDerivationFunction
+
+
class PBKDF2HMAC(KeyDerivationFunction):
    """PBKDF2 with HMAC as the PRF.

    Single-use: derive()/verify() may only be called once per instance.
    """

    def __init__(
        self,
        algorithm: hashes.HashAlgorithm,
        length: int,
        salt: bytes,
        iterations: int,
        backend: typing.Any = None,  # unused; kept for backward compatibility
    ):
        # Imported locally to avoid a circular import at module load time.
        from cryptography.hazmat.backends.openssl.backend import (
            backend as ossl,
        )

        if not ossl.pbkdf2_hmac_supported(algorithm):
            raise UnsupportedAlgorithm(
                "{} is not supported for PBKDF2 by this backend.".format(
                    algorithm.name
                ),
                _Reasons.UNSUPPORTED_HASH,
            )
        self._used = False
        self._algorithm = algorithm
        self._length = length
        utils._check_bytes("salt", salt)
        self._salt = salt
        self._iterations = iterations

    def derive(self, key_material: bytes) -> bytes:
        if self._used:
            raise AlreadyFinalized("PBKDF2 instances can only be used once.")
        self._used = True

        utils._check_byteslike("key_material", key_material)
        from cryptography.hazmat.backends.openssl.backend import backend

        # The iteration loop itself runs inside OpenSSL.
        return backend.derive_pbkdf2_hmac(
            self._algorithm,
            self._length,
            self._salt,
            self._iterations,
            key_material,
        )

    def verify(self, key_material: bytes, expected_key: bytes) -> None:
        # Constant-time comparison to avoid leaking timing information.
        derived_key = self.derive(key_material)
        if not constant_time.bytes_eq(derived_key, expected_key):
            raise InvalidKey("Keys do not match.")
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/kdf/scrypt.py b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/kdf/scrypt.py
new file mode 100644
index 00000000..286f4388
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/kdf/scrypt.py
@@ -0,0 +1,73 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+
+import sys
+import typing
+
+from cryptography import utils
+from cryptography.exceptions import (
+ AlreadyFinalized,
+ InvalidKey,
+ UnsupportedAlgorithm,
+)
+from cryptography.hazmat.primitives import constant_time
+from cryptography.hazmat.primitives.kdf import KeyDerivationFunction
+
+# This is used by the scrypt tests to skip tests that require more memory
+# than the MEM_LIMIT
+_MEM_LIMIT = sys.maxsize // 2
+
+
class Scrypt(KeyDerivationFunction):
    """scrypt memory-hard KDF.

    Single-use: derive()/verify() may only be called once per instance.
    """

    def __init__(
        self,
        salt: bytes,
        length: int,
        n: int,
        r: int,
        p: int,
        backend: typing.Any = None,  # unused; kept for backward compatibility
    ):
        # Imported locally to avoid a circular import at module load time.
        from cryptography.hazmat.backends.openssl.backend import (
            backend as ossl,
        )

        if not ossl.scrypt_supported():
            raise UnsupportedAlgorithm(
                "This version of OpenSSL does not support scrypt"
            )
        self._length = length
        utils._check_bytes("salt", salt)
        # n (cost parameter) must be a power of two greater than 1;
        # (n & (n - 1)) == 0 is the power-of-two test.
        if n < 2 or (n & (n - 1)) != 0:
            raise ValueError("n must be greater than 1 and be a power of 2.")

        if r < 1:
            raise ValueError("r must be greater than or equal to 1.")

        if p < 1:
            raise ValueError("p must be greater than or equal to 1.")

        self._used = False
        self._salt = salt
        self._n = n
        self._r = r
        self._p = p

    def derive(self, key_material: bytes) -> bytes:
        if self._used:
            raise AlreadyFinalized("Scrypt instances can only be used once.")
        self._used = True

        utils._check_byteslike("key_material", key_material)
        from cryptography.hazmat.backends.openssl.backend import backend

        # The actual computation happens inside OpenSSL.
        return backend.derive_scrypt(
            key_material, self._salt, self._length, self._n, self._r, self._p
        )

    def verify(self, key_material: bytes, expected_key: bytes) -> None:
        # Constant-time comparison to avoid leaking timing information.
        derived_key = self.derive(key_material)
        if not constant_time.bytes_eq(derived_key, expected_key):
            raise InvalidKey("Keys do not match.")
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/kdf/x963kdf.py b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/kdf/x963kdf.py
new file mode 100644
index 00000000..4ab64d08
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/kdf/x963kdf.py
@@ -0,0 +1,60 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+
+import typing
+
+from cryptography import utils
+from cryptography.exceptions import AlreadyFinalized, InvalidKey
+from cryptography.hazmat.primitives import constant_time, hashes
+from cryptography.hazmat.primitives.kdf import KeyDerivationFunction
+
+
+def _int_to_u32be(n: int) -> bytes:
+ return n.to_bytes(length=4, byteorder="big")
+
+
class X963KDF(KeyDerivationFunction):
    """ANSI X9.63 KDF: hash(key_material || counter || sharedinfo) blocks.

    Single-use: derive()/verify() may only be called once per instance.
    """

    def __init__(
        self,
        algorithm: hashes.HashAlgorithm,
        length: int,
        sharedinfo: typing.Optional[bytes],
        backend: typing.Any = None,  # unused; kept for backward compatibility
    ):
        # The 32-bit counter bounds the total obtainable output.
        max_len = algorithm.digest_size * (2**32 - 1)
        if length > max_len:
            # Fix: the previous message was an f-string with no placeholder
            # that hard-coded "73,646 bits" regardless of the hash; report
            # the actual limit for the chosen algorithm instead.
            raise ValueError(f"Cannot derive keys larger than {max_len} bits.")
        if sharedinfo is not None:
            utils._check_bytes("sharedinfo", sharedinfo)

        self._algorithm = algorithm
        self._length = length
        self._sharedinfo = sharedinfo
        self._used = False

    def derive(self, key_material: bytes) -> bytes:
        """Derive and return the key; raises AlreadyFinalized on reuse."""
        if self._used:
            raise AlreadyFinalized
        self._used = True
        utils._check_byteslike("key_material", key_material)
        output = [b""]
        outlen = 0
        counter = 1

        while self._length > outlen:
            h = hashes.Hash(self._algorithm)
            h.update(key_material)
            h.update(_int_to_u32be(counter))
            if self._sharedinfo is not None:
                h.update(self._sharedinfo)
            output.append(h.finalize())
            outlen += len(output[-1])
            counter += 1

        return b"".join(output)[: self._length]

    def verify(self, key_material: bytes, expected_key: bytes) -> None:
        """Derive and compare in constant time; raises InvalidKey on mismatch."""
        if not constant_time.bytes_eq(self.derive(key_material), expected_key):
            raise InvalidKey
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/keywrap.py b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/keywrap.py
new file mode 100644
index 00000000..64771ca3
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/keywrap.py
@@ -0,0 +1,176 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+
+import typing
+
+from cryptography.hazmat.primitives.ciphers import Cipher
+from cryptography.hazmat.primitives.ciphers.algorithms import AES
+from cryptography.hazmat.primitives.ciphers.modes import ECB
+from cryptography.hazmat.primitives.constant_time import bytes_eq
+
+
def _wrap_core(
    wrapping_key: bytes,
    a: bytes,
    r: typing.List[bytes],
) -> bytes:
    """Run the RFC 3394 wrap rounds over 8-byte blocks *r* with IV *a*.

    Mutates *r* in place and returns A || R[0] || ... || R[n-1].
    """
    # RFC 3394 Key Wrap - 2.2.1 (index method)
    encryptor = Cipher(AES(wrapping_key), ECB()).encryptor()
    n = len(r)
    for j in range(6):
        for i in range(n):
            # every encryption operation is a discrete 16 byte chunk (because
            # AES has a 128-bit block size) and since we're using ECB it is
            # safe to reuse the encryptor for the entire operation
            b = encryptor.update(a + r[i])
            # XOR the round constant (n*j)+i+1 into the high half per the RFC.
            a = (
                int.from_bytes(b[:8], byteorder="big") ^ ((n * j) + i + 1)
            ).to_bytes(length=8, byteorder="big")
            r[i] = b[-8:]

    assert encryptor.finalize() == b""

    return a + b"".join(r)
+
+
def aes_key_wrap(
    wrapping_key: bytes,
    key_to_wrap: bytes,
    backend: typing.Any = None,
) -> bytes:
    """Wrap *key_to_wrap* with AES key wrap (RFC 3394) under *wrapping_key*."""
    if len(wrapping_key) not in (16, 24, 32):
        raise ValueError("The wrapping key must be a valid AES key length")

    if len(key_to_wrap) < 16:
        raise ValueError("The key to wrap must be at least 16 bytes")

    if len(key_to_wrap) % 8 != 0:
        raise ValueError("The key to wrap must be a multiple of 8 bytes")

    # Split the key into 64-bit blocks and run the wrap rounds with the
    # default initial value.
    blocks = [key_to_wrap[off : off + 8] for off in range(0, len(key_to_wrap), 8)]
    return _wrap_core(
        wrapping_key, b"\xa6\xa6\xa6\xa6\xa6\xa6\xa6\xa6", blocks
    )
+
+
def _unwrap_core(
    wrapping_key: bytes,
    a: bytes,
    r: typing.List[bytes],
) -> typing.Tuple[bytes, typing.List[bytes]]:
    """Run the RFC 3394 unwrap rounds; return the recovered IV and blocks.

    Mutates *r* in place. The caller is responsible for checking the
    recovered IV against the expected value.
    """
    # Implement RFC 3394 Key Unwrap - 2.2.2 (index method)
    decryptor = Cipher(AES(wrapping_key), ECB()).decryptor()
    n = len(r)
    for j in reversed(range(6)):
        for i in reversed(range(n)):
            # Undo the round-constant XOR before decrypting A || R[i].
            atr = (
                int.from_bytes(a, byteorder="big") ^ ((n * j) + i + 1)
            ).to_bytes(length=8, byteorder="big") + r[i]
            # every decryption operation is a discrete 16 byte chunk so
            # it is safe to reuse the decryptor for the entire operation
            b = decryptor.update(atr)
            a = b[:8]
            r[i] = b[-8:]

    assert decryptor.finalize() == b""
    return a, r
+
+
def aes_key_wrap_with_padding(
    wrapping_key: bytes,
    key_to_wrap: bytes,
    backend: typing.Any = None,
) -> bytes:
    """Wrap *key_to_wrap* using AES key wrap with padding (RFC 5649)."""
    if len(wrapping_key) not in [16, 24, 32]:
        raise ValueError("The wrapping key must be a valid AES key length")

    # Alternative IV: fixed 4-byte prefix plus the unpadded key length.
    aiv = b"\xA6\x59\x59\xA6" + len(key_to_wrap).to_bytes(
        length=4, byteorder="big"
    )
    # pad the key to wrap if necessary
    pad = (8 - (len(key_to_wrap) % 8)) % 8
    key_to_wrap = key_to_wrap + b"\x00" * pad
    if len(key_to_wrap) == 8:
        # RFC 5649 - 4.1 - exactly 8 octets after padding
        encryptor = Cipher(AES(wrapping_key), ECB()).encryptor()
        b = encryptor.update(aiv + key_to_wrap)
        assert encryptor.finalize() == b""
        return b
    else:
        r = [key_to_wrap[i : i + 8] for i in range(0, len(key_to_wrap), 8)]
        return _wrap_core(wrapping_key, aiv, r)
+
+
def aes_key_unwrap_with_padding(
    wrapping_key: bytes,
    wrapped_key: bytes,
    backend: typing.Any = None,
) -> bytes:
    """Unwrap an RFC 5649 (padded) wrapped key and strip its zero padding.

    Raises InvalidUnwrap if any integrity check fails.
    """
    if len(wrapped_key) < 16:
        raise InvalidUnwrap("Must be at least 16 bytes")

    if len(wrapping_key) not in [16, 24, 32]:
        raise ValueError("The wrapping key must be a valid AES key length")

    if len(wrapped_key) == 16:
        # RFC 5649 - 4.2 - exactly two 64-bit blocks
        decryptor = Cipher(AES(wrapping_key), ECB()).decryptor()
        out = decryptor.update(wrapped_key)
        assert decryptor.finalize() == b""
        a = out[:8]
        data = out[8:]
        n = 1
    else:
        r = [wrapped_key[i : i + 8] for i in range(0, len(wrapped_key), 8)]
        encrypted_aiv = r.pop(0)
        n = len(r)
        a, r = _unwrap_core(wrapping_key, encrypted_aiv, r)
        data = b"".join(r)

    # 1) Check that MSB(32,A) = A65959A6.
    # 2) Check that 8*(n-1) < LSB(32,A) <= 8*n. If so, let
    # MLI = LSB(32,A).
    # 3) Let b = (8*n)-MLI, and then check that the rightmost b octets of
    # the output data are zero.
    mli = int.from_bytes(a[4:], byteorder="big")
    b = (8 * n) - mli
    if (
        not bytes_eq(a[:4], b"\xa6\x59\x59\xa6")
        or not 8 * (n - 1) < mli <= 8 * n
        or (b != 0 and not bytes_eq(data[-b:], b"\x00" * b))
    ):
        raise InvalidUnwrap()

    if b == 0:
        return data
    else:
        # Strip the b zero padding bytes appended at wrap time.
        return data[:-b]
+
+
def aes_key_unwrap(
    wrapping_key: bytes,
    wrapped_key: bytes,
    backend: typing.Any = None,
) -> bytes:
    """Unwrap an RFC 3394 wrapped key; raises InvalidUnwrap on failure."""
    if len(wrapped_key) < 24:
        raise InvalidUnwrap("Must be at least 24 bytes")

    if len(wrapped_key) % 8 != 0:
        raise InvalidUnwrap("The wrapped key must be a multiple of 8 bytes")

    if len(wrapping_key) not in (16, 24, 32):
        raise ValueError("The wrapping key must be a valid AES key length")

    # First 64-bit block is the encrypted IV; the rest carry the key data.
    chunks = [wrapped_key[off : off + 8] for off in range(0, len(wrapped_key), 8)]
    a, r = _unwrap_core(wrapping_key, chunks[0], chunks[1:])

    # The recovered IV must match the RFC 3394 default initial value.
    if not bytes_eq(a, b"\xa6\xa6\xa6\xa6\xa6\xa6\xa6\xa6"):
        raise InvalidUnwrap()

    return b"".join(r)
+
+
class InvalidUnwrap(Exception):
    """Raised when a wrapped key fails an integrity check during unwrap."""

    pass
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/padding.py b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/padding.py
new file mode 100644
index 00000000..d6c1d915
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/padding.py
@@ -0,0 +1,224 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+
+import abc
+import typing
+
+from cryptography import utils
+from cryptography.exceptions import AlreadyFinalized
+from cryptography.hazmat.bindings._rust import (
+ check_ansix923_padding,
+ check_pkcs7_padding,
+)
+
+
class PaddingContext(metaclass=abc.ABCMeta):
    """Abstract streaming interface shared by padding and unpadding contexts."""

    @abc.abstractmethod
    def update(self, data: bytes) -> bytes:
        """
        Pads the provided bytes and returns any available data as bytes.
        """

    @abc.abstractmethod
    def finalize(self) -> bytes:
        """
        Finalize the padding, returns bytes.
        """
+
+
+def _byte_padding_check(block_size: int) -> None:
+ if not (0 <= block_size <= 2040):
+ raise ValueError("block_size must be in range(0, 2041).")
+
+ if block_size % 8 != 0:
+ raise ValueError("block_size must be a multiple of 8.")
+
+
def _byte_padding_update(
    buffer_: typing.Optional[bytes], data: bytes, block_size: int
) -> typing.Tuple[bytes, bytes]:
    """Append *data* to the running buffer and split off complete blocks.

    Returns (remaining_buffer, ready_output). A None buffer means the
    context was already finalized.
    """
    if buffer_ is None:
        raise AlreadyFinalized("Context was already finalized.")

    utils._check_byteslike("data", data)

    buffer_ += bytes(data)

    bs = block_size // 8
    ready = (len(buffer_) // bs) * bs
    return buffer_[ready:], buffer_[:ready]
+
+
+def _byte_padding_pad(
+ buffer_: typing.Optional[bytes],
+ block_size: int,
+ paddingfn: typing.Callable[[int], bytes],
+) -> bytes:
+ if buffer_ is None:
+ raise AlreadyFinalized("Context was already finalized.")
+
+ pad_size = block_size // 8 - len(buffer_)
+ return buffer_ + paddingfn(pad_size)
+
+
def _byte_unpadding_update(
    buffer_: typing.Optional[bytes], data: bytes, block_size: int
) -> typing.Tuple[bytes, bytes]:
    """Buffer *data*, releasing all complete blocks except the last one.

    The final block is held back because it may contain the padding.
    Returns (remaining_buffer, ready_output).
    """
    if buffer_ is None:
        raise AlreadyFinalized("Context was already finalized.")

    utils._check_byteslike("data", data)

    buffer_ += bytes(data)

    bs = block_size // 8
    releasable = max(len(buffer_) // bs - 1, 0) * bs
    return buffer_[releasable:], buffer_[:releasable]
+
+
+def _byte_unpadding_check(
+ buffer_: typing.Optional[bytes],
+ block_size: int,
+ checkfn: typing.Callable[[bytes], int],
+) -> bytes:
+ if buffer_ is None:
+ raise AlreadyFinalized("Context was already finalized.")
+
+ if len(buffer_) != block_size // 8:
+ raise ValueError("Invalid padding bytes.")
+
+ valid = checkfn(buffer_)
+
+ if not valid:
+ raise ValueError("Invalid padding bytes.")
+
+ pad_size = buffer_[-1]
+ return buffer_[:-pad_size]
+
+
class PKCS7:
    """PKCS #7 padding scheme for a given block size in bits."""

    def __init__(self, block_size: int):
        # Raises ValueError for invalid sizes.
        _byte_padding_check(block_size)
        self.block_size = block_size

    def padder(self) -> PaddingContext:
        # Contexts are single-use; create a new one per message.
        return _PKCS7PaddingContext(self.block_size)

    def unpadder(self) -> PaddingContext:
        return _PKCS7UnpaddingContext(self.block_size)
+
+
class _PKCS7PaddingContext(PaddingContext):
    """Streaming PKCS #7 padder; single-use."""

    # Becomes None after finalize() to block further use.
    _buffer: typing.Optional[bytes]

    def __init__(self, block_size: int):
        self.block_size = block_size
        # TODO: more copies than necessary, we should use zero-buffer (#193)
        self._buffer = b""

    def update(self, data: bytes) -> bytes:
        self._buffer, result = _byte_padding_update(
            self._buffer, data, self.block_size
        )
        return result

    def _padding(self, size: int) -> bytes:
        # PKCS #7: 'size' bytes, each of value 'size'.
        return bytes([size]) * size

    def finalize(self) -> bytes:
        result = _byte_padding_pad(
            self._buffer, self.block_size, self._padding
        )
        self._buffer = None
        return result
+
+
class _PKCS7UnpaddingContext(PaddingContext):
    """Streaming PKCS #7 unpadder; single-use."""

    # Becomes None after finalize() to block further use.
    _buffer: typing.Optional[bytes]

    def __init__(self, block_size: int):
        self.block_size = block_size
        # TODO: more copies than necessary, we should use zero-buffer (#193)
        self._buffer = b""

    def update(self, data: bytes) -> bytes:
        # Holds back the final block, which may contain the padding.
        self._buffer, result = _byte_unpadding_update(
            self._buffer, data, self.block_size
        )
        return result

    def finalize(self) -> bytes:
        # Validation is delegated to the Rust padding checker.
        result = _byte_unpadding_check(
            self._buffer, self.block_size, check_pkcs7_padding
        )
        self._buffer = None
        return result
+
+
class ANSIX923:
    """ANSI X9.23 padding scheme for a given block size in bits."""

    def __init__(self, block_size: int):
        # Raises ValueError for invalid sizes.
        _byte_padding_check(block_size)
        self.block_size = block_size

    def padder(self) -> PaddingContext:
        # Contexts are single-use; create a new one per message.
        return _ANSIX923PaddingContext(self.block_size)

    def unpadder(self) -> PaddingContext:
        return _ANSIX923UnpaddingContext(self.block_size)
+
+
class _ANSIX923PaddingContext(PaddingContext):
    """Streaming ANSI X9.23 padder; single-use."""

    # Becomes None after finalize() to block further use.
    _buffer: typing.Optional[bytes]

    def __init__(self, block_size: int):
        self.block_size = block_size
        # TODO: more copies than necessary, we should use zero-buffer (#193)
        self._buffer = b""

    def update(self, data: bytes) -> bytes:
        self._buffer, result = _byte_padding_update(
            self._buffer, data, self.block_size
        )
        return result

    def _padding(self, size: int) -> bytes:
        # ANSI X9.23: zero bytes followed by a single length byte.
        return bytes([0]) * (size - 1) + bytes([size])

    def finalize(self) -> bytes:
        result = _byte_padding_pad(
            self._buffer, self.block_size, self._padding
        )
        self._buffer = None
        return result
+
+
class _ANSIX923UnpaddingContext(PaddingContext):
    """Streaming ANSI X9.23 unpadder; single-use."""

    # Becomes None after finalize() to block further use.
    _buffer: typing.Optional[bytes]

    def __init__(self, block_size: int):
        self.block_size = block_size
        # TODO: more copies than necessary, we should use zero-buffer (#193)
        self._buffer = b""

    def update(self, data: bytes) -> bytes:
        # Holds back the final block, which may contain the padding.
        self._buffer, result = _byte_unpadding_update(
            self._buffer, data, self.block_size
        )
        return result

    def finalize(self) -> bytes:
        # Validation is delegated to the Rust padding checker.
        result = _byte_unpadding_check(
            self._buffer,
            self.block_size,
            check_ansix923_padding,
        )
        self._buffer = None
        return result
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/poly1305.py b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/poly1305.py
new file mode 100644
index 00000000..7fcf4a50
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/poly1305.py
@@ -0,0 +1,60 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import typing
+
+from cryptography import utils
+from cryptography.exceptions import (
+ AlreadyFinalized,
+ UnsupportedAlgorithm,
+ _Reasons,
+)
+from cryptography.hazmat.backends.openssl.poly1305 import _Poly1305Context
+
+
+class Poly1305:
+    """One-shot Poly1305 MAC backed by OpenSSL.
+
+    The context is invalidated after finalize()/verify(); reuse raises
+    AlreadyFinalized.
+    """
+
+    _ctx: typing.Optional[_Poly1305Context]  # None after finalize/verify
+
+    def __init__(self, key: bytes):
+        # Imported lazily to avoid a circular import at module load time.
+        from cryptography.hazmat.backends.openssl.backend import backend
+
+        if not backend.poly1305_supported():
+            raise UnsupportedAlgorithm(
+                "poly1305 is not supported by this version of OpenSSL.",
+                _Reasons.UNSUPPORTED_MAC,
+            )
+        self._ctx = backend.create_poly1305_ctx(key)
+
+    def update(self, data: bytes) -> None:
+        """Feed more data into the MAC; raises AlreadyFinalized if finished."""
+        if self._ctx is None:
+            raise AlreadyFinalized("Context was already finalized.")
+        utils._check_byteslike("data", data)
+        self._ctx.update(data)
+
+    def finalize(self) -> bytes:
+        """Return the 16-byte tag and invalidate the context."""
+        if self._ctx is None:
+            raise AlreadyFinalized("Context was already finalized.")
+        mac = self._ctx.finalize()
+        self._ctx = None
+        return mac
+
+    def verify(self, tag: bytes) -> None:
+        """Constant-time comparison of *tag* against the computed MAC.
+
+        Raises InvalidSignature (from the backend context) on mismatch.
+        """
+        utils._check_bytes("tag", tag)
+        if self._ctx is None:
+            raise AlreadyFinalized("Context was already finalized.")
+
+        # Swap-and-clear first so the context is invalidated even if
+        # verification raises.
+        ctx, self._ctx = self._ctx, None
+        ctx.verify(tag)
+
+    @classmethod
+    def generate_tag(cls, key: bytes, data: bytes) -> bytes:
+        """Convenience one-shot: MAC *data* with *key*."""
+        p = Poly1305(key)
+        p.update(data)
+        return p.finalize()
+
+    @classmethod
+    def verify_tag(cls, key: bytes, data: bytes, tag: bytes) -> None:
+        """Convenience one-shot: verify *tag* over *data* with *key*."""
+        p = Poly1305(key)
+        p.update(data)
+        p.verify(tag)
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/serialization/__init__.py b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/serialization/__init__.py
new file mode 100644
index 00000000..213c4995
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/serialization/__init__.py
@@ -0,0 +1,62 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+
+from cryptography.hazmat.primitives._serialization import (
+ BestAvailableEncryption,
+ Encoding,
+ KeySerializationEncryption,
+ NoEncryption,
+ ParameterFormat,
+ PrivateFormat,
+ PublicFormat,
+ _KeySerializationEncryption,
+)
+from cryptography.hazmat.primitives.serialization.base import (
+ load_der_parameters,
+ load_der_private_key,
+ load_der_public_key,
+ load_pem_parameters,
+ load_pem_private_key,
+ load_pem_public_key,
+)
+from cryptography.hazmat.primitives.serialization.ssh import (
+ SSHCertificate,
+ SSHCertificateBuilder,
+ SSHCertificateType,
+ SSHCertPrivateKeyTypes,
+ SSHCertPublicKeyTypes,
+ SSHPrivateKeyTypes,
+ SSHPublicKeyTypes,
+ load_ssh_private_key,
+ load_ssh_public_identity,
+ load_ssh_public_key,
+)
+
+__all__ = [
+ "load_der_parameters",
+ "load_der_private_key",
+ "load_der_public_key",
+ "load_pem_parameters",
+ "load_pem_private_key",
+ "load_pem_public_key",
+ "load_ssh_private_key",
+ "load_ssh_public_identity",
+ "load_ssh_public_key",
+ "Encoding",
+ "PrivateFormat",
+ "PublicFormat",
+ "ParameterFormat",
+ "KeySerializationEncryption",
+ "BestAvailableEncryption",
+ "NoEncryption",
+ "_KeySerializationEncryption",
+ "SSHCertificateBuilder",
+ "SSHCertificate",
+ "SSHCertificateType",
+ "SSHCertPublicKeyTypes",
+ "SSHCertPrivateKeyTypes",
+ "SSHPrivateKeyTypes",
+ "SSHPublicKeyTypes",
+]
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/serialization/base.py b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/serialization/base.py
new file mode 100644
index 00000000..7956ce0f
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/serialization/base.py
@@ -0,0 +1,72 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+
+import typing
+
+from cryptography.hazmat.primitives.asymmetric import dh
+from cryptography.hazmat.primitives.asymmetric.types import (
+ PrivateKeyTypes,
+ PublicKeyTypes,
+)
+
+
+def load_pem_private_key(
+    data: bytes,
+    password: typing.Optional[bytes],
+    backend: typing.Any = None,
+    *,
+    unsafe_skip_rsa_key_validation: bool = False,
+) -> PrivateKeyTypes:
+    """Deserialize a PEM-encoded private key.
+
+    ``backend`` is accepted for backwards compatibility and ignored.
+    """
+    # Lazy import avoids a circular dependency at module load time.
+    from cryptography.hazmat.backends.openssl.backend import backend as ossl
+
+    return ossl.load_pem_private_key(
+        data, password, unsafe_skip_rsa_key_validation
+    )
+
+
+def load_pem_public_key(
+    data: bytes, backend: typing.Any = None
+) -> PublicKeyTypes:
+    """Deserialize a PEM-encoded public key. ``backend`` is ignored (legacy)."""
+    from cryptography.hazmat.backends.openssl.backend import backend as ossl
+
+    return ossl.load_pem_public_key(data)
+
+
+def load_pem_parameters(
+    data: bytes, backend: typing.Any = None
+) -> "dh.DHParameters":
+    """Deserialize PEM-encoded DH parameters. ``backend`` is ignored (legacy)."""
+    from cryptography.hazmat.backends.openssl.backend import backend as ossl
+
+    return ossl.load_pem_parameters(data)
+
+
+def load_der_private_key(
+    data: bytes,
+    password: typing.Optional[bytes],
+    backend: typing.Any = None,
+    *,
+    unsafe_skip_rsa_key_validation: bool = False,
+) -> PrivateKeyTypes:
+    """Deserialize a DER-encoded private key.
+
+    ``backend`` is accepted for backwards compatibility and ignored.
+    """
+    from cryptography.hazmat.backends.openssl.backend import backend as ossl
+
+    return ossl.load_der_private_key(
+        data, password, unsafe_skip_rsa_key_validation
+    )
+
+
+def load_der_public_key(
+    data: bytes, backend: typing.Any = None
+) -> PublicKeyTypes:
+    """Deserialize a DER-encoded public key. ``backend`` is ignored (legacy)."""
+    from cryptography.hazmat.backends.openssl.backend import backend as ossl
+
+    return ossl.load_der_public_key(data)
+
+
+def load_der_parameters(
+    data: bytes, backend: typing.Any = None
+) -> "dh.DHParameters":
+    """Deserialize DER-encoded DH parameters. ``backend`` is ignored (legacy)."""
+    from cryptography.hazmat.backends.openssl.backend import backend as ossl
+
+    return ossl.load_der_parameters(data)
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/serialization/pkcs12.py b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/serialization/pkcs12.py
new file mode 100644
index 00000000..1d36146a
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/serialization/pkcs12.py
@@ -0,0 +1,227 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import typing
+
+from cryptography import x509
+from cryptography.hazmat.primitives import serialization
+from cryptography.hazmat.primitives._serialization import PBES as PBES
+from cryptography.hazmat.primitives.asymmetric import (
+ dsa,
+ ec,
+ ed448,
+ ed25519,
+ rsa,
+)
+from cryptography.hazmat.primitives.asymmetric.types import PrivateKeyTypes
+
+__all__ = [
+ "PBES",
+ "PKCS12PrivateKeyTypes",
+ "PKCS12Certificate",
+ "PKCS12KeyAndCertificates",
+ "load_key_and_certificates",
+ "load_pkcs12",
+ "serialize_key_and_certificates",
+]
+
+PKCS12PrivateKeyTypes = typing.Union[
+ rsa.RSAPrivateKey,
+ dsa.DSAPrivateKey,
+ ec.EllipticCurvePrivateKey,
+ ed25519.Ed25519PrivateKey,
+ ed448.Ed448PrivateKey,
+]
+
+
+class PKCS12Certificate:
+ def __init__(
+ self,
+ cert: x509.Certificate,
+ friendly_name: typing.Optional[bytes],
+ ):
+ if not isinstance(cert, x509.Certificate):
+ raise TypeError("Expecting x509.Certificate object")
+ if friendly_name is not None and not isinstance(friendly_name, bytes):
+ raise TypeError("friendly_name must be bytes or None")
+ self._cert = cert
+ self._friendly_name = friendly_name
+
+ @property
+ def friendly_name(self) -> typing.Optional[bytes]:
+ return self._friendly_name
+
+ @property
+ def certificate(self) -> x509.Certificate:
+ return self._cert
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, PKCS12Certificate):
+ return NotImplemented
+
+ return (
+ self.certificate == other.certificate
+ and self.friendly_name == other.friendly_name
+ )
+
+ def __hash__(self) -> int:
+ return hash((self.certificate, self.friendly_name))
+
+ def __repr__(self) -> str:
+ return "".format(
+ self.certificate, self.friendly_name
+ )
+
+
+class PKCS12KeyAndCertificates:
+ def __init__(
+ self,
+ key: typing.Optional[PrivateKeyTypes],
+ cert: typing.Optional[PKCS12Certificate],
+ additional_certs: typing.List[PKCS12Certificate],
+ ):
+ if key is not None and not isinstance(
+ key,
+ (
+ rsa.RSAPrivateKey,
+ dsa.DSAPrivateKey,
+ ec.EllipticCurvePrivateKey,
+ ed25519.Ed25519PrivateKey,
+ ed448.Ed448PrivateKey,
+ ),
+ ):
+ raise TypeError(
+ "Key must be RSA, DSA, EllipticCurve, ED25519, or ED448"
+ " private key, or None."
+ )
+ if cert is not None and not isinstance(cert, PKCS12Certificate):
+ raise TypeError("cert must be a PKCS12Certificate object or None")
+ if not all(
+ isinstance(add_cert, PKCS12Certificate)
+ for add_cert in additional_certs
+ ):
+ raise TypeError(
+ "all values in additional_certs must be PKCS12Certificate"
+ " objects"
+ )
+ self._key = key
+ self._cert = cert
+ self._additional_certs = additional_certs
+
+ @property
+ def key(self) -> typing.Optional[PrivateKeyTypes]:
+ return self._key
+
+ @property
+ def cert(self) -> typing.Optional[PKCS12Certificate]:
+ return self._cert
+
+ @property
+ def additional_certs(self) -> typing.List[PKCS12Certificate]:
+ return self._additional_certs
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, PKCS12KeyAndCertificates):
+ return NotImplemented
+
+ return (
+ self.key == other.key
+ and self.cert == other.cert
+ and self.additional_certs == other.additional_certs
+ )
+
+ def __hash__(self) -> int:
+ return hash((self.key, self.cert, tuple(self.additional_certs)))
+
+ def __repr__(self) -> str:
+ fmt = (
+ ""
+ )
+ return fmt.format(self.key, self.cert, self.additional_certs)
+
+
+def load_key_and_certificates(
+    data: bytes,
+    password: typing.Optional[bytes],
+    backend: typing.Any = None,
+) -> typing.Tuple[
+    typing.Optional[PrivateKeyTypes],
+    typing.Optional[x509.Certificate],
+    typing.List[x509.Certificate],
+]:
+    """Parse a PKCS#12 blob into (key, cert, additional_certs).
+
+    ``backend`` is accepted for backwards compatibility and ignored.
+    """
+    from cryptography.hazmat.backends.openssl.backend import backend as ossl
+
+    return ossl.load_key_and_certificates_from_pkcs12(data, password)
+
+
+def load_pkcs12(
+    data: bytes,
+    password: typing.Optional[bytes],
+    backend: typing.Any = None,
+) -> PKCS12KeyAndCertificates:
+    """Parse a PKCS#12 blob, preserving friendly names.
+
+    ``backend`` is accepted for backwards compatibility and ignored.
+    """
+    from cryptography.hazmat.backends.openssl.backend import backend as ossl
+
+    return ossl.load_pkcs12(data, password)
+
+
+_PKCS12CATypes = typing.Union[
+ x509.Certificate,
+ PKCS12Certificate,
+]
+
+
+def serialize_key_and_certificates(
+    name: typing.Optional[bytes],
+    key: typing.Optional[PKCS12PrivateKeyTypes],
+    cert: typing.Optional[x509.Certificate],
+    cas: typing.Optional[typing.Iterable[_PKCS12CATypes]],
+    encryption_algorithm: serialization.KeySerializationEncryption,
+) -> bytes:
+    """Serialize key/cert/CA certs into a PKCS#12 blob.
+
+    At least one of *key*, *cert*, or *cas* must be supplied; all type
+    validation happens up front so the backend only sees valid inputs.
+    """
+    if key is not None and not isinstance(
+        key,
+        (
+            rsa.RSAPrivateKey,
+            dsa.DSAPrivateKey,
+            ec.EllipticCurvePrivateKey,
+            ed25519.Ed25519PrivateKey,
+            ed448.Ed448PrivateKey,
+        ),
+    ):
+        raise TypeError(
+            "Key must be RSA, DSA, EllipticCurve, ED25519, or ED448"
+            " private key, or None."
+        )
+    if cert is not None and not isinstance(cert, x509.Certificate):
+        raise TypeError("cert must be a certificate or None")
+
+    if cas is not None:
+        # Materialize the iterable once so it can be validated and reused.
+        cas = list(cas)
+        if not all(
+            isinstance(
+                val,
+                (
+                    x509.Certificate,
+                    PKCS12Certificate,
+                ),
+            )
+            for val in cas
+        ):
+            raise TypeError("all values in cas must be certificates")
+
+    if not isinstance(
+        encryption_algorithm, serialization.KeySerializationEncryption
+    ):
+        raise TypeError(
+            "Key encryption algorithm must be a "
+            "KeySerializationEncryption instance"
+        )
+
+    if key is None and cert is None and not cas:
+        raise ValueError("You must supply at least one of key, cert, or cas")
+
+    from cryptography.hazmat.backends.openssl.backend import backend
+
+    return backend.serialize_key_and_certificates_to_pkcs12(
+        name, key, cert, cas, encryption_algorithm
+    )
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/serialization/pkcs7.py b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/serialization/pkcs7.py
new file mode 100644
index 00000000..0a72e0df
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/serialization/pkcs7.py
@@ -0,0 +1,233 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import email.base64mime
+import email.generator
+import email.message
+import email.policy
+import io
+import typing
+
+from cryptography import utils, x509
+from cryptography.hazmat.bindings._rust import pkcs7 as rust_pkcs7
+from cryptography.hazmat.primitives import hashes, serialization
+from cryptography.hazmat.primitives.asymmetric import ec, rsa
+from cryptography.utils import _check_byteslike
+
+
+def load_pem_pkcs7_certificates(data: bytes) -> typing.List[x509.Certificate]:
+    """Extract the certificates from a PEM-encoded PKCS#7 structure."""
+    from cryptography.hazmat.backends.openssl.backend import backend
+
+    return backend.load_pem_pkcs7_certificates(data)
+
+
+def load_der_pkcs7_certificates(data: bytes) -> typing.List[x509.Certificate]:
+    """Extract the certificates from a DER-encoded PKCS#7 structure."""
+    from cryptography.hazmat.backends.openssl.backend import backend
+
+    return backend.load_der_pkcs7_certificates(data)
+
+
+def serialize_certificates(
+    certs: typing.List[x509.Certificate],
+    encoding: serialization.Encoding,
+) -> bytes:
+    """Serialize *certs* into a PKCS#7 structure via the Rust binding."""
+    return rust_pkcs7.serialize_certificates(certs, encoding)
+
+
+PKCS7HashTypes = typing.Union[
+ hashes.SHA224,
+ hashes.SHA256,
+ hashes.SHA384,
+ hashes.SHA512,
+]
+
+PKCS7PrivateKeyTypes = typing.Union[
+ rsa.RSAPrivateKey, ec.EllipticCurvePrivateKey
+]
+
+
+class PKCS7Options(utils.Enum):
+    """Flags controlling PKCS#7 signing; values are human-readable descriptions."""
+
+    Text = "Add text/plain MIME type"
+    Binary = "Don't translate input data into canonical MIME format"
+    DetachedSignature = "Don't embed data in the PKCS7 structure"
+    NoCapabilities = "Don't embed SMIME capabilities"
+    NoAttributes = "Don't embed authenticatedAttributes"
+    NoCerts = "Don't embed signer certificate"
+
+
+class PKCS7SignatureBuilder:
+ def __init__(
+ self,
+ data: typing.Optional[bytes] = None,
+ signers: typing.List[
+ typing.Tuple[
+ x509.Certificate,
+ PKCS7PrivateKeyTypes,
+ PKCS7HashTypes,
+ ]
+ ] = [],
+ additional_certs: typing.List[x509.Certificate] = [],
+ ):
+ self._data = data
+ self._signers = signers
+ self._additional_certs = additional_certs
+
+ def set_data(self, data: bytes) -> "PKCS7SignatureBuilder":
+ _check_byteslike("data", data)
+ if self._data is not None:
+ raise ValueError("data may only be set once")
+
+ return PKCS7SignatureBuilder(data, self._signers)
+
+ def add_signer(
+ self,
+ certificate: x509.Certificate,
+ private_key: PKCS7PrivateKeyTypes,
+ hash_algorithm: PKCS7HashTypes,
+ ) -> "PKCS7SignatureBuilder":
+ if not isinstance(
+ hash_algorithm,
+ (
+ hashes.SHA224,
+ hashes.SHA256,
+ hashes.SHA384,
+ hashes.SHA512,
+ ),
+ ):
+ raise TypeError(
+ "hash_algorithm must be one of hashes.SHA224, "
+ "SHA256, SHA384, or SHA512"
+ )
+ if not isinstance(certificate, x509.Certificate):
+ raise TypeError("certificate must be a x509.Certificate")
+
+ if not isinstance(
+ private_key, (rsa.RSAPrivateKey, ec.EllipticCurvePrivateKey)
+ ):
+ raise TypeError("Only RSA & EC keys are supported at this time.")
+
+ return PKCS7SignatureBuilder(
+ self._data,
+ self._signers + [(certificate, private_key, hash_algorithm)],
+ )
+
+ def add_certificate(
+ self, certificate: x509.Certificate
+ ) -> "PKCS7SignatureBuilder":
+ if not isinstance(certificate, x509.Certificate):
+ raise TypeError("certificate must be a x509.Certificate")
+
+ return PKCS7SignatureBuilder(
+ self._data, self._signers, self._additional_certs + [certificate]
+ )
+
+ def sign(
+ self,
+ encoding: serialization.Encoding,
+ options: typing.Iterable[PKCS7Options],
+ backend: typing.Any = None,
+ ) -> bytes:
+ if len(self._signers) == 0:
+ raise ValueError("Must have at least one signer")
+ if self._data is None:
+ raise ValueError("You must add data to sign")
+ options = list(options)
+ if not all(isinstance(x, PKCS7Options) for x in options):
+ raise ValueError("options must be from the PKCS7Options enum")
+ if encoding not in (
+ serialization.Encoding.PEM,
+ serialization.Encoding.DER,
+ serialization.Encoding.SMIME,
+ ):
+ raise ValueError(
+ "Must be PEM, DER, or SMIME from the Encoding enum"
+ )
+
+ # Text is a meaningless option unless it is accompanied by
+ # DetachedSignature
+ if (
+ PKCS7Options.Text in options
+ and PKCS7Options.DetachedSignature not in options
+ ):
+ raise ValueError(
+ "When passing the Text option you must also pass "
+ "DetachedSignature"
+ )
+
+ if PKCS7Options.Text in options and encoding in (
+ serialization.Encoding.DER,
+ serialization.Encoding.PEM,
+ ):
+ raise ValueError(
+ "The Text option is only available for SMIME serialization"
+ )
+
+ # No attributes implies no capabilities so we'll error if you try to
+ # pass both.
+ if (
+ PKCS7Options.NoAttributes in options
+ and PKCS7Options.NoCapabilities in options
+ ):
+ raise ValueError(
+ "NoAttributes is a superset of NoCapabilities. Do not pass "
+ "both values."
+ )
+
+ return rust_pkcs7.sign_and_serialize(self, encoding, options)
+
+
+def _smime_encode(
+    data: bytes, signature: bytes, micalg: str, text_mode: bool
+) -> bytes:
+    # This function works pretty hard to replicate what OpenSSL does
+    # precisely. For good and for ill.
+    #
+    # Builds a multipart/signed S/MIME message: the payload part followed by
+    # a base64-encoded application/x-pkcs7-signature attachment, flattened
+    # with CRLF line endings.
+
+    m = email.message.Message()
+    m.add_header("MIME-Version", "1.0")
+    m.add_header(
+        "Content-Type",
+        "multipart/signed",
+        protocol="application/x-pkcs7-signature",
+        micalg=micalg,
+    )
+
+    m.preamble = "This is an S/MIME signed message\n"
+
+    # OpenSSLMimePart suppresses the blank line after an empty header block,
+    # matching OpenSSL's output byte-for-byte.
+    msg_part = OpenSSLMimePart()
+    msg_part.set_payload(data)
+    if text_mode:
+        msg_part.add_header("Content-Type", "text/plain")
+    m.attach(msg_part)
+
+    sig_part = email.message.MIMEPart()
+    sig_part.add_header(
+        "Content-Type", "application/x-pkcs7-signature", name="smime.p7s"
+    )
+    sig_part.add_header("Content-Transfer-Encoding", "base64")
+    sig_part.add_header(
+        "Content-Disposition", "attachment", filename="smime.p7s"
+    )
+    sig_part.set_payload(
+        email.base64mime.body_encode(signature, maxlinelen=65)
+    )
+    # MIMEPart adds MIME-Version automatically; OpenSSL omits it on subparts.
+    del sig_part["MIME-Version"]
+    m.attach(sig_part)
+
+    fp = io.BytesIO()
+    g = email.generator.BytesGenerator(
+        fp,
+        maxheaderlen=0,
+        mangle_from_=False,
+        policy=m.policy.clone(linesep="\r\n"),
+    )
+    g.flatten(m)
+    return fp.getvalue()
+
+
+class OpenSSLMimePart(email.message.MIMEPart):
+    # A MIMEPart subclass that replicates OpenSSL's behavior of not including
+    # a newline if there are no headers.
+    def _write_headers(self, generator) -> None:
+        # Only emit the header block (and its trailing blank line) when at
+        # least one header is actually present.
+        if list(self.raw_items()):
+            generator._write_headers(self)
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/serialization/ssh.py b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/serialization/ssh.py
new file mode 100644
index 00000000..fa278d9e
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/serialization/ssh.py
@@ -0,0 +1,1447 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+
+import binascii
+import enum
+import os
+import re
+import typing
+import warnings
+from base64 import encodebytes as _base64_encode
+
+from cryptography import utils
+from cryptography.exceptions import UnsupportedAlgorithm
+from cryptography.hazmat.primitives import hashes
+from cryptography.hazmat.primitives.asymmetric import (
+ dsa,
+ ec,
+ ed25519,
+ padding,
+ rsa,
+)
+from cryptography.hazmat.primitives.asymmetric import utils as asym_utils
+from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
+from cryptography.hazmat.primitives.serialization import (
+ Encoding,
+ KeySerializationEncryption,
+ NoEncryption,
+ PrivateFormat,
+ PublicFormat,
+ _KeySerializationEncryption,
+)
+
+# bcrypt is an optional dependency; private-key encryption needs its KDF.
+try:
+    from bcrypt import kdf as _bcrypt_kdf
+
+    _bcrypt_supported = True
+except ImportError:
+    _bcrypt_supported = False
+
+    # Stub with the same signature so callers fail with a clear error only
+    # when the KDF is actually needed, not at import time.
+    def _bcrypt_kdf(
+        password: bytes,
+        salt: bytes,
+        desired_key_bytes: int,
+        rounds: int,
+        ignore_few_rounds: bool = False,
+    ) -> bytes:
+        raise UnsupportedAlgorithm("Need bcrypt module")
+
+
+_SSH_ED25519 = b"ssh-ed25519"
+_SSH_RSA = b"ssh-rsa"
+_SSH_DSA = b"ssh-dss"
+_ECDSA_NISTP256 = b"ecdsa-sha2-nistp256"
+_ECDSA_NISTP384 = b"ecdsa-sha2-nistp384"
+_ECDSA_NISTP521 = b"ecdsa-sha2-nistp521"
+_CERT_SUFFIX = b"-cert-v01@openssh.com"
+
+# These are not key types, only algorithms, so they cannot appear
+# as a public key type
+_SSH_RSA_SHA256 = b"rsa-sha2-256"
+_SSH_RSA_SHA512 = b"rsa-sha2-512"
+
+_SSH_PUBKEY_RC = re.compile(rb"\A(\S+)[ \t]+(\S+)")
+_SK_MAGIC = b"openssh-key-v1\0"
+_SK_START = b"-----BEGIN OPENSSH PRIVATE KEY-----"
+_SK_END = b"-----END OPENSSH PRIVATE KEY-----"
+_BCRYPT = b"bcrypt"
+_NONE = b"none"
+_DEFAULT_CIPHER = b"aes256-ctr"
+_DEFAULT_ROUNDS = 16
+
+# re is only way to work on bytes-like data
+_PEM_RC = re.compile(_SK_START + b"(.*?)" + _SK_END, re.DOTALL)
+
+# padding for max blocksize
+_PADDING = memoryview(bytearray(range(1, 1 + 16)))
+
+# ciphers that are actually used in key wrapping
+_SSH_CIPHERS: typing.Dict[
+ bytes,
+ typing.Tuple[
+ typing.Type[algorithms.AES],
+ int,
+ typing.Union[typing.Type[modes.CTR], typing.Type[modes.CBC]],
+ int,
+ ],
+] = {
+ b"aes256-ctr": (algorithms.AES, 32, modes.CTR, 16),
+ b"aes256-cbc": (algorithms.AES, 32, modes.CBC, 16),
+}
+
+# map local curve name to key type
+_ECDSA_KEY_TYPE = {
+ "secp256r1": _ECDSA_NISTP256,
+ "secp384r1": _ECDSA_NISTP384,
+ "secp521r1": _ECDSA_NISTP521,
+}
+
+
+def _get_ssh_key_type(
+    key: typing.Union["SSHPrivateKeyTypes", "SSHPublicKeyTypes"]
+) -> bytes:
+    """Map a private or public key object to its SSH key-type name (bytes)."""
+    if isinstance(key, ec.EllipticCurvePrivateKey):
+        key_type = _ecdsa_key_type(key.public_key())
+    elif isinstance(key, ec.EllipticCurvePublicKey):
+        key_type = _ecdsa_key_type(key)
+    elif isinstance(key, (rsa.RSAPrivateKey, rsa.RSAPublicKey)):
+        key_type = _SSH_RSA
+    elif isinstance(key, (dsa.DSAPrivateKey, dsa.DSAPublicKey)):
+        key_type = _SSH_DSA
+    elif isinstance(
+        key, (ed25519.Ed25519PrivateKey, ed25519.Ed25519PublicKey)
+    ):
+        key_type = _SSH_ED25519
+    else:
+        raise ValueError("Unsupported key type")
+
+    return key_type
+
+
+def _ecdsa_key_type(public_key: ec.EllipticCurvePublicKey) -> bytes:
+    """Return the SSH key_type for an ECDSA public key's curve.
+
+    Only NIST P-256/P-384/P-521 are supported.
+    """
+    curve = public_key.curve
+    if curve.name not in _ECDSA_KEY_TYPE:
+        raise ValueError(
+            f"Unsupported curve for ssh private key: {curve.name!r}"
+        )
+    return _ECDSA_KEY_TYPE[curve.name]
+
+
+def _ssh_pem_encode(
+    data: bytes,
+    prefix: bytes = _SK_START + b"\n",
+    suffix: bytes = _SK_END + b"\n",
+) -> bytes:
+    """Wrap *data* in base64 between OPENSSH PRIVATE KEY armor lines."""
+    return b"".join([prefix, _base64_encode(data), suffix])
+
+
+def _check_block_size(data: bytes, block_len: int) -> None:
+    """Require data to be non-empty and a whole number of blocks."""
+    if not data or len(data) % block_len != 0:
+        raise ValueError("Corrupt data: missing padding")
+
+
+def _check_empty(data: bytes) -> None:
+    """All data should have been parsed; raise if any bytes remain."""
+    if data:
+        raise ValueError("Corrupt data: unparsed data")
+
+
+def _init_cipher(
+    ciphername: bytes,
+    password: typing.Optional[bytes],
+    salt: bytes,
+    rounds: int,
+) -> Cipher[typing.Union[modes.CBC, modes.CTR]]:
+    """Derive key + IV from the password via bcrypt KDF and return a Cipher."""
+    if not password:
+        raise ValueError("Key is password-protected.")
+
+    algo, key_len, mode, iv_len = _SSH_CIPHERS[ciphername]
+    # One KDF call produces key material followed by the IV.
+    seed = _bcrypt_kdf(password, salt, key_len + iv_len, rounds, True)
+    return Cipher(algo(seed[:key_len]), mode(seed[key_len:]))
+
+
+def _get_u32(data: memoryview) -> typing.Tuple[int, memoryview]:
+    """Parse a big-endian uint32; return (value, remaining data)."""
+    if len(data) < 4:
+        raise ValueError("Invalid data")
+    return int.from_bytes(data[:4], byteorder="big"), data[4:]
+
+
+def _get_u64(data: memoryview) -> typing.Tuple[int, memoryview]:
+    """Parse a big-endian uint64; return (value, remaining data)."""
+    if len(data) < 8:
+        raise ValueError("Invalid data")
+    return int.from_bytes(data[:8], byteorder="big"), data[8:]
+
+
+def _get_sshstr(data: memoryview) -> typing.Tuple[memoryview, memoryview]:
+    """Parse a u32-length-prefixed byte string; return (value, remaining)."""
+    n, data = _get_u32(data)
+    if n > len(data):
+        raise ValueError("Invalid data")
+    return data[:n], data[n:]
+
+
+def _get_mpint(data: memoryview) -> typing.Tuple[int, memoryview]:
+    """Parse an SSH mpint (length-prefixed big-endian non-negative bigint)."""
+    val, data = _get_sshstr(data)
+    # A set high bit would denote a negative number; these keys only use
+    # non-negative values, so reject it as corrupt.
+    if val and val[0] > 0x7F:
+        raise ValueError("Invalid data")
+    return int.from_bytes(val, "big"), data
+
+
+def _to_mpint(val: int) -> bytes:
+    """Storage format for signed bigint: minimal big-endian bytes with a
+    leading zero byte when the high bit would otherwise be set."""
+    if val < 0:
+        raise ValueError("negative mpint not allowed")
+    if not val:
+        return b""
+    # +8 (not +7) rounds up AND adds the sign byte when bit_length % 8 == 0.
+    nbytes = (val.bit_length() + 8) // 8
+    return utils.int_to_bytes(val, nbytes)
+
+
+class _FragList:
+    """Build recursive structure without data copy.
+
+    Accumulates byte fragments in a flat list; length prefixes are computed
+    from fragment sizes so nothing is concatenated until render()/tobytes().
+    """
+
+    flist: typing.List[bytes]
+
+    def __init__(
+        self, init: typing.Optional[typing.List[bytes]] = None
+    ) -> None:
+        self.flist = []
+        if init:
+            self.flist.extend(init)
+
+    def put_raw(self, val: bytes) -> None:
+        """Add plain bytes"""
+        self.flist.append(val)
+
+    def put_u32(self, val: int) -> None:
+        """Big-endian uint32"""
+        self.flist.append(val.to_bytes(length=4, byteorder="big"))
+
+    def put_u64(self, val: int) -> None:
+        """Big-endian uint64"""
+        self.flist.append(val.to_bytes(length=8, byteorder="big"))
+
+    def put_sshstr(self, val: typing.Union[bytes, "_FragList"]) -> None:
+        """Bytes prefixed with u32 length; nested _FragLists are spliced in."""
+        if isinstance(val, (bytes, memoryview, bytearray)):
+            self.put_u32(len(val))
+            self.flist.append(val)
+        else:
+            self.put_u32(val.size())
+            self.flist.extend(val.flist)
+
+    def put_mpint(self, val: int) -> None:
+        """Big-endian bigint prefixed with u32 length"""
+        self.put_sshstr(_to_mpint(val))
+
+    def size(self) -> int:
+        """Current number of bytes"""
+        return sum(map(len, self.flist))
+
+    def render(self, dstbuf: memoryview, pos: int = 0) -> int:
+        """Write fragments into *dstbuf* starting at *pos*; return end offset."""
+        for frag in self.flist:
+            flen = len(frag)
+            start, pos = pos, pos + flen
+            dstbuf[start:pos] = frag
+        return pos
+
+    def tobytes(self) -> bytes:
+        """Return the whole structure as a single bytes object."""
+        buf = memoryview(bytearray(self.size()))
+        self.render(buf)
+        return buf.tobytes()
+
+
+class _SSHFormatRSA:
+    """Format for RSA keys.
+
+    Public:
+        mpint e, n
+    Private:
+        mpint n, e, d, iqmp, p, q
+    """
+
+    def get_public(self, data: memoryview):
+        """RSA public fields"""
+        e, data = _get_mpint(data)
+        n, data = _get_mpint(data)
+        return (e, n), data
+
+    def load_public(
+        self, data: memoryview
+    ) -> typing.Tuple[rsa.RSAPublicKey, memoryview]:
+        """Make RSA public key from data."""
+        (e, n), data = self.get_public(data)
+        public_numbers = rsa.RSAPublicNumbers(e, n)
+        public_key = public_numbers.public_key()
+        return public_key, data
+
+    def load_private(
+        self, data: memoryview, pubfields
+    ) -> typing.Tuple[rsa.RSAPrivateKey, memoryview]:
+        """Make RSA private key from data.
+
+        *pubfields* are the (e, n) values parsed from the public section;
+        they must match the private section or the data is corrupt.
+        """
+        n, data = _get_mpint(data)
+        e, data = _get_mpint(data)
+        d, data = _get_mpint(data)
+        iqmp, data = _get_mpint(data)
+        p, data = _get_mpint(data)
+        q, data = _get_mpint(data)
+
+        if (e, n) != pubfields:
+            raise ValueError("Corrupt data: rsa field mismatch")
+        # SSH keys omit the CRT exponents; recompute them from d, p, q.
+        dmp1 = rsa.rsa_crt_dmp1(d, p)
+        dmq1 = rsa.rsa_crt_dmq1(d, q)
+        public_numbers = rsa.RSAPublicNumbers(e, n)
+        private_numbers = rsa.RSAPrivateNumbers(
+            p, q, d, dmp1, dmq1, iqmp, public_numbers
+        )
+        private_key = private_numbers.private_key()
+        return private_key, data
+
+    def encode_public(
+        self, public_key: rsa.RSAPublicKey, f_pub: _FragList
+    ) -> None:
+        """Write RSA public key"""
+        pubn = public_key.public_numbers()
+        f_pub.put_mpint(pubn.e)
+        f_pub.put_mpint(pubn.n)
+
+    def encode_private(
+        self, private_key: rsa.RSAPrivateKey, f_priv: _FragList
+    ) -> None:
+        """Write RSA private key"""
+        private_numbers = private_key.private_numbers()
+        public_numbers = private_numbers.public_numbers
+
+        f_priv.put_mpint(public_numbers.n)
+        f_priv.put_mpint(public_numbers.e)
+
+        f_priv.put_mpint(private_numbers.d)
+        f_priv.put_mpint(private_numbers.iqmp)
+        f_priv.put_mpint(private_numbers.p)
+        f_priv.put_mpint(private_numbers.q)
+
+
+class _SSHFormatDSA:
+    """Format for DSA keys.
+
+    Public:
+        mpint p, q, g, y
+    Private:
+        mpint p, q, g, y, x
+    """
+
+    def get_public(
+        self, data: memoryview
+    ) -> typing.Tuple[typing.Tuple, memoryview]:
+        """DSA public fields"""
+        p, data = _get_mpint(data)
+        q, data = _get_mpint(data)
+        g, data = _get_mpint(data)
+        y, data = _get_mpint(data)
+        return (p, q, g, y), data
+
+    def load_public(
+        self, data: memoryview
+    ) -> typing.Tuple[dsa.DSAPublicKey, memoryview]:
+        """Make DSA public key from data."""
+        (p, q, g, y), data = self.get_public(data)
+        parameter_numbers = dsa.DSAParameterNumbers(p, q, g)
+        public_numbers = dsa.DSAPublicNumbers(y, parameter_numbers)
+        self._validate(public_numbers)
+        public_key = public_numbers.public_key()
+        return public_key, data
+
+    def load_private(
+        self, data: memoryview, pubfields
+    ) -> typing.Tuple[dsa.DSAPrivateKey, memoryview]:
+        """Make DSA private key from data.
+
+        *pubfields* must match the (p, q, g, y) repeated in the private
+        section, otherwise the data is corrupt.
+        """
+        (p, q, g, y), data = self.get_public(data)
+        x, data = _get_mpint(data)
+
+        if (p, q, g, y) != pubfields:
+            raise ValueError("Corrupt data: dsa field mismatch")
+        parameter_numbers = dsa.DSAParameterNumbers(p, q, g)
+        public_numbers = dsa.DSAPublicNumbers(y, parameter_numbers)
+        self._validate(public_numbers)
+        private_numbers = dsa.DSAPrivateNumbers(x, public_numbers)
+        private_key = private_numbers.private_key()
+        return private_key, data
+
+    def encode_public(
+        self, public_key: dsa.DSAPublicKey, f_pub: _FragList
+    ) -> None:
+        """Write DSA public key"""
+        public_numbers = public_key.public_numbers()
+        parameter_numbers = public_numbers.parameter_numbers
+        self._validate(public_numbers)
+
+        f_pub.put_mpint(parameter_numbers.p)
+        f_pub.put_mpint(parameter_numbers.q)
+        f_pub.put_mpint(parameter_numbers.g)
+        f_pub.put_mpint(public_numbers.y)
+
+    def encode_private(
+        self, private_key: dsa.DSAPrivateKey, f_priv: _FragList
+    ) -> None:
+        """Write DSA private key"""
+        self.encode_public(private_key.public_key(), f_priv)
+        f_priv.put_mpint(private_key.private_numbers().x)
+
+    def _validate(self, public_numbers: dsa.DSAPublicNumbers) -> None:
+        # The SSH wire format only accommodates legacy 1024-bit DSA keys.
+        parameter_numbers = public_numbers.parameter_numbers
+        if parameter_numbers.p.bit_length() != 1024:
+            raise ValueError("SSH supports only 1024 bit DSA keys")
+
+
+class _SSHFormatECDSA:
+    """Format for ECDSA keys.
+
+    Public:
+        str curve
+        bytes point
+    Private:
+        str curve
+        bytes point
+        mpint secret
+    """
+
+    def __init__(self, ssh_curve_name: bytes, curve: ec.EllipticCurve):
+        self.ssh_curve_name = ssh_curve_name
+        self.curve = curve
+
+    def get_public(
+        self, data: memoryview
+    ) -> typing.Tuple[typing.Tuple, memoryview]:
+        """ECDSA public fields"""
+        curve, data = _get_sshstr(data)
+        point, data = _get_sshstr(data)
+        if curve != self.ssh_curve_name:
+            raise ValueError("Curve name mismatch")
+        # 0x04 is the SEC1 uncompressed-point marker.
+        if point[0] != 4:
+            raise NotImplementedError("Need uncompressed point")
+        return (curve, point), data
+
+    def load_public(
+        self, data: memoryview
+    ) -> typing.Tuple[ec.EllipticCurvePublicKey, memoryview]:
+        """Make ECDSA public key from data."""
+        (curve_name, point), data = self.get_public(data)
+        public_key = ec.EllipticCurvePublicKey.from_encoded_point(
+            self.curve, point.tobytes()
+        )
+        return public_key, data
+
+    def load_private(
+        self, data: memoryview, pubfields
+    ) -> typing.Tuple[ec.EllipticCurvePrivateKey, memoryview]:
+        """Make ECDSA private key from data.
+
+        *pubfields* must match the (curve, point) repeated in the private
+        section, otherwise the data is corrupt.
+        """
+        (curve_name, point), data = self.get_public(data)
+        secret, data = _get_mpint(data)
+
+        if (curve_name, point) != pubfields:
+            raise ValueError("Corrupt data: ecdsa field mismatch")
+        private_key = ec.derive_private_key(secret, self.curve)
+        return private_key, data
+
+    def encode_public(
+        self, public_key: ec.EllipticCurvePublicKey, f_pub: _FragList
+    ) -> None:
+        """Write ECDSA public key"""
+        point = public_key.public_bytes(
+            Encoding.X962, PublicFormat.UncompressedPoint
+        )
+        f_pub.put_sshstr(self.ssh_curve_name)
+        f_pub.put_sshstr(point)
+
+    def encode_private(
+        self, private_key: ec.EllipticCurvePrivateKey, f_priv: _FragList
+    ) -> None:
+        """Write ECDSA private key"""
+        public_key = private_key.public_key()
+        private_numbers = private_key.private_numbers()
+
+        self.encode_public(public_key, f_priv)
+        f_priv.put_mpint(private_numbers.private_value)
+
+
+class _SSHFormatEd25519:
+    """Format for Ed25519 keys.
+
+    Public:
+        bytes point
+    Private:
+        bytes point
+        bytes secret_and_point
+    """
+
+    def get_public(
+        self, data: memoryview
+    ) -> typing.Tuple[typing.Tuple, memoryview]:
+        """Ed25519 public fields"""
+        point, data = _get_sshstr(data)
+        return (point,), data
+
+    def load_public(
+        self, data: memoryview
+    ) -> typing.Tuple[ed25519.Ed25519PublicKey, memoryview]:
+        """Make Ed25519 public key from data."""
+        (point,), data = self.get_public(data)
+        public_key = ed25519.Ed25519PublicKey.from_public_bytes(
+            point.tobytes()
+        )
+        return public_key, data
+
+    def load_private(
+        self, data: memoryview, pubfields
+    ) -> typing.Tuple[ed25519.Ed25519PrivateKey, memoryview]:
+        """Make Ed25519 private key from data."""
+        (point,), data = self.get_public(data)
+        keypair, data = _get_sshstr(data)
+
+        # The keypair field is the 32-byte secret followed by the 32-byte
+        # public point; the embedded point must match the public section.
+        secret = keypair[:32]
+        point2 = keypair[32:]
+        if point != point2 or (point,) != pubfields:
+            raise ValueError("Corrupt data: ed25519 field mismatch")
+        private_key = ed25519.Ed25519PrivateKey.from_private_bytes(secret)
+        return private_key, data
+
+    def encode_public(
+        self, public_key: ed25519.Ed25519PublicKey, f_pub: _FragList
+    ) -> None:
+        """Write Ed25519 public key"""
+        raw_public_key = public_key.public_bytes(
+            Encoding.Raw, PublicFormat.Raw
+        )
+        f_pub.put_sshstr(raw_public_key)
+
+    def encode_private(
+        self, private_key: ed25519.Ed25519PrivateKey, f_priv: _FragList
+    ) -> None:
+        """Write Ed25519 private key"""
+        public_key = private_key.public_key()
+        raw_private_key = private_key.private_bytes(
+            Encoding.Raw, PrivateFormat.Raw, NoEncryption()
+        )
+        raw_public_key = public_key.public_bytes(
+            Encoding.Raw, PublicFormat.Raw
+        )
+        # secret_and_point = raw secret || raw public point (see docstring).
+        f_keypair = _FragList([raw_private_key, raw_public_key])
+
+        self.encode_public(public_key, f_priv)
+        f_priv.put_sshstr(f_keypair)
+
+
+# Map SSH key-type identifier -> format helper used to (de)serialize it.
+_KEY_FORMATS = {
+    _SSH_RSA: _SSHFormatRSA(),
+    _SSH_DSA: _SSHFormatDSA(),
+    _SSH_ED25519: _SSHFormatEd25519(),
+    _ECDSA_NISTP256: _SSHFormatECDSA(b"nistp256", ec.SECP256R1()),
+    _ECDSA_NISTP384: _SSHFormatECDSA(b"nistp384", ec.SECP384R1()),
+    _ECDSA_NISTP521: _SSHFormatECDSA(b"nistp521", ec.SECP521R1()),
+}
+
+
+def _lookup_kformat(key_type: bytes):
+    """Return valid format or throw error"""
+    if not isinstance(key_type, bytes):
+        # Accept memoryview (and other bytes-like) input from the parsers.
+        key_type = memoryview(key_type).tobytes()
+    if key_type in _KEY_FORMATS:
+        return _KEY_FORMATS[key_type]
+    raise UnsupportedAlgorithm(f"Unsupported key type: {key_type!r}")
+
+
+# Key types accepted/returned by the OpenSSH private-key (de)serializers.
+SSHPrivateKeyTypes = typing.Union[
+    ec.EllipticCurvePrivateKey,
+    rsa.RSAPrivateKey,
+    dsa.DSAPrivateKey,
+    ed25519.Ed25519PrivateKey,
+]
+
+
+def load_ssh_private_key(
+    data: bytes,
+    password: typing.Optional[bytes],
+    backend: typing.Any = None,
+) -> SSHPrivateKeyTypes:
+    """Load private key from OpenSSH custom encoding."""
+    utils._check_byteslike("data", data)
+    if password is not None:
+        utils._check_bytes("password", password)
+
+    # Strip the PEM-style armor and base64-decode the payload.
+    m = _PEM_RC.search(data)
+    if not m:
+        raise ValueError("Not OpenSSH private key format")
+    p1 = m.start(1)
+    p2 = m.end(1)
+    data = binascii.a2b_base64(memoryview(data)[p1:p2])
+    if not data.startswith(_SK_MAGIC):
+        raise ValueError("Not OpenSSH private key format")
+    data = memoryview(data)[len(_SK_MAGIC) :]
+
+    # parse header
+    ciphername, data = _get_sshstr(data)
+    kdfname, data = _get_sshstr(data)
+    kdfoptions, data = _get_sshstr(data)
+    nkeys, data = _get_u32(data)
+    if nkeys != 1:
+        raise ValueError("Only one key supported")
+
+    # load public key data
+    pubdata, data = _get_sshstr(data)
+    pub_key_type, pubdata = _get_sshstr(pubdata)
+    kformat = _lookup_kformat(pub_key_type)
+    pubfields, pubdata = kformat.get_public(pubdata)
+    _check_empty(pubdata)
+
+    # load secret data
+    edata, data = _get_sshstr(data)
+    _check_empty(data)
+
+    if (ciphername, kdfname) != (_NONE, _NONE):
+        # Encrypted key: derive the cipher key via the bcrypt KDF
+        # (salt + rounds come from kdfoptions) and decrypt in one pass.
+        ciphername_bytes = ciphername.tobytes()
+        if ciphername_bytes not in _SSH_CIPHERS:
+            raise UnsupportedAlgorithm(
+                f"Unsupported cipher: {ciphername_bytes!r}"
+            )
+        if kdfname != _BCRYPT:
+            raise UnsupportedAlgorithm(f"Unsupported KDF: {kdfname!r}")
+        blklen = _SSH_CIPHERS[ciphername_bytes][3]
+        _check_block_size(edata, blklen)
+        salt, kbuf = _get_sshstr(kdfoptions)
+        rounds, kbuf = _get_u32(kbuf)
+        _check_empty(kbuf)
+        ciph = _init_cipher(ciphername_bytes, password, salt.tobytes(), rounds)
+        edata = memoryview(ciph.decryptor().update(edata))
+    else:
+        blklen = 8
+        _check_block_size(edata, blklen)
+    # The duplicated 32-bit check values must agree; a mismatch usually
+    # means a wrong password or corrupted data.
+    ck1, edata = _get_u32(edata)
+    ck2, edata = _get_u32(edata)
+    if ck1 != ck2:
+        raise ValueError("Corrupt data: broken checksum")
+
+    # load per-key struct
+    key_type, edata = _get_sshstr(edata)
+    if key_type != pub_key_type:
+        raise ValueError("Corrupt data: key type mismatch")
+    private_key, edata = kformat.load_private(edata, pubfields)
+    comment, edata = _get_sshstr(edata)
+
+    # yes, SSH does padding check *after* all other parsing is done.
+    # need to follow as it writes zero-byte padding too.
+    if edata != _PADDING[: len(edata)]:
+        raise ValueError("Corrupt data: invalid padding")
+
+    if isinstance(private_key, dsa.DSAPrivateKey):
+        warnings.warn(
+            "SSH DSA keys are deprecated and will be removed in a future "
+            "release.",
+            utils.DeprecatedIn40,
+            stacklevel=2,
+        )
+
+    return private_key
+
+
+def _serialize_ssh_private_key(
+    private_key: SSHPrivateKeyTypes,
+    password: bytes,
+    encryption_algorithm: KeySerializationEncryption,
+) -> bytes:
+    """Serialize private key with OpenSSH custom encoding."""
+    utils._check_bytes("password", password)
+    if isinstance(private_key, dsa.DSAPrivateKey):
+        warnings.warn(
+            "SSH DSA key support is deprecated and will be "
+            "removed in a future release",
+            utils.DeprecatedIn40,
+            stacklevel=4,
+        )
+
+    key_type = _get_ssh_key_type(private_key)
+    kformat = _lookup_kformat(key_type)
+
+    # setup parameters
+    f_kdfoptions = _FragList()
+    if password:
+        # Encrypting: random salt + bcrypt rounds feed the KDF; the round
+        # count may be overridden by the encryption algorithm's setting.
+        ciphername = _DEFAULT_CIPHER
+        blklen = _SSH_CIPHERS[ciphername][3]
+        kdfname = _BCRYPT
+        rounds = _DEFAULT_ROUNDS
+        if (
+            isinstance(encryption_algorithm, _KeySerializationEncryption)
+            and encryption_algorithm._kdf_rounds is not None
+        ):
+            rounds = encryption_algorithm._kdf_rounds
+        salt = os.urandom(16)
+        f_kdfoptions.put_sshstr(salt)
+        f_kdfoptions.put_u32(rounds)
+        ciph = _init_cipher(ciphername, password, salt, rounds)
+    else:
+        ciphername = kdfname = _NONE
+        blklen = 8
+        ciph = None
+    nkeys = 1
+    # Duplicated random check value lets the loader detect a bad password.
+    checkval = os.urandom(4)
+    comment = b""
+
+    # encode public and private parts together
+    f_public_key = _FragList()
+    f_public_key.put_sshstr(key_type)
+    kformat.encode_public(private_key.public_key(), f_public_key)
+
+    f_secrets = _FragList([checkval, checkval])
+    f_secrets.put_sshstr(key_type)
+    kformat.encode_private(private_key, f_secrets)
+    f_secrets.put_sshstr(comment)
+    # Pad the secret section up to the cipher block size with _PADDING.
+    f_secrets.put_raw(_PADDING[: blklen - (f_secrets.size() % blklen)])
+
+    # top-level structure
+    f_main = _FragList()
+    f_main.put_raw(_SK_MAGIC)
+    f_main.put_sshstr(ciphername)
+    f_main.put_sshstr(kdfname)
+    f_main.put_sshstr(f_kdfoptions)
+    f_main.put_u32(nkeys)
+    f_main.put_sshstr(f_public_key)
+    f_main.put_sshstr(f_secrets)
+
+    # copy result into bytearray
+    slen = f_secrets.size()
+    mlen = f_main.size()
+    buf = memoryview(bytearray(mlen + blklen))
+    f_main.render(buf)
+    ofs = mlen - slen
+
+    # encrypt in-place
+    if ciph is not None:
+        ciph.encryptor().update_into(buf[ofs:mlen], buf[ofs:])
+
+    return _ssh_pem_encode(buf[:mlen])
+
+
+# Public-key types returned by the SSH public-key loaders.
+SSHPublicKeyTypes = typing.Union[
+    ec.EllipticCurvePublicKey,
+    rsa.RSAPublicKey,
+    dsa.DSAPublicKey,
+    ed25519.Ed25519PublicKey,
+]
+
+# DSA is excluded: this module rejects DSA keys inside SSH certificates.
+SSHCertPublicKeyTypes = typing.Union[
+    ec.EllipticCurvePublicKey,
+    rsa.RSAPublicKey,
+    ed25519.Ed25519PublicKey,
+]
+
+
+class SSHCertificateType(enum.Enum):
+    # Values match the u32 certificate-type field in the cert blob.
+    USER = 1
+    HOST = 2
+
+
+class SSHCertificate:
+    """An OpenSSH certificate parsed from the one-line public format.
+
+    Instances are produced by the loaders in this module; the constructor
+    arguments mirror the fields of the certificate blob.
+    """
+
+    def __init__(
+        self,
+        _nonce: memoryview,
+        _public_key: SSHPublicKeyTypes,
+        _serial: int,
+        _cctype: int,
+        _key_id: memoryview,
+        _valid_principals: typing.List[bytes],
+        _valid_after: int,
+        _valid_before: int,
+        _critical_options: typing.Dict[bytes, bytes],
+        _extensions: typing.Dict[bytes, bytes],
+        _sig_type: memoryview,
+        _sig_key: memoryview,
+        _inner_sig_type: memoryview,
+        _signature: memoryview,
+        _tbs_cert_body: memoryview,
+        _cert_key_type: bytes,
+        _cert_body: memoryview,
+    ):
+        self._nonce = _nonce
+        self._public_key = _public_key
+        self._serial = _serial
+        try:
+            # Reject unknown certificate-type codes early.
+            self._type = SSHCertificateType(_cctype)
+        except ValueError:
+            raise ValueError("Invalid certificate type")
+        self._key_id = _key_id
+        self._valid_principals = _valid_principals
+        self._valid_after = _valid_after
+        self._valid_before = _valid_before
+        self._critical_options = _critical_options
+        self._extensions = _extensions
+        self._sig_type = _sig_type
+        self._sig_key = _sig_key
+        self._inner_sig_type = _inner_sig_type
+        self._signature = _signature
+        self._cert_key_type = _cert_key_type
+        self._cert_body = _cert_body
+        self._tbs_cert_body = _tbs_cert_body
+
+    @property
+    def nonce(self) -> bytes:
+        return bytes(self._nonce)
+
+    def public_key(self) -> SSHCertPublicKeyTypes:
+        # make mypy happy until we remove DSA support entirely and
+        # the underlying union won't have a disallowed type
+        return typing.cast(SSHCertPublicKeyTypes, self._public_key)
+
+    @property
+    def serial(self) -> int:
+        return self._serial
+
+    @property
+    def type(self) -> SSHCertificateType:
+        return self._type
+
+    @property
+    def key_id(self) -> bytes:
+        return bytes(self._key_id)
+
+    @property
+    def valid_principals(self) -> typing.List[bytes]:
+        return self._valid_principals
+
+    @property
+    def valid_before(self) -> int:
+        return self._valid_before
+
+    @property
+    def valid_after(self) -> int:
+        return self._valid_after
+
+    @property
+    def critical_options(self) -> typing.Dict[bytes, bytes]:
+        return self._critical_options
+
+    @property
+    def extensions(self) -> typing.Dict[bytes, bytes]:
+        return self._extensions
+
+    def signature_key(self) -> SSHCertPublicKeyTypes:
+        """Load the signing (CA) public key embedded in the cert."""
+        sigformat = _lookup_kformat(self._sig_type)
+        signature_key, sigkey_rest = sigformat.load_public(self._sig_key)
+        _check_empty(sigkey_rest)
+        return signature_key
+
+    def public_bytes(self) -> bytes:
+        # One-line format: "<key-type> <base64 certificate blob>"
+        return (
+            bytes(self._cert_key_type)
+            + b" "
+            + binascii.b2a_base64(bytes(self._cert_body), newline=False)
+        )
+
+    def verify_cert_signature(self) -> None:
+        """Verify the signature over the TBS body with the embedded key."""
+        signature_key = self.signature_key()
+        if isinstance(signature_key, ed25519.Ed25519PublicKey):
+            signature_key.verify(
+                bytes(self._signature), bytes(self._tbs_cert_body)
+            )
+        elif isinstance(signature_key, ec.EllipticCurvePublicKey):
+            # The signature is encoded as a pair of big-endian integers
+            r, data = _get_mpint(self._signature)
+            s, data = _get_mpint(data)
+            _check_empty(data)
+            computed_sig = asym_utils.encode_dss_signature(r, s)
+            hash_alg = _get_ec_hash_alg(signature_key.curve)
+            signature_key.verify(
+                computed_sig, bytes(self._tbs_cert_body), ec.ECDSA(hash_alg)
+            )
+        else:
+            assert isinstance(signature_key, rsa.RSAPublicKey)
+            # RSA certs may use the SHA-1, SHA-256 or SHA-512 variants.
+            if self._inner_sig_type == _SSH_RSA:
+                hash_alg = hashes.SHA1()
+            elif self._inner_sig_type == _SSH_RSA_SHA256:
+                hash_alg = hashes.SHA256()
+            else:
+                assert self._inner_sig_type == _SSH_RSA_SHA512
+                hash_alg = hashes.SHA512()
+            signature_key.verify(
+                bytes(self._signature),
+                bytes(self._tbs_cert_body),
+                padding.PKCS1v15(),
+                hash_alg,
+            )
+
+
+def _get_ec_hash_alg(curve: ec.EllipticCurve) -> hashes.HashAlgorithm:
+    """Return the hash paired with *curve* for ECDSA SSH signatures."""
+    if isinstance(curve, ec.SECP256R1):
+        return hashes.SHA256()
+    elif isinstance(curve, ec.SECP384R1):
+        return hashes.SHA384()
+    else:
+        assert isinstance(curve, ec.SECP521R1)
+        return hashes.SHA512()
+
+
+def _load_ssh_public_identity(
+    data: bytes,
+    _legacy_dsa_allowed=False,
+) -> typing.Union[SSHCertificate, SSHPublicKeyTypes]:
+    """Parse a one-line OpenSSH public key or certificate."""
+    utils._check_byteslike("data", data)
+
+    m = _SSH_PUBKEY_RC.match(data)
+    if not m:
+        raise ValueError("Invalid line format")
+    key_type = orig_key_type = m.group(1)
+    key_body = m.group(2)
+    with_cert = False
+    # Certificate key types are the plain key type plus _CERT_SUFFIX.
+    if key_type.endswith(_CERT_SUFFIX):
+        with_cert = True
+        key_type = key_type[: -len(_CERT_SUFFIX)]
+    if key_type == _SSH_DSA and not _legacy_dsa_allowed:
+        raise UnsupportedAlgorithm(
+            "DSA keys aren't supported in SSH certificates"
+        )
+    kformat = _lookup_kformat(key_type)
+
+    try:
+        rest = memoryview(binascii.a2b_base64(key_body))
+    except (TypeError, binascii.Error):
+        raise ValueError("Invalid format")
+
+    if with_cert:
+        # Keep the whole blob; the to-be-signed portion is sliced from it.
+        cert_body = rest
+    inner_key_type, rest = _get_sshstr(rest)
+    if inner_key_type != orig_key_type:
+        raise ValueError("Invalid key format")
+    if with_cert:
+        nonce, rest = _get_sshstr(rest)
+    public_key, rest = kformat.load_public(rest)
+    if with_cert:
+        serial, rest = _get_u64(rest)
+        cctype, rest = _get_u32(rest)
+        key_id, rest = _get_sshstr(rest)
+        principals, rest = _get_sshstr(rest)
+        valid_principals = []
+        # The principals field is itself a sequence of sshstr entries.
+        while principals:
+            principal, principals = _get_sshstr(principals)
+            valid_principals.append(bytes(principal))
+        valid_after, rest = _get_u64(rest)
+        valid_before, rest = _get_u64(rest)
+        crit_options, rest = _get_sshstr(rest)
+        critical_options = _parse_exts_opts(crit_options)
+        exts, rest = _get_sshstr(rest)
+        extensions = _parse_exts_opts(exts)
+        # Get the reserved field, which is unused.
+        _, rest = _get_sshstr(rest)
+        sig_key_raw, rest = _get_sshstr(rest)
+        sig_type, sig_key = _get_sshstr(sig_key_raw)
+        if sig_type == _SSH_DSA and not _legacy_dsa_allowed:
+            raise UnsupportedAlgorithm(
+                "DSA signatures aren't supported in SSH certificates"
+            )
+        # Get the entire cert body and subtract the signature
+        tbs_cert_body = cert_body[: -len(rest)]
+        signature_raw, rest = _get_sshstr(rest)
+        _check_empty(rest)
+        inner_sig_type, sig_rest = _get_sshstr(signature_raw)
+        # RSA certs can have multiple algorithm types
+        if (
+            sig_type == _SSH_RSA
+            and inner_sig_type
+            not in [_SSH_RSA_SHA256, _SSH_RSA_SHA512, _SSH_RSA]
+        ) or (sig_type != _SSH_RSA and inner_sig_type != sig_type):
+            raise ValueError("Signature key type does not match")
+        signature, sig_rest = _get_sshstr(sig_rest)
+        _check_empty(sig_rest)
+        return SSHCertificate(
+            nonce,
+            public_key,
+            serial,
+            cctype,
+            key_id,
+            valid_principals,
+            valid_after,
+            valid_before,
+            critical_options,
+            extensions,
+            sig_type,
+            sig_key,
+            inner_sig_type,
+            signature,
+            tbs_cert_body,
+            orig_key_type,
+            cert_body,
+        )
+    else:
+        _check_empty(rest)
+        return public_key
+
+
+def load_ssh_public_identity(
+    data: bytes,
+) -> typing.Union[SSHCertificate, SSHPublicKeyTypes]:
+    """Load a one-line OpenSSH public key or certificate (no legacy DSA)."""
+    return _load_ssh_public_identity(data)
+
+
+def _parse_exts_opts(exts_opts: memoryview) -> typing.Dict[bytes, bytes]:
+    """Parse certificate critical options / extensions into a dict.
+
+    Names must be unique and lexically sorted, as enforced below.
+    """
+    result: typing.Dict[bytes, bytes] = {}
+    last_name = None
+    while exts_opts:
+        name, exts_opts = _get_sshstr(exts_opts)
+        bname: bytes = bytes(name)
+        if bname in result:
+            raise ValueError("Duplicate name")
+        if last_name is not None and bname < last_name:
+            raise ValueError("Fields not lexically sorted")
+        value, exts_opts = _get_sshstr(exts_opts)
+        result[bname] = bytes(value)
+        last_name = bname
+    return result
+
+
+def load_ssh_public_key(
+    data: bytes, backend: typing.Any = None
+) -> SSHPublicKeyTypes:
+    """Load a one-line OpenSSH public key (legacy DSA allowed, deprecated)."""
+    cert_or_key = _load_ssh_public_identity(data, _legacy_dsa_allowed=True)
+    public_key: SSHPublicKeyTypes
+    if isinstance(cert_or_key, SSHCertificate):
+        # For certificates, return only the certified public key.
+        public_key = cert_or_key.public_key()
+    else:
+        public_key = cert_or_key
+
+    if isinstance(public_key, dsa.DSAPublicKey):
+        warnings.warn(
+            "SSH DSA keys are deprecated and will be removed in a future "
+            "release.",
+            utils.DeprecatedIn40,
+            stacklevel=2,
+        )
+    return public_key
+
+
+def serialize_ssh_public_key(public_key: SSHPublicKeyTypes) -> bytes:
+    """One-line public key format for OpenSSH"""
+    if isinstance(public_key, dsa.DSAPublicKey):
+        warnings.warn(
+            "SSH DSA key support is deprecated and will be "
+            "removed in a future release",
+            utils.DeprecatedIn40,
+            stacklevel=4,
+        )
+    key_type = _get_ssh_key_type(public_key)
+    kformat = _lookup_kformat(key_type)
+
+    f_pub = _FragList()
+    f_pub.put_sshstr(key_type)
+    kformat.encode_public(public_key, f_pub)
+
+    # Output: b"<key-type> <base64 blob>"
+    pub = binascii.b2a_base64(f_pub.tobytes()).strip()
+    return b"".join([key_type, b" ", pub])
+
+
+# Private-key types that may sign SSH certificates (DSA excluded).
+SSHCertPrivateKeyTypes = typing.Union[
+    ec.EllipticCurvePrivateKey,
+    rsa.RSAPrivateKey,
+    ed25519.Ed25519PrivateKey,
+]
+
+
+# This is an undocumented limit enforced in the openssh codebase for sshd and
+# ssh-keygen, but it is undefined in the ssh certificates spec.
+_SSHKEY_CERT_MAX_PRINCIPALS = 256
+
+
+class SSHCertificateBuilder:
+    """Immutable-style builder for OpenSSH certificates.
+
+    Each setter returns a new builder with the field applied; ``sign``
+    produces the final :class:`SSHCertificate`.
+    """
+
+    def __init__(
+        self,
+        _public_key: typing.Optional[SSHCertPublicKeyTypes] = None,
+        _serial: typing.Optional[int] = None,
+        _type: typing.Optional[SSHCertificateType] = None,
+        _key_id: typing.Optional[bytes] = None,
+        _valid_principals: typing.List[bytes] = [],
+        _valid_for_all_principals: bool = False,
+        _valid_before: typing.Optional[int] = None,
+        _valid_after: typing.Optional[int] = None,
+        _critical_options: typing.List[typing.Tuple[bytes, bytes]] = [],
+        _extensions: typing.List[typing.Tuple[bytes, bytes]] = [],
+    ):
+        # NOTE(review): the [] defaults are shared across calls; the
+        # builder only ever replaces these lists (it concatenates rather
+        # than appends), and sign() sorts them — a no-op for the shared
+        # empty defaults. Confirm this invariant if modifying.
+        self._public_key = _public_key
+        self._serial = _serial
+        self._type = _type
+        self._key_id = _key_id
+        self._valid_principals = _valid_principals
+        self._valid_for_all_principals = _valid_for_all_principals
+        self._valid_before = _valid_before
+        self._valid_after = _valid_after
+        self._critical_options = _critical_options
+        self._extensions = _extensions
+
+    def public_key(
+        self, public_key: SSHCertPublicKeyTypes
+    ) -> "SSHCertificateBuilder":
+        """Set the certified public key (EC, RSA, or Ed25519 only)."""
+        if not isinstance(
+            public_key,
+            (
+                ec.EllipticCurvePublicKey,
+                rsa.RSAPublicKey,
+                ed25519.Ed25519PublicKey,
+            ),
+        ):
+            raise TypeError("Unsupported key type")
+        if self._public_key is not None:
+            raise ValueError("public_key already set")
+
+        return SSHCertificateBuilder(
+            _public_key=public_key,
+            _serial=self._serial,
+            _type=self._type,
+            _key_id=self._key_id,
+            _valid_principals=self._valid_principals,
+            _valid_for_all_principals=self._valid_for_all_principals,
+            _valid_before=self._valid_before,
+            _valid_after=self._valid_after,
+            _critical_options=self._critical_options,
+            _extensions=self._extensions,
+        )
+
+    def serial(self, serial: int) -> "SSHCertificateBuilder":
+        """Set the certificate serial (u64 range)."""
+        if not isinstance(serial, int):
+            raise TypeError("serial must be an integer")
+        if not 0 <= serial < 2**64:
+            raise ValueError("serial must be between 0 and 2**64")
+        if self._serial is not None:
+            raise ValueError("serial already set")
+
+        return SSHCertificateBuilder(
+            _public_key=self._public_key,
+            _serial=serial,
+            _type=self._type,
+            _key_id=self._key_id,
+            _valid_principals=self._valid_principals,
+            _valid_for_all_principals=self._valid_for_all_principals,
+            _valid_before=self._valid_before,
+            _valid_after=self._valid_after,
+            _critical_options=self._critical_options,
+            _extensions=self._extensions,
+        )
+
+    def type(self, type: SSHCertificateType) -> "SSHCertificateBuilder":
+        """Set the certificate type (USER or HOST)."""
+        if not isinstance(type, SSHCertificateType):
+            raise TypeError("type must be an SSHCertificateType")
+        if self._type is not None:
+            raise ValueError("type already set")
+
+        return SSHCertificateBuilder(
+            _public_key=self._public_key,
+            _serial=self._serial,
+            _type=type,
+            _key_id=self._key_id,
+            _valid_principals=self._valid_principals,
+            _valid_for_all_principals=self._valid_for_all_principals,
+            _valid_before=self._valid_before,
+            _valid_after=self._valid_after,
+            _critical_options=self._critical_options,
+            _extensions=self._extensions,
+        )
+
+    def key_id(self, key_id: bytes) -> "SSHCertificateBuilder":
+        """Set the free-form key identifier."""
+        if not isinstance(key_id, bytes):
+            raise TypeError("key_id must be bytes")
+        if self._key_id is not None:
+            raise ValueError("key_id already set")
+
+        return SSHCertificateBuilder(
+            _public_key=self._public_key,
+            _serial=self._serial,
+            _type=self._type,
+            _key_id=key_id,
+            _valid_principals=self._valid_principals,
+            _valid_for_all_principals=self._valid_for_all_principals,
+            _valid_before=self._valid_before,
+            _valid_after=self._valid_after,
+            _critical_options=self._critical_options,
+            _extensions=self._extensions,
+        )
+
+    def valid_principals(
+        self, valid_principals: typing.List[bytes]
+    ) -> "SSHCertificateBuilder":
+        """Set the non-empty list of principals the cert is valid for."""
+        if self._valid_for_all_principals:
+            raise ValueError(
+                "Principals can't be set because the cert is valid "
+                "for all principals"
+            )
+        if (
+            not all(isinstance(x, bytes) for x in valid_principals)
+            or not valid_principals
+        ):
+            raise TypeError(
+                "principals must be a list of bytes and can't be empty"
+            )
+        if self._valid_principals:
+            raise ValueError("valid_principals already set")
+
+        if len(valid_principals) > _SSHKEY_CERT_MAX_PRINCIPALS:
+            raise ValueError(
+                "Reached or exceeded the maximum number of valid_principals"
+            )
+
+        return SSHCertificateBuilder(
+            _public_key=self._public_key,
+            _serial=self._serial,
+            _type=self._type,
+            _key_id=self._key_id,
+            _valid_principals=valid_principals,
+            _valid_for_all_principals=self._valid_for_all_principals,
+            _valid_before=self._valid_before,
+            _valid_after=self._valid_after,
+            _critical_options=self._critical_options,
+            _extensions=self._extensions,
+        )
+
+    def valid_for_all_principals(self):
+        """Mark the cert valid for any principal (empty principal list)."""
+        if self._valid_principals:
+            raise ValueError(
+                "valid_principals already set, can't set "
+                "valid_for_all_principals"
+            )
+        if self._valid_for_all_principals:
+            raise ValueError("valid_for_all_principals already set")
+
+        return SSHCertificateBuilder(
+            _public_key=self._public_key,
+            _serial=self._serial,
+            _type=self._type,
+            _key_id=self._key_id,
+            _valid_principals=self._valid_principals,
+            _valid_for_all_principals=True,
+            _valid_before=self._valid_before,
+            _valid_after=self._valid_after,
+            _critical_options=self._critical_options,
+            _extensions=self._extensions,
+        )
+
+    def valid_before(
+        self, valid_before: typing.Union[int, float]
+    ) -> "SSHCertificateBuilder":
+        """Set the expiry timestamp (truncated to int, u64 range)."""
+        if not isinstance(valid_before, (int, float)):
+            raise TypeError("valid_before must be an int or float")
+        valid_before = int(valid_before)
+        if valid_before < 0 or valid_before >= 2**64:
+            raise ValueError("valid_before must [0, 2**64)")
+        if self._valid_before is not None:
+            raise ValueError("valid_before already set")
+
+        return SSHCertificateBuilder(
+            _public_key=self._public_key,
+            _serial=self._serial,
+            _type=self._type,
+            _key_id=self._key_id,
+            _valid_principals=self._valid_principals,
+            _valid_for_all_principals=self._valid_for_all_principals,
+            _valid_before=valid_before,
+            _valid_after=self._valid_after,
+            _critical_options=self._critical_options,
+            _extensions=self._extensions,
+        )
+
+    def valid_after(
+        self, valid_after: typing.Union[int, float]
+    ) -> "SSHCertificateBuilder":
+        """Set the start-of-validity timestamp (truncated to int, u64)."""
+        if not isinstance(valid_after, (int, float)):
+            raise TypeError("valid_after must be an int or float")
+        valid_after = int(valid_after)
+        if valid_after < 0 or valid_after >= 2**64:
+            raise ValueError("valid_after must [0, 2**64)")
+        if self._valid_after is not None:
+            raise ValueError("valid_after already set")
+
+        return SSHCertificateBuilder(
+            _public_key=self._public_key,
+            _serial=self._serial,
+            _type=self._type,
+            _key_id=self._key_id,
+            _valid_principals=self._valid_principals,
+            _valid_for_all_principals=self._valid_for_all_principals,
+            _valid_before=self._valid_before,
+            _valid_after=valid_after,
+            _critical_options=self._critical_options,
+            _extensions=self._extensions,
+        )
+
+    def add_critical_option(
+        self, name: bytes, value: bytes
+    ) -> "SSHCertificateBuilder":
+        """Append a critical option; duplicate names are rejected."""
+        if not isinstance(name, bytes) or not isinstance(value, bytes):
+            raise TypeError("name and value must be bytes")
+        # This is O(n**2)
+        if name in [name for name, _ in self._critical_options]:
+            raise ValueError("Duplicate critical option name")
+
+        return SSHCertificateBuilder(
+            _public_key=self._public_key,
+            _serial=self._serial,
+            _type=self._type,
+            _key_id=self._key_id,
+            _valid_principals=self._valid_principals,
+            _valid_for_all_principals=self._valid_for_all_principals,
+            _valid_before=self._valid_before,
+            _valid_after=self._valid_after,
+            _critical_options=self._critical_options + [(name, value)],
+            _extensions=self._extensions,
+        )
+
+    def add_extension(
+        self, name: bytes, value: bytes
+    ) -> "SSHCertificateBuilder":
+        """Append an extension; duplicate names are rejected."""
+        if not isinstance(name, bytes) or not isinstance(value, bytes):
+            raise TypeError("name and value must be bytes")
+        # This is O(n**2)
+        if name in [name for name, _ in self._extensions]:
+            raise ValueError("Duplicate extension name")
+
+        return SSHCertificateBuilder(
+            _public_key=self._public_key,
+            _serial=self._serial,
+            _type=self._type,
+            _key_id=self._key_id,
+            _valid_principals=self._valid_principals,
+            _valid_for_all_principals=self._valid_for_all_principals,
+            _valid_before=self._valid_before,
+            _valid_after=self._valid_after,
+            _critical_options=self._critical_options,
+            _extensions=self._extensions + [(name, value)],
+        )
+
+    def sign(self, private_key: SSHCertPrivateKeyTypes) -> SSHCertificate:
+        """Validate required fields, marshal the cert, and sign it."""
+        if not isinstance(
+            private_key,
+            (
+                ec.EllipticCurvePrivateKey,
+                rsa.RSAPrivateKey,
+                ed25519.Ed25519PrivateKey,
+            ),
+        ):
+            raise TypeError("Unsupported private key type")
+
+        if self._public_key is None:
+            raise ValueError("public_key must be set")
+
+        # Not required
+        serial = 0 if self._serial is None else self._serial
+
+        if self._type is None:
+            raise ValueError("type must be set")
+
+        # Not required
+        key_id = b"" if self._key_id is None else self._key_id
+
+        # A zero length list is valid, but means the certificate
+        # is valid for any principal of the specified type. We require
+        # the user to explicitly set valid_for_all_principals to get
+        # that behavior.
+        if not self._valid_principals and not self._valid_for_all_principals:
+            raise ValueError(
+                "valid_principals must be set if valid_for_all_principals "
+                "is False"
+            )
+
+        if self._valid_before is None:
+            raise ValueError("valid_before must be set")
+
+        if self._valid_after is None:
+            raise ValueError("valid_after must be set")
+
+        if self._valid_after > self._valid_before:
+            raise ValueError("valid_after must be earlier than valid_before")
+
+        # lexically sort our byte strings
+        # NOTE(review): sorts in place, mutating this builder's lists.
+        self._critical_options.sort(key=lambda x: x[0])
+        self._extensions.sort(key=lambda x: x[0])
+
+        key_type = _get_ssh_key_type(self._public_key)
+        cert_prefix = key_type + _CERT_SUFFIX
+
+        # Marshal the bytes to be signed
+        nonce = os.urandom(32)
+        kformat = _lookup_kformat(key_type)
+        f = _FragList()
+        f.put_sshstr(cert_prefix)
+        f.put_sshstr(nonce)
+        kformat.encode_public(self._public_key, f)
+        f.put_u64(serial)
+        f.put_u32(self._type.value)
+        f.put_sshstr(key_id)
+        fprincipals = _FragList()
+        for p in self._valid_principals:
+            fprincipals.put_sshstr(p)
+        f.put_sshstr(fprincipals.tobytes())
+        f.put_u64(self._valid_after)
+        f.put_u64(self._valid_before)
+        fcrit = _FragList()
+        for name, value in self._critical_options:
+            fcrit.put_sshstr(name)
+            fcrit.put_sshstr(value)
+        f.put_sshstr(fcrit.tobytes())
+        fext = _FragList()
+        for name, value in self._extensions:
+            fext.put_sshstr(name)
+            fext.put_sshstr(value)
+        f.put_sshstr(fext.tobytes())
+        f.put_sshstr(b"")  # RESERVED FIELD
+        # encode CA public key
+        ca_type = _get_ssh_key_type(private_key)
+        caformat = _lookup_kformat(ca_type)
+        caf = _FragList()
+        caf.put_sshstr(ca_type)
+        caformat.encode_public(private_key.public_key(), caf)
+        f.put_sshstr(caf.tobytes())
+        # Sigs according to the rules defined for the CA's public key
+        # (RFC4253 section 6.6 for ssh-rsa, RFC5656 for ECDSA,
+        # and RFC8032 for Ed25519).
+        if isinstance(private_key, ed25519.Ed25519PrivateKey):
+            signature = private_key.sign(f.tobytes())
+            fsig = _FragList()
+            fsig.put_sshstr(ca_type)
+            fsig.put_sshstr(signature)
+            f.put_sshstr(fsig.tobytes())
+        elif isinstance(private_key, ec.EllipticCurvePrivateKey):
+            hash_alg = _get_ec_hash_alg(private_key.curve)
+            signature = private_key.sign(f.tobytes(), ec.ECDSA(hash_alg))
+            # SSH encodes the ECDSA signature as two mpints (r, s).
+            r, s = asym_utils.decode_dss_signature(signature)
+            fsig = _FragList()
+            fsig.put_sshstr(ca_type)
+            fsigblob = _FragList()
+            fsigblob.put_mpint(r)
+            fsigblob.put_mpint(s)
+            fsig.put_sshstr(fsigblob.tobytes())
+            f.put_sshstr(fsig.tobytes())
+
+        else:
+            assert isinstance(private_key, rsa.RSAPrivateKey)
+            # Just like Golang, we're going to use SHA512 for RSA
+            # https://cs.opensource.google/go/x/crypto/+/refs/tags/
+            # v0.4.0:ssh/certs.go;l=445
+            # RFC 8332 defines SHA256 and 512 as options
+            fsig = _FragList()
+            fsig.put_sshstr(_SSH_RSA_SHA512)
+            signature = private_key.sign(
+                f.tobytes(), padding.PKCS1v15(), hashes.SHA512()
+            )
+            fsig.put_sshstr(signature)
+            f.put_sshstr(fsig.tobytes())
+
+        cert_data = binascii.b2a_base64(f.tobytes()).strip()
+        # load_ssh_public_identity returns a union, but this is
+        # guaranteed to be an SSHCertificate, so we cast to make
+        # mypy happy.
+        return typing.cast(
+            SSHCertificate,
+            load_ssh_public_identity(b"".join([cert_prefix, b" ", cert_data])),
+        )
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/twofactor/__init__.py b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/twofactor/__init__.py
new file mode 100644
index 00000000..8a8b30f2
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/twofactor/__init__.py
@@ -0,0 +1,7 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+
+class InvalidToken(Exception):
+    """Raised when a supplied OTP value fails verification."""
+
+    pass
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/twofactor/hotp.py b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/twofactor/hotp.py
new file mode 100644
index 00000000..26082221
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/twofactor/hotp.py
@@ -0,0 +1,91 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+
+import base64
+import typing
+from urllib.parse import quote, urlencode
+
+from cryptography.hazmat.primitives import constant_time, hmac
+from cryptography.hazmat.primitives.hashes import SHA1, SHA256, SHA512
+from cryptography.hazmat.primitives.twofactor import InvalidToken
+
+# Hash algorithms permitted for HOTP/TOTP (enforced in HOTP.__init__).
+HOTPHashTypes = typing.Union[SHA1, SHA256, SHA512]
+
+
+def _generate_uri(
+    hotp: "HOTP",
+    type_name: str,
+    account_name: str,
+    issuer: typing.Optional[str],
+    extra_parameters: typing.List[typing.Tuple[str, int]],
+) -> str:
+    """Build an otpauth:// provisioning URI for *hotp*."""
+    parameters = [
+        ("digits", hotp._length),
+        ("secret", base64.b32encode(hotp._key)),
+        ("algorithm", hotp._algorithm.name.upper()),
+    ]
+
+    if issuer is not None:
+        parameters.append(("issuer", issuer))
+
+    parameters.extend(extra_parameters)
+
+    # Label is "issuer:account" when an issuer is given, else just account.
+    label = (
+        f"{quote(issuer)}:{quote(account_name)}"
+        if issuer
+        else quote(account_name)
+    )
+    return f"otpauth://{type_name}/{label}?{urlencode(parameters)}"
+
+
+class HOTP:
+    """HMAC-based one-time password generator/verifier."""
+
+    def __init__(
+        self,
+        key: bytes,
+        length: int,
+        algorithm: HOTPHashTypes,
+        backend: typing.Any = None,
+        enforce_key_length: bool = True,
+    ) -> None:
+        if len(key) < 16 and enforce_key_length is True:
+            raise ValueError("Key length has to be at least 128 bits.")
+
+        if not isinstance(length, int):
+            raise TypeError("Length parameter must be an integer type.")
+
+        if length < 6 or length > 8:
+            raise ValueError("Length of HOTP has to be between 6 and 8.")
+
+        if not isinstance(algorithm, (SHA1, SHA256, SHA512)):
+            raise TypeError("Algorithm must be SHA1, SHA256 or SHA512.")
+
+        self._key = key
+        self._length = length
+        self._algorithm = algorithm
+
+    def generate(self, counter: int) -> bytes:
+        """Return the OTP for *counter* as zero-padded ASCII digits."""
+        truncated_value = self._dynamic_truncate(counter)
+        hotp = truncated_value % (10**self._length)
+        return "{0:0{1}}".format(hotp, self._length).encode()
+
+    def verify(self, hotp: bytes, counter: int) -> None:
+        # Constant-time comparison avoids leaking timing information.
+        if not constant_time.bytes_eq(self.generate(counter), hotp):
+            raise InvalidToken("Supplied HOTP value does not match.")
+
+    def _dynamic_truncate(self, counter: int) -> int:
+        ctx = hmac.HMAC(self._key, self._algorithm)
+        ctx.update(counter.to_bytes(length=8, byteorder="big"))
+        hmac_value = ctx.finalize()
+
+        # Dynamic truncation (RFC 4226 section 5.3): the low 4 bits of the
+        # last byte select an offset; take 4 bytes there, mask to 31 bits.
+        offset = hmac_value[len(hmac_value) - 1] & 0b1111
+        p = hmac_value[offset : offset + 4]
+        return int.from_bytes(p, byteorder="big") & 0x7FFFFFFF
+
+    def get_provisioning_uri(
+        self, account_name: str, counter: int, issuer: typing.Optional[str]
+    ) -> str:
+        """Return an otpauth:// URI seeded with the initial counter."""
+        return _generate_uri(
+            self, "hotp", account_name, issuer, [("counter", int(counter))]
+        )
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/twofactor/totp.py b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/twofactor/totp.py
new file mode 100644
index 00000000..c66fa1de
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/hazmat/primitives/twofactor/totp.py
@@ -0,0 +1,48 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import typing
+
+from cryptography.hazmat.primitives import constant_time
+from cryptography.hazmat.primitives.twofactor import InvalidToken
+from cryptography.hazmat.primitives.twofactor.hotp import (
+ HOTP,
+ HOTPHashTypes,
+ _generate_uri,
+)
+
+
class TOTP:
    """Time-based one-time password implementation (RFC 6238)."""

    def __init__(
        self,
        key: bytes,
        length: int,
        algorithm: HOTPHashTypes,
        time_step: int,
        backend: typing.Any = None,
        enforce_key_length: bool = True,
    ):
        # TOTP is HOTP with the counter derived from wall-clock time;
        # key/length/algorithm validation is delegated to HOTP.
        self._time_step = time_step
        self._hotp = HOTP(
            key, length, algorithm, enforce_key_length=enforce_key_length
        )

    def generate(self, time: typing.Union[int, float]) -> bytes:
        """Return the OTP for the time step containing *time*."""
        return self._hotp.generate(int(time / self._time_step))

    def verify(self, totp: bytes, time: int) -> None:
        """Raise InvalidToken unless *totp* matches the value for *time*."""
        expected = self.generate(time)
        if not constant_time.bytes_eq(expected, totp):
            raise InvalidToken("Supplied TOTP value does not match.")

    def get_provisioning_uri(
        self, account_name: str, issuer: typing.Optional[str]
    ) -> str:
        """Return an otpauth:// URI suitable for provisioning apps."""
        period = int(self._time_step)
        return _generate_uri(
            self._hotp, "totp", account_name, issuer, [("period", period)]
        )
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/py.typed b/Voltage Vikings/lib/python3.9/site-packages/cryptography/py.typed
new file mode 100644
index 00000000..e69de29b
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/utils.py b/Voltage Vikings/lib/python3.9/site-packages/cryptography/utils.py
new file mode 100644
index 00000000..1a2d490a
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/utils.py
@@ -0,0 +1,129 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+
+import enum
+import sys
+import types
+import typing
+import warnings
+
+
+# We use a UserWarning subclass, instead of DeprecationWarning, because CPython
+# decided deprecation warnings should be invisible by default.
class CryptographyDeprecationWarning(UserWarning):
    # Deliberately a UserWarning subclass (see comment above) so that
    # deprecation messages remain visible to end users by default.
    pass
+
+
# Several APIs were deprecated with no specific end-of-life date because of the
# ubiquity of their use. They should not be removed until we agree on when that
# cycle ends.
# Each alias tags the library release (3.6, 3.7, ...) in which the
# corresponding deprecation was introduced.
DeprecatedIn36 = CryptographyDeprecationWarning
DeprecatedIn37 = CryptographyDeprecationWarning
DeprecatedIn39 = CryptographyDeprecationWarning
DeprecatedIn40 = CryptographyDeprecationWarning
+
+
+def _check_bytes(name: str, value: bytes) -> None:
+ if not isinstance(value, bytes):
+ raise TypeError(f"{name} must be bytes")
+
+
+def _check_byteslike(name: str, value: bytes) -> None:
+ try:
+ memoryview(value)
+ except TypeError:
+ raise TypeError(f"{name} must be bytes-like")
+
+
def int_to_bytes(integer: int, length: typing.Optional[int] = None) -> bytes:
    """Serialize *integer* big-endian; minimal width unless *length* is given."""
    if not length:
        # ceil(bit_length / 8); zero still occupies one byte.
        length = max(1, (integer.bit_length() + 7) // 8)
    return integer.to_bytes(length, "big")
+
+
def _extract_buffer_length(obj: typing.Any) -> typing.Tuple[int, int]:
    """Return (address, length) of *obj*'s buffer via the _openssl FFI."""
    from cryptography.hazmat.bindings._rust import _openssl

    cdata = _openssl.ffi.from_buffer(obj)
    address = int(_openssl.ffi.cast("uintptr_t", cdata))
    return address, len(cdata)
+
+
class InterfaceNotImplemented(Exception):
    # NOTE(review): no raisers are visible in this file; presumably raised
    # by interface-conformance checks elsewhere in the package.
    pass
+
+
+class _DeprecatedValue:
+ def __init__(self, value: object, message: str, warning_class):
+ self.value = value
+ self.message = message
+ self.warning_class = warning_class
+
+
class _ModuleWithDeprecations(types.ModuleType):
    # Proxy module that emits the recorded warning whenever a
    # _DeprecatedValue attribute is read or deleted.
    def __init__(self, module: types.ModuleType):
        super().__init__(module.__name__)
        # Write through __dict__ so our own __setattr__ (which forwards to
        # the wrapped module) is not triggered.
        self.__dict__["_module"] = module

    def __getattr__(self, attr: str) -> object:
        # Delegate to the wrapped module; unwrap deprecated values after
        # warning (stacklevel=2 points at the caller's attribute access).
        obj = getattr(self._module, attr)
        if isinstance(obj, _DeprecatedValue):
            warnings.warn(obj.message, obj.warning_class, stacklevel=2)
            obj = obj.value
        return obj

    def __setattr__(self, attr: str, value: object) -> None:
        # Writes go to the wrapped module, not this proxy.
        setattr(self._module, attr, value)

    def __delattr__(self, attr: str) -> None:
        # Deleting a deprecated attribute also warns before removal.
        obj = getattr(self._module, attr)
        if isinstance(obj, _DeprecatedValue):
            warnings.warn(obj.message, obj.warning_class, stacklevel=2)

        delattr(self._module, attr)

    def __dir__(self) -> typing.Sequence[str]:
        return ["_module"] + dir(self._module)
+
+
def deprecated(
    value: object,
    module_name: str,
    message: str,
    warning_class: typing.Type[Warning],
    name: typing.Optional[str] = None,
) -> _DeprecatedValue:
    """Register *value* as deprecated in *module_name* and return the wrapper."""
    module = sys.modules[module_name]
    # Wrap the module in the warning-emitting proxy exactly once.
    if not isinstance(module, _ModuleWithDeprecations):
        module = _ModuleWithDeprecations(module)
        sys.modules[module_name] = module
    dv = _DeprecatedValue(value, message, warning_class)
    # `name is None` is tolerated for pyOpenSSL backwards compatibility;
    # the caller then assigns the returned wrapper itself.
    if name is not None:
        setattr(module, name, dv)
    return dv
+
+
def cached_property(func: typing.Callable) -> property:
    """Read-only property that computes *func* once per instance and caches.

    The result is stored on the instance under a private attribute, so
    subsequent reads skip *func* entirely.
    """
    # Use the function's __name__ rather than its repr: f"_cached_{func}"
    # embeds the function object's memory address in the attribute name,
    # which is unstable across runs and hostile to debugging/serialization.
    cached_name = f"_cached_{func.__name__}"
    sentinel = object()

    def inner(instance: object):
        cache = getattr(instance, cached_name, sentinel)
        if cache is not sentinel:
            return cache
        result = func(instance)
        setattr(instance, cached_name, result)
        return result

    return property(inner)
+
+
+# Python 3.10 changed representation of enums. We use well-defined object
+# representation and string representation from Python 3.9.
class Enum(enum.Enum):
    """Enum base pinning the pre-3.10 repr/str output.

    Python 3.10 changed how enum members render; this keeps the
    well-defined Python 3.9 formatting on every supported version.
    """

    def __repr__(self) -> str:
        cls_name = self.__class__.__name__
        return f"<{cls_name}.{self._name_}: {self._value_!r}>"

    def __str__(self) -> str:
        return ".".join([self.__class__.__name__, self._name_])
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/x509/__init__.py b/Voltage Vikings/lib/python3.9/site-packages/cryptography/x509/__init__.py
new file mode 100644
index 00000000..ad924ad4
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/x509/__init__.py
@@ -0,0 +1,250 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+
+from cryptography.x509 import certificate_transparency
+from cryptography.x509.base import (
+ Attribute,
+ AttributeNotFound,
+ Attributes,
+ Certificate,
+ CertificateBuilder,
+ CertificateRevocationList,
+ CertificateRevocationListBuilder,
+ CertificateSigningRequest,
+ CertificateSigningRequestBuilder,
+ InvalidVersion,
+ RevokedCertificate,
+ RevokedCertificateBuilder,
+ Version,
+ load_der_x509_certificate,
+ load_der_x509_crl,
+ load_der_x509_csr,
+ load_pem_x509_certificate,
+ load_pem_x509_certificates,
+ load_pem_x509_crl,
+ load_pem_x509_csr,
+ random_serial_number,
+)
+from cryptography.x509.extensions import (
+ AccessDescription,
+ AuthorityInformationAccess,
+ AuthorityKeyIdentifier,
+ BasicConstraints,
+ CertificateIssuer,
+ CertificatePolicies,
+ CRLDistributionPoints,
+ CRLNumber,
+ CRLReason,
+ DeltaCRLIndicator,
+ DistributionPoint,
+ DuplicateExtension,
+ ExtendedKeyUsage,
+ Extension,
+ ExtensionNotFound,
+ Extensions,
+ ExtensionType,
+ FreshestCRL,
+ GeneralNames,
+ InhibitAnyPolicy,
+ InvalidityDate,
+ IssuerAlternativeName,
+ IssuingDistributionPoint,
+ KeyUsage,
+ NameConstraints,
+ NoticeReference,
+ OCSPNoCheck,
+ OCSPNonce,
+ PolicyConstraints,
+ PolicyInformation,
+ PrecertificateSignedCertificateTimestamps,
+ PrecertPoison,
+ ReasonFlags,
+ SignedCertificateTimestamps,
+ SubjectAlternativeName,
+ SubjectInformationAccess,
+ SubjectKeyIdentifier,
+ TLSFeature,
+ TLSFeatureType,
+ UnrecognizedExtension,
+ UserNotice,
+)
+from cryptography.x509.general_name import (
+ DirectoryName,
+ DNSName,
+ GeneralName,
+ IPAddress,
+ OtherName,
+ RegisteredID,
+ RFC822Name,
+ UniformResourceIdentifier,
+ UnsupportedGeneralNameType,
+)
+from cryptography.x509.name import (
+ Name,
+ NameAttribute,
+ RelativeDistinguishedName,
+)
+from cryptography.x509.oid import (
+ AuthorityInformationAccessOID,
+ CertificatePoliciesOID,
+ CRLEntryExtensionOID,
+ ExtendedKeyUsageOID,
+ ExtensionOID,
+ NameOID,
+ ObjectIdentifier,
+ SignatureAlgorithmOID,
+)
+
# Flat module-level OID_* aliases for the grouped *OID class constants —
# presumably retained for backwards compatibility with older callers.
OID_AUTHORITY_INFORMATION_ACCESS = ExtensionOID.AUTHORITY_INFORMATION_ACCESS
OID_AUTHORITY_KEY_IDENTIFIER = ExtensionOID.AUTHORITY_KEY_IDENTIFIER
OID_BASIC_CONSTRAINTS = ExtensionOID.BASIC_CONSTRAINTS
OID_CERTIFICATE_POLICIES = ExtensionOID.CERTIFICATE_POLICIES
OID_CRL_DISTRIBUTION_POINTS = ExtensionOID.CRL_DISTRIBUTION_POINTS
OID_EXTENDED_KEY_USAGE = ExtensionOID.EXTENDED_KEY_USAGE
OID_FRESHEST_CRL = ExtensionOID.FRESHEST_CRL
OID_INHIBIT_ANY_POLICY = ExtensionOID.INHIBIT_ANY_POLICY
OID_ISSUER_ALTERNATIVE_NAME = ExtensionOID.ISSUER_ALTERNATIVE_NAME
OID_KEY_USAGE = ExtensionOID.KEY_USAGE
OID_NAME_CONSTRAINTS = ExtensionOID.NAME_CONSTRAINTS
OID_OCSP_NO_CHECK = ExtensionOID.OCSP_NO_CHECK
OID_POLICY_CONSTRAINTS = ExtensionOID.POLICY_CONSTRAINTS
OID_POLICY_MAPPINGS = ExtensionOID.POLICY_MAPPINGS
OID_SUBJECT_ALTERNATIVE_NAME = ExtensionOID.SUBJECT_ALTERNATIVE_NAME
OID_SUBJECT_DIRECTORY_ATTRIBUTES = ExtensionOID.SUBJECT_DIRECTORY_ATTRIBUTES
OID_SUBJECT_INFORMATION_ACCESS = ExtensionOID.SUBJECT_INFORMATION_ACCESS
OID_SUBJECT_KEY_IDENTIFIER = ExtensionOID.SUBJECT_KEY_IDENTIFIER

OID_DSA_WITH_SHA1 = SignatureAlgorithmOID.DSA_WITH_SHA1
OID_DSA_WITH_SHA224 = SignatureAlgorithmOID.DSA_WITH_SHA224
OID_DSA_WITH_SHA256 = SignatureAlgorithmOID.DSA_WITH_SHA256
OID_ECDSA_WITH_SHA1 = SignatureAlgorithmOID.ECDSA_WITH_SHA1
OID_ECDSA_WITH_SHA224 = SignatureAlgorithmOID.ECDSA_WITH_SHA224
OID_ECDSA_WITH_SHA256 = SignatureAlgorithmOID.ECDSA_WITH_SHA256
OID_ECDSA_WITH_SHA384 = SignatureAlgorithmOID.ECDSA_WITH_SHA384
OID_ECDSA_WITH_SHA512 = SignatureAlgorithmOID.ECDSA_WITH_SHA512
OID_RSA_WITH_MD5 = SignatureAlgorithmOID.RSA_WITH_MD5
OID_RSA_WITH_SHA1 = SignatureAlgorithmOID.RSA_WITH_SHA1
OID_RSA_WITH_SHA224 = SignatureAlgorithmOID.RSA_WITH_SHA224
OID_RSA_WITH_SHA256 = SignatureAlgorithmOID.RSA_WITH_SHA256
OID_RSA_WITH_SHA384 = SignatureAlgorithmOID.RSA_WITH_SHA384
OID_RSA_WITH_SHA512 = SignatureAlgorithmOID.RSA_WITH_SHA512
OID_RSASSA_PSS = SignatureAlgorithmOID.RSASSA_PSS

OID_COMMON_NAME = NameOID.COMMON_NAME
OID_COUNTRY_NAME = NameOID.COUNTRY_NAME
OID_DOMAIN_COMPONENT = NameOID.DOMAIN_COMPONENT
OID_DN_QUALIFIER = NameOID.DN_QUALIFIER
OID_EMAIL_ADDRESS = NameOID.EMAIL_ADDRESS
OID_GENERATION_QUALIFIER = NameOID.GENERATION_QUALIFIER
OID_GIVEN_NAME = NameOID.GIVEN_NAME
OID_LOCALITY_NAME = NameOID.LOCALITY_NAME
OID_ORGANIZATIONAL_UNIT_NAME = NameOID.ORGANIZATIONAL_UNIT_NAME
OID_ORGANIZATION_NAME = NameOID.ORGANIZATION_NAME
OID_PSEUDONYM = NameOID.PSEUDONYM
OID_SERIAL_NUMBER = NameOID.SERIAL_NUMBER
OID_STATE_OR_PROVINCE_NAME = NameOID.STATE_OR_PROVINCE_NAME
OID_SURNAME = NameOID.SURNAME
OID_TITLE = NameOID.TITLE

OID_CLIENT_AUTH = ExtendedKeyUsageOID.CLIENT_AUTH
OID_CODE_SIGNING = ExtendedKeyUsageOID.CODE_SIGNING
OID_EMAIL_PROTECTION = ExtendedKeyUsageOID.EMAIL_PROTECTION
OID_OCSP_SIGNING = ExtendedKeyUsageOID.OCSP_SIGNING
OID_SERVER_AUTH = ExtendedKeyUsageOID.SERVER_AUTH
OID_TIME_STAMPING = ExtendedKeyUsageOID.TIME_STAMPING

OID_ANY_POLICY = CertificatePoliciesOID.ANY_POLICY
OID_CPS_QUALIFIER = CertificatePoliciesOID.CPS_QUALIFIER
OID_CPS_USER_NOTICE = CertificatePoliciesOID.CPS_USER_NOTICE

OID_CERTIFICATE_ISSUER = CRLEntryExtensionOID.CERTIFICATE_ISSUER
OID_CRL_REASON = CRLEntryExtensionOID.CRL_REASON
OID_INVALIDITY_DATE = CRLEntryExtensionOID.INVALIDITY_DATE

OID_CA_ISSUERS = AuthorityInformationAccessOID.CA_ISSUERS
OID_OCSP = AuthorityInformationAccessOID.OCSP
+
# Declared public API of cryptography.x509 (also governs `import *`).
__all__ = [
    "certificate_transparency",
    "load_pem_x509_certificate",
    "load_pem_x509_certificates",
    "load_der_x509_certificate",
    "load_pem_x509_csr",
    "load_der_x509_csr",
    "load_pem_x509_crl",
    "load_der_x509_crl",
    "random_serial_number",
    "Attribute",
    "AttributeNotFound",
    "Attributes",
    "InvalidVersion",
    "DeltaCRLIndicator",
    "DuplicateExtension",
    "ExtensionNotFound",
    "UnsupportedGeneralNameType",
    "NameAttribute",
    "Name",
    "RelativeDistinguishedName",
    "ObjectIdentifier",
    "ExtensionType",
    "Extensions",
    "Extension",
    "ExtendedKeyUsage",
    "FreshestCRL",
    "IssuingDistributionPoint",
    "TLSFeature",
    "TLSFeatureType",
    "OCSPNoCheck",
    "BasicConstraints",
    "CRLNumber",
    "KeyUsage",
    "AuthorityInformationAccess",
    "SubjectInformationAccess",
    "AccessDescription",
    "CertificatePolicies",
    "PolicyInformation",
    "UserNotice",
    "NoticeReference",
    "SubjectKeyIdentifier",
    "NameConstraints",
    "CRLDistributionPoints",
    "DistributionPoint",
    "ReasonFlags",
    "InhibitAnyPolicy",
    "SubjectAlternativeName",
    "IssuerAlternativeName",
    "AuthorityKeyIdentifier",
    "GeneralNames",
    "GeneralName",
    "RFC822Name",
    "DNSName",
    "UniformResourceIdentifier",
    "RegisteredID",
    "DirectoryName",
    "IPAddress",
    "OtherName",
    "Certificate",
    "CertificateRevocationList",
    "CertificateRevocationListBuilder",
    "CertificateSigningRequest",
    "RevokedCertificate",
    "RevokedCertificateBuilder",
    "CertificateSigningRequestBuilder",
    "CertificateBuilder",
    "Version",
    "OID_CA_ISSUERS",
    "OID_OCSP",
    "CertificateIssuer",
    "CRLReason",
    "InvalidityDate",
    "UnrecognizedExtension",
    "PolicyConstraints",
    "PrecertificateSignedCertificateTimestamps",
    "PrecertPoison",
    "OCSPNonce",
    "SignedCertificateTimestamps",
    "SignatureAlgorithmOID",
    "NameOID",
]
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/x509/base.py b/Voltage Vikings/lib/python3.9/site-packages/cryptography/x509/base.py
new file mode 100644
index 00000000..35c846d3
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/x509/base.py
@@ -0,0 +1,1152 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+
+import abc
+import datetime
+import os
+import typing
+
+from cryptography import utils
+from cryptography.hazmat.bindings._rust import x509 as rust_x509
+from cryptography.hazmat.primitives import hashes, serialization
+from cryptography.hazmat.primitives.asymmetric import (
+ dsa,
+ ec,
+ ed448,
+ ed25519,
+ rsa,
+ x448,
+ x25519,
+)
+from cryptography.hazmat.primitives.asymmetric.types import (
+ CertificateIssuerPrivateKeyTypes,
+ CertificateIssuerPublicKeyTypes,
+ CertificatePublicKeyTypes,
+)
+from cryptography.x509.extensions import (
+ Extension,
+ Extensions,
+ ExtensionType,
+ _make_sequence_methods,
+)
+from cryptography.x509.name import Name, _ASN1Type
+from cryptography.x509.oid import ObjectIdentifier
+
# Lower bound applied when validating certificate validity times
# (ASN.1 UTCTime's representable range begins in 1950; see RFC 5280).
_EARLIEST_UTC_TIME = datetime.datetime(1950, 1, 1)

# This must be kept in sync with sign.rs's list of allowable types in
# identify_hash_type
_AllowedHashTypes = typing.Union[
    hashes.SHA224,
    hashes.SHA256,
    hashes.SHA384,
    hashes.SHA512,
    hashes.SHA3_224,
    hashes.SHA3_256,
    hashes.SHA3_384,
    hashes.SHA3_512,
]
+
+
class AttributeNotFound(Exception):
    """Raised when an attribute lookup by OID finds no match."""

    def __init__(self, msg: str, oid: ObjectIdentifier) -> None:
        # Keep the missing OID available to handlers.
        self.oid = oid
        super().__init__(msg)
+
+
def _reject_duplicate_extension(
    extension: Extension[ExtensionType],
    extensions: typing.List[Extension[ExtensionType]],
) -> None:
    """Raise ValueError if *extension*'s OID already appears in *extensions*.

    Linear scan per call (quadratic across repeated adds); the extension
    lists involved are expected to be short.
    """
    if any(existing.oid == extension.oid for existing in extensions):
        raise ValueError("This extension has already been set.")
+
+
def _reject_duplicate_attribute(
    oid: ObjectIdentifier,
    attributes: typing.List[
        typing.Tuple[ObjectIdentifier, bytes, typing.Optional[int]]
    ],
) -> None:
    """Raise ValueError if *oid* already appears in *attributes*.

    Linear scan per call (quadratic across repeated adds); attribute
    lists are expected to be short.
    """
    if any(existing_oid == oid for existing_oid, _, _ in attributes):
        raise ValueError("This attribute has already been set.")
+
+
+def _convert_to_naive_utc_time(time: datetime.datetime) -> datetime.datetime:
+ """Normalizes a datetime to a naive datetime in UTC.
+
+ time -- datetime to normalize. Assumed to be in UTC if not timezone
+ aware.
+ """
+ if time.tzinfo is not None:
+ offset = time.utcoffset()
+ offset = offset if offset else datetime.timedelta()
+ return time.replace(tzinfo=None) - offset
+ else:
+ return time
+
+
class Attribute:
    """A single X.509 attribute: an OID, its value, and its ASN.1 tag."""

    def __init__(
        self,
        oid: ObjectIdentifier,
        value: bytes,
        _type: int = _ASN1Type.UTF8String.value,
    ) -> None:
        self._oid = oid
        self._value = value
        # ASN.1 universal tag of the value; defaults to UTF8String.
        self._type = _type

    @property
    def oid(self) -> ObjectIdentifier:
        return self._oid

    @property
    def value(self) -> bytes:
        return self._value

    def __repr__(self) -> str:
        # Fixed: the previous body returned an empty f-string, losing all
        # debugging information; render the OID and value instead.
        return f"<Attribute(oid={self.oid}, value={self.value!r})>"

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, Attribute):
            return NotImplemented

        return (
            self.oid == other.oid
            and self.value == other.value
            and self._type == other._type
        )

    def __hash__(self) -> int:
        return hash((self.oid, self.value, self._type))
+
+
class Attributes:
    """Read-only sequence of :class:`Attribute` objects."""

    def __init__(
        self,
        attributes: typing.Iterable[Attribute],
    ) -> None:
        self._attributes = list(attributes)

    __len__, __iter__, __getitem__ = _make_sequence_methods("_attributes")

    def __repr__(self) -> str:
        # Fixed: the previous body returned an empty f-string, losing all
        # debugging information; show the contained attributes instead.
        return f"<Attributes({self._attributes})>"

    def get_attribute_for_oid(self, oid: ObjectIdentifier) -> Attribute:
        """Return the attribute matching *oid*; raise AttributeNotFound otherwise."""
        for attr in self:
            if attr.oid == oid:
                return attr

        raise AttributeNotFound(f"No {oid} attribute was found", oid)
+
+
class Version(utils.Enum):
    # X.509 certificate versions are zero-indexed on the wire, so v1 == 0
    # and v3 == 2; v2 is intentionally not represented here.
    v1 = 0
    v3 = 2
+
+
class InvalidVersion(Exception):
    """Raised when a certificate carries an unsupported version number."""

    def __init__(self, msg: str, parsed_version: int) -> None:
        # Preserve the offending raw version for callers to inspect.
        self.parsed_version = parsed_version
        super().__init__(msg)
+
+
class Certificate(metaclass=abc.ABCMeta):
    """Abstract interface for an X.509 certificate.

    Concrete instances come from the Rust binding, which is registered as
    a virtual subclass below; this class only defines the contract.
    """

    @abc.abstractmethod
    def fingerprint(self, algorithm: hashes.HashAlgorithm) -> bytes:
        """
        Returns bytes using digest passed.
        """

    @property
    @abc.abstractmethod
    def serial_number(self) -> int:
        """
        Returns certificate serial number
        """

    @property
    @abc.abstractmethod
    def version(self) -> Version:
        """
        Returns the certificate version
        """

    @abc.abstractmethod
    def public_key(self) -> CertificatePublicKeyTypes:
        """
        Returns the public key
        """

    @property
    @abc.abstractmethod
    def not_valid_before(self) -> datetime.datetime:
        """
        Not before time (represented as UTC datetime)
        """

    @property
    @abc.abstractmethod
    def not_valid_after(self) -> datetime.datetime:
        """
        Not after time (represented as UTC datetime)
        """

    @property
    @abc.abstractmethod
    def issuer(self) -> Name:
        """
        Returns the issuer name object.
        """

    @property
    @abc.abstractmethod
    def subject(self) -> Name:
        """
        Returns the subject name object.
        """

    @property
    @abc.abstractmethod
    def signature_hash_algorithm(
        self,
    ) -> typing.Optional[hashes.HashAlgorithm]:
        """
        Returns a HashAlgorithm corresponding to the type of the digest signed
        in the certificate.
        """

    @property
    @abc.abstractmethod
    def signature_algorithm_oid(self) -> ObjectIdentifier:
        """
        Returns the ObjectIdentifier of the signature algorithm.
        """

    @property
    @abc.abstractmethod
    def extensions(self) -> Extensions:
        """
        Returns an Extensions object.
        """

    @property
    @abc.abstractmethod
    def signature(self) -> bytes:
        """
        Returns the signature bytes.
        """

    @property
    @abc.abstractmethod
    def tbs_certificate_bytes(self) -> bytes:
        """
        Returns the tbsCertificate payload bytes as defined in RFC 5280.
        """

    @property
    @abc.abstractmethod
    def tbs_precertificate_bytes(self) -> bytes:
        """
        Returns the tbsCertificate payload bytes with the SCT list extension
        stripped.
        """

    @abc.abstractmethod
    def __eq__(self, other: object) -> bool:
        """
        Checks equality.
        """

    @abc.abstractmethod
    def __hash__(self) -> int:
        """
        Computes a hash.
        """

    @abc.abstractmethod
    def public_bytes(self, encoding: serialization.Encoding) -> bytes:
        """
        Serializes the certificate to PEM or DER format.
        """

    @abc.abstractmethod
    def verify_directly_issued_by(self, issuer: "Certificate") -> None:
        """
        This method verifies that certificate issuer name matches the
        issuer subject name and that the certificate is signed by the
        issuer's private key. No other validation is performed.
        """


# Runtime isinstance checks need this since the rust class is not a subclass.
Certificate.register(rust_x509.Certificate)
+
+
class RevokedCertificate(metaclass=abc.ABCMeta):
    """Abstract interface for a single revoked-certificate entry in a CRL."""

    @property
    @abc.abstractmethod
    def serial_number(self) -> int:
        """
        Returns the serial number of the revoked certificate.
        """

    @property
    @abc.abstractmethod
    def revocation_date(self) -> datetime.datetime:
        """
        Returns the date of when this certificate was revoked.
        """

    @property
    @abc.abstractmethod
    def extensions(self) -> Extensions:
        """
        Returns an Extensions object containing a list of Revoked extensions.
        """


# Runtime isinstance checks need this since the rust class is not a subclass.
RevokedCertificate.register(rust_x509.RevokedCertificate)
+
+
class _RawRevokedCertificate(RevokedCertificate):
    # Plain-data implementation of the RevokedCertificate interface.
    # NOTE(review): no constructors are visible in this file; presumably
    # used by CRL builder code — confirm against the callers.
    def __init__(
        self,
        serial_number: int,
        revocation_date: datetime.datetime,
        extensions: Extensions,
    ):
        self._serial_number = serial_number
        self._revocation_date = revocation_date
        self._extensions = extensions

    @property
    def serial_number(self) -> int:
        return self._serial_number

    @property
    def revocation_date(self) -> datetime.datetime:
        return self._revocation_date

    @property
    def extensions(self) -> Extensions:
        return self._extensions
+
+
class CertificateRevocationList(metaclass=abc.ABCMeta):
    """Abstract interface for an X.509 certificate revocation list.

    Concrete instances come from the Rust binding, which is registered as
    a virtual subclass below; this class only defines the contract.
    """

    @abc.abstractmethod
    def public_bytes(self, encoding: serialization.Encoding) -> bytes:
        """
        Serializes the CRL to PEM or DER format.
        """

    @abc.abstractmethod
    def fingerprint(self, algorithm: hashes.HashAlgorithm) -> bytes:
        """
        Returns bytes using digest passed.
        """

    @abc.abstractmethod
    def get_revoked_certificate_by_serial_number(
        self, serial_number: int
    ) -> typing.Optional[RevokedCertificate]:
        """
        Returns an instance of RevokedCertificate or None if the serial_number
        is not in the CRL.
        """

    @property
    @abc.abstractmethod
    def signature_hash_algorithm(
        self,
    ) -> typing.Optional[hashes.HashAlgorithm]:
        """
        Returns a HashAlgorithm corresponding to the type of the digest signed
        in the certificate.
        """

    @property
    @abc.abstractmethod
    def signature_algorithm_oid(self) -> ObjectIdentifier:
        """
        Returns the ObjectIdentifier of the signature algorithm.
        """

    @property
    @abc.abstractmethod
    def issuer(self) -> Name:
        """
        Returns the X509Name with the issuer of this CRL.
        """

    @property
    @abc.abstractmethod
    def next_update(self) -> typing.Optional[datetime.datetime]:
        """
        Returns the date of next update for this CRL.
        """

    @property
    @abc.abstractmethod
    def last_update(self) -> datetime.datetime:
        """
        Returns the date of last update for this CRL.
        """

    @property
    @abc.abstractmethod
    def extensions(self) -> Extensions:
        """
        Returns an Extensions object containing a list of CRL extensions.
        """

    @property
    @abc.abstractmethod
    def signature(self) -> bytes:
        """
        Returns the signature bytes.
        """

    @property
    @abc.abstractmethod
    def tbs_certlist_bytes(self) -> bytes:
        """
        Returns the tbsCertList payload bytes as defined in RFC 5280.
        """

    @abc.abstractmethod
    def __eq__(self, other: object) -> bool:
        """
        Checks equality.
        """

    @abc.abstractmethod
    def __len__(self) -> int:
        """
        Number of revoked certificates in the CRL.
        """

    @typing.overload
    def __getitem__(self, idx: int) -> RevokedCertificate:
        ...

    @typing.overload
    def __getitem__(self, idx: slice) -> typing.List[RevokedCertificate]:
        ...

    @abc.abstractmethod
    def __getitem__(
        self, idx: typing.Union[int, slice]
    ) -> typing.Union[RevokedCertificate, typing.List[RevokedCertificate]]:
        """
        Returns a revoked certificate (or slice of revoked certificates).
        """

    @abc.abstractmethod
    def __iter__(self) -> typing.Iterator[RevokedCertificate]:
        """
        Iterator over the revoked certificates
        """

    @abc.abstractmethod
    def is_signature_valid(
        self, public_key: CertificateIssuerPublicKeyTypes
    ) -> bool:
        """
        Verifies signature of revocation list against given public key.
        """


# Runtime isinstance checks need this since the rust class is not a subclass.
CertificateRevocationList.register(rust_x509.CertificateRevocationList)
+
+
class CertificateSigningRequest(metaclass=abc.ABCMeta):
    """Abstract interface for a PKCS#10 certificate signing request.

    Concrete instances come from the Rust binding, which is registered as
    a virtual subclass below; this class only defines the contract.
    """

    @abc.abstractmethod
    def __eq__(self, other: object) -> bool:
        """
        Checks equality.
        """

    @abc.abstractmethod
    def __hash__(self) -> int:
        """
        Computes a hash.
        """

    @abc.abstractmethod
    def public_key(self) -> CertificatePublicKeyTypes:
        """
        Returns the public key
        """

    @property
    @abc.abstractmethod
    def subject(self) -> Name:
        """
        Returns the subject name object.
        """

    @property
    @abc.abstractmethod
    def signature_hash_algorithm(
        self,
    ) -> typing.Optional[hashes.HashAlgorithm]:
        """
        Returns a HashAlgorithm corresponding to the type of the digest signed
        in the certificate.
        """

    @property
    @abc.abstractmethod
    def signature_algorithm_oid(self) -> ObjectIdentifier:
        """
        Returns the ObjectIdentifier of the signature algorithm.
        """

    @property
    @abc.abstractmethod
    def extensions(self) -> Extensions:
        """
        Returns the extensions in the signing request.
        """

    @property
    @abc.abstractmethod
    def attributes(self) -> Attributes:
        """
        Returns an Attributes object.
        """

    @abc.abstractmethod
    def public_bytes(self, encoding: serialization.Encoding) -> bytes:
        """
        Encodes the request to PEM or DER format.
        """

    @property
    @abc.abstractmethod
    def signature(self) -> bytes:
        """
        Returns the signature bytes.
        """

    @property
    @abc.abstractmethod
    def tbs_certrequest_bytes(self) -> bytes:
        """
        Returns the PKCS#10 CertificationRequestInfo bytes as defined in RFC
        2986.
        """

    @property
    @abc.abstractmethod
    def is_signature_valid(self) -> bool:
        """
        Verifies signature of signing request.
        """

    @abc.abstractmethod
    def get_attribute_for_oid(self, oid: ObjectIdentifier) -> bytes:
        """
        Get the attribute value for a given OID.
        """


# Runtime isinstance checks need this since the rust class is not a subclass.
CertificateSigningRequest.register(rust_x509.CertificateSigningRequest)
+
+
# Backend argument preserved for API compatibility, but ignored.
def load_pem_x509_certificate(
    data: bytes, backend: typing.Any = None
) -> Certificate:
    """Deserialize one PEM-encoded certificate; delegates to the Rust layer."""
    return rust_x509.load_pem_x509_certificate(data)
+
+
def load_pem_x509_certificates(data: bytes) -> typing.List[Certificate]:
    """Deserialize every PEM-encoded certificate in *data*; delegates to Rust."""
    return rust_x509.load_pem_x509_certificates(data)
+
+
# Backend argument preserved for API compatibility, but ignored.
def load_der_x509_certificate(
    data: bytes, backend: typing.Any = None
) -> Certificate:
    """Deserialize a DER-encoded certificate; delegates to the Rust layer."""
    return rust_x509.load_der_x509_certificate(data)
+
+
# Backend argument preserved for API compatibility, but ignored.
def load_pem_x509_csr(
    data: bytes, backend: typing.Any = None
) -> CertificateSigningRequest:
    """Deserialize a PEM-encoded CSR; delegates to the Rust layer."""
    return rust_x509.load_pem_x509_csr(data)
+
+
# Backend argument preserved for API compatibility, but ignored.
def load_der_x509_csr(
    data: bytes, backend: typing.Any = None
) -> CertificateSigningRequest:
    """Deserialize a DER-encoded CSR; delegates to the Rust layer."""
    return rust_x509.load_der_x509_csr(data)
+
+
# Backend argument preserved for API compatibility, but ignored.
def load_pem_x509_crl(
    data: bytes, backend: typing.Any = None
) -> CertificateRevocationList:
    """Deserialize a PEM-encoded CRL; delegates to the Rust layer."""
    return rust_x509.load_pem_x509_crl(data)
+
+
# Backend argument preserved for API compatibility, but ignored.
def load_der_x509_crl(
    data: bytes, backend: typing.Any = None
) -> CertificateRevocationList:
    """Deserialize a DER-encoded CRL; delegates to the Rust layer."""
    return rust_x509.load_der_x509_crl(data)
+
+
class CertificateSigningRequestBuilder:
    """Incremental builder for PKCS#10 certificate signing requests.

    Each setter returns a new builder instance, so builders behave as
    immutable values.
    """

    def __init__(
        self,
        subject_name: typing.Optional[Name] = None,
        extensions: typing.Optional[
            typing.List[Extension[ExtensionType]]
        ] = None,
        attributes: typing.Optional[
            typing.List[
                typing.Tuple[ObjectIdentifier, bytes, typing.Optional[int]]
            ]
        ] = None,
    ):
        """
        Creates an empty X.509 certificate request (v1).
        """
        self._subject_name = subject_name
        # Previously `extensions=[]`/`attributes=[]`: mutable defaults are
        # shared across every call. Normalize None to fresh lists instead;
        # behavior is unchanged for all existing callers.
        self._extensions = [] if extensions is None else extensions
        self._attributes = [] if attributes is None else attributes

    def subject_name(self, name: Name) -> "CertificateSigningRequestBuilder":
        """
        Sets the certificate requestor's distinguished name.
        """
        if not isinstance(name, Name):
            raise TypeError("Expecting x509.Name object.")
        if self._subject_name is not None:
            raise ValueError("The subject name may only be set once.")
        return CertificateSigningRequestBuilder(
            name, self._extensions, self._attributes
        )

    def add_extension(
        self, extval: ExtensionType, critical: bool
    ) -> "CertificateSigningRequestBuilder":
        """
        Adds an X.509 extension to the certificate request.
        """
        if not isinstance(extval, ExtensionType):
            raise TypeError("extension must be an ExtensionType")

        extension = Extension(extval.oid, critical, extval)
        # A request may not contain the same extension OID twice.
        _reject_duplicate_extension(extension, self._extensions)

        return CertificateSigningRequestBuilder(
            self._subject_name,
            self._extensions + [extension],
            self._attributes,
        )

    def add_attribute(
        self,
        oid: ObjectIdentifier,
        value: bytes,
        *,
        _tag: typing.Optional[_ASN1Type] = None,
    ) -> "CertificateSigningRequestBuilder":
        """
        Adds an X.509 attribute with an OID and associated value.
        """
        if not isinstance(oid, ObjectIdentifier):
            raise TypeError("oid must be an ObjectIdentifier")

        if not isinstance(value, bytes):
            raise TypeError("value must be bytes")

        if _tag is not None and not isinstance(_tag, _ASN1Type):
            raise TypeError("tag must be _ASN1Type")

        _reject_duplicate_attribute(oid, self._attributes)

        # Store the raw ASN.1 tag value, or None for the default encoding.
        tag = _tag.value if _tag is not None else None

        return CertificateSigningRequestBuilder(
            self._subject_name,
            self._extensions,
            self._attributes + [(oid, value, tag)],
        )

    def sign(
        self,
        private_key: CertificateIssuerPrivateKeyTypes,
        algorithm: typing.Optional[_AllowedHashTypes],
        backend: typing.Any = None,
    ) -> CertificateSigningRequest:
        """
        Signs the request using the requestor's private key.
        """
        if self._subject_name is None:
            raise ValueError("A CertificateSigningRequest must have a subject")
        return rust_x509.create_x509_csr(self, private_key, algorithm)
+
+
class CertificateBuilder:
    """Incremental builder for X.509 v3 certificates."""

    _extensions: typing.List[Extension[ExtensionType]]

    def __init__(
        self,
        issuer_name: typing.Optional[Name] = None,
        subject_name: typing.Optional[Name] = None,
        public_key: typing.Optional[CertificatePublicKeyTypes] = None,
        serial_number: typing.Optional[int] = None,
        not_valid_before: typing.Optional[datetime.datetime] = None,
        not_valid_after: typing.Optional[datetime.datetime] = None,
        extensions: typing.List[Extension[ExtensionType]] = [],
    ) -> None:
        # NOTE(review): `extensions=[]` is a shared mutable default; it is
        # safe only as long as the list is never mutated in place — confirm
        # before adding any in-place updates.
        self._version = Version.v3
        self._issuer_name = issuer_name
        self._subject_name = subject_name
        self._public_key = public_key
        self._serial_number = serial_number
        self._not_valid_before = not_valid_before
        self._not_valid_after = not_valid_after
        self._extensions = extensions
+
    def issuer_name(self, name: Name) -> "CertificateBuilder":
        """
        Sets the CA's distinguished name. May only be called once.
        """
        if not isinstance(name, Name):
            raise TypeError("Expecting x509.Name object.")
        if self._issuer_name is not None:
            raise ValueError("The issuer name may only be set once.")
        # Builders are immutable: every setter returns a new instance.
        return CertificateBuilder(
            name,
            self._subject_name,
            self._public_key,
            self._serial_number,
            self._not_valid_before,
            self._not_valid_after,
            self._extensions,
        )
+
    def subject_name(self, name: Name) -> "CertificateBuilder":
        """
        Sets the requestor's distinguished name. May only be called once.
        """
        if not isinstance(name, Name):
            raise TypeError("Expecting x509.Name object.")
        if self._subject_name is not None:
            raise ValueError("The subject name may only be set once.")
        # Builders are immutable: every setter returns a new instance.
        return CertificateBuilder(
            self._issuer_name,
            name,
            self._public_key,
            self._serial_number,
            self._not_valid_before,
            self._not_valid_after,
            self._extensions,
        )
+
    def public_key(
        self,
        key: CertificatePublicKeyTypes,
    ) -> "CertificateBuilder":
        """
        Sets the requestor's public key (as found in the signing request).
        May only be called once.
        """
        # Only these key types can appear in a certificate's
        # subjectPublicKeyInfo; anything else is rejected up front.
        if not isinstance(
            key,
            (
                dsa.DSAPublicKey,
                rsa.RSAPublicKey,
                ec.EllipticCurvePublicKey,
                ed25519.Ed25519PublicKey,
                ed448.Ed448PublicKey,
                x25519.X25519PublicKey,
                x448.X448PublicKey,
            ),
        ):
            raise TypeError(
                "Expecting one of DSAPublicKey, RSAPublicKey,"
                " EllipticCurvePublicKey, Ed25519PublicKey,"
                " Ed448PublicKey, X25519PublicKey, or "
                "X448PublicKey."
            )
        if self._public_key is not None:
            raise ValueError("The public key may only be set once.")
        return CertificateBuilder(
            self._issuer_name,
            self._subject_name,
            key,
            self._serial_number,
            self._not_valid_before,
            self._not_valid_after,
            self._extensions,
        )
+
+ def serial_number(self, number: int) -> "CertificateBuilder":
+ """
+ Sets the certificate serial number.
+ """
+ if not isinstance(number, int):
+ raise TypeError("Serial number must be of integral type.")
+ if self._serial_number is not None:
+ raise ValueError("The serial number may only be set once.")
+ if number <= 0:
+ raise ValueError("The serial number should be positive.")
+
+ # ASN.1 integers are always signed, so most significant bit must be
+ # zero.
+ if number.bit_length() >= 160: # As defined in RFC 5280
+ raise ValueError(
+ "The serial number should not be more than 159 " "bits."
+ )
+ return CertificateBuilder(
+ self._issuer_name,
+ self._subject_name,
+ self._public_key,
+ number,
+ self._not_valid_before,
+ self._not_valid_after,
+ self._extensions,
+ )
+
+ def not_valid_before(
+ self, time: datetime.datetime
+ ) -> "CertificateBuilder":
+ """
+ Sets the certificate activation time.
+ """
+ if not isinstance(time, datetime.datetime):
+ raise TypeError("Expecting datetime object.")
+ if self._not_valid_before is not None:
+ raise ValueError("The not valid before may only be set once.")
+ time = _convert_to_naive_utc_time(time)
+ if time < _EARLIEST_UTC_TIME:
+ raise ValueError(
+ "The not valid before date must be on or after"
+ " 1950 January 1)."
+ )
+ if self._not_valid_after is not None and time > self._not_valid_after:
+ raise ValueError(
+ "The not valid before date must be before the not valid after "
+ "date."
+ )
+ return CertificateBuilder(
+ self._issuer_name,
+ self._subject_name,
+ self._public_key,
+ self._serial_number,
+ time,
+ self._not_valid_after,
+ self._extensions,
+ )
+
+ def not_valid_after(self, time: datetime.datetime) -> "CertificateBuilder":
+ """
+ Sets the certificate expiration time.
+ """
+ if not isinstance(time, datetime.datetime):
+ raise TypeError("Expecting datetime object.")
+ if self._not_valid_after is not None:
+ raise ValueError("The not valid after may only be set once.")
+ time = _convert_to_naive_utc_time(time)
+ if time < _EARLIEST_UTC_TIME:
+ raise ValueError(
+ "The not valid after date must be on or after"
+ " 1950 January 1."
+ )
+ if (
+ self._not_valid_before is not None
+ and time < self._not_valid_before
+ ):
+ raise ValueError(
+ "The not valid after date must be after the not valid before "
+ "date."
+ )
+ return CertificateBuilder(
+ self._issuer_name,
+ self._subject_name,
+ self._public_key,
+ self._serial_number,
+ self._not_valid_before,
+ time,
+ self._extensions,
+ )
+
+ def add_extension(
+ self, extval: ExtensionType, critical: bool
+ ) -> "CertificateBuilder":
+ """
+ Adds an X.509 extension to the certificate.
+ """
+ if not isinstance(extval, ExtensionType):
+ raise TypeError("extension must be an ExtensionType")
+
+ extension = Extension(extval.oid, critical, extval)
+ _reject_duplicate_extension(extension, self._extensions)
+
+ return CertificateBuilder(
+ self._issuer_name,
+ self._subject_name,
+ self._public_key,
+ self._serial_number,
+ self._not_valid_before,
+ self._not_valid_after,
+ self._extensions + [extension],
+ )
+
+ def sign(
+ self,
+ private_key: CertificateIssuerPrivateKeyTypes,
+ algorithm: typing.Optional[_AllowedHashTypes],
+ backend: typing.Any = None,
+ ) -> Certificate:
+ """
+ Signs the certificate using the CA's private key.
+ """
+ if self._subject_name is None:
+ raise ValueError("A certificate must have a subject name")
+
+ if self._issuer_name is None:
+ raise ValueError("A certificate must have an issuer name")
+
+ if self._serial_number is None:
+ raise ValueError("A certificate must have a serial number")
+
+ if self._not_valid_before is None:
+ raise ValueError("A certificate must have a not valid before time")
+
+ if self._not_valid_after is None:
+ raise ValueError("A certificate must have a not valid after time")
+
+ if self._public_key is None:
+ raise ValueError("A certificate must have a public key")
+
+ return rust_x509.create_x509_certificate(self, private_key, algorithm)
+
+
+class CertificateRevocationListBuilder:
+ _extensions: typing.List[Extension[ExtensionType]]
+ _revoked_certificates: typing.List[RevokedCertificate]
+
+ def __init__(
+ self,
+ issuer_name: typing.Optional[Name] = None,
+ last_update: typing.Optional[datetime.datetime] = None,
+ next_update: typing.Optional[datetime.datetime] = None,
+ extensions: typing.List[Extension[ExtensionType]] = [],
+ revoked_certificates: typing.List[RevokedCertificate] = [],
+ ):
+ self._issuer_name = issuer_name
+ self._last_update = last_update
+ self._next_update = next_update
+ self._extensions = extensions
+ self._revoked_certificates = revoked_certificates
+
+ def issuer_name(
+ self, issuer_name: Name
+ ) -> "CertificateRevocationListBuilder":
+ if not isinstance(issuer_name, Name):
+ raise TypeError("Expecting x509.Name object.")
+ if self._issuer_name is not None:
+ raise ValueError("The issuer name may only be set once.")
+ return CertificateRevocationListBuilder(
+ issuer_name,
+ self._last_update,
+ self._next_update,
+ self._extensions,
+ self._revoked_certificates,
+ )
+
+ def last_update(
+ self, last_update: datetime.datetime
+ ) -> "CertificateRevocationListBuilder":
+ if not isinstance(last_update, datetime.datetime):
+ raise TypeError("Expecting datetime object.")
+ if self._last_update is not None:
+ raise ValueError("Last update may only be set once.")
+ last_update = _convert_to_naive_utc_time(last_update)
+ if last_update < _EARLIEST_UTC_TIME:
+ raise ValueError(
+ "The last update date must be on or after" " 1950 January 1."
+ )
+ if self._next_update is not None and last_update > self._next_update:
+ raise ValueError(
+ "The last update date must be before the next update date."
+ )
+ return CertificateRevocationListBuilder(
+ self._issuer_name,
+ last_update,
+ self._next_update,
+ self._extensions,
+ self._revoked_certificates,
+ )
+
+ def next_update(
+ self, next_update: datetime.datetime
+ ) -> "CertificateRevocationListBuilder":
+ if not isinstance(next_update, datetime.datetime):
+ raise TypeError("Expecting datetime object.")
+ if self._next_update is not None:
+ raise ValueError("Last update may only be set once.")
+ next_update = _convert_to_naive_utc_time(next_update)
+ if next_update < _EARLIEST_UTC_TIME:
+ raise ValueError(
+ "The last update date must be on or after" " 1950 January 1."
+ )
+ if self._last_update is not None and next_update < self._last_update:
+ raise ValueError(
+ "The next update date must be after the last update date."
+ )
+ return CertificateRevocationListBuilder(
+ self._issuer_name,
+ self._last_update,
+ next_update,
+ self._extensions,
+ self._revoked_certificates,
+ )
+
+ def add_extension(
+ self, extval: ExtensionType, critical: bool
+ ) -> "CertificateRevocationListBuilder":
+ """
+ Adds an X.509 extension to the certificate revocation list.
+ """
+ if not isinstance(extval, ExtensionType):
+ raise TypeError("extension must be an ExtensionType")
+
+ extension = Extension(extval.oid, critical, extval)
+ _reject_duplicate_extension(extension, self._extensions)
+ return CertificateRevocationListBuilder(
+ self._issuer_name,
+ self._last_update,
+ self._next_update,
+ self._extensions + [extension],
+ self._revoked_certificates,
+ )
+
+ def add_revoked_certificate(
+ self, revoked_certificate: RevokedCertificate
+ ) -> "CertificateRevocationListBuilder":
+ """
+ Adds a revoked certificate to the CRL.
+ """
+ if not isinstance(revoked_certificate, RevokedCertificate):
+ raise TypeError("Must be an instance of RevokedCertificate")
+
+ return CertificateRevocationListBuilder(
+ self._issuer_name,
+ self._last_update,
+ self._next_update,
+ self._extensions,
+ self._revoked_certificates + [revoked_certificate],
+ )
+
+ def sign(
+ self,
+ private_key: CertificateIssuerPrivateKeyTypes,
+ algorithm: typing.Optional[_AllowedHashTypes],
+ backend: typing.Any = None,
+ ) -> CertificateRevocationList:
+ if self._issuer_name is None:
+ raise ValueError("A CRL must have an issuer name")
+
+ if self._last_update is None:
+ raise ValueError("A CRL must have a last update time")
+
+ if self._next_update is None:
+ raise ValueError("A CRL must have a next update time")
+
+ return rust_x509.create_x509_crl(self, private_key, algorithm)
+
+
+class RevokedCertificateBuilder:
+    """
+    Builder for individual revoked-certificate entries in a CRL.
+
+    Immutable: every setter returns a new builder; build() produces the
+    final RevokedCertificate.
+    """
+
+    def __init__(
+        self,
+        serial_number: typing.Optional[int] = None,
+        revocation_date: typing.Optional[datetime.datetime] = None,
+        extensions: typing.List[Extension[ExtensionType]] = [],
+    ):
+        self._serial_number = serial_number
+        self._revocation_date = revocation_date
+        self._extensions = extensions
+
+    def serial_number(self, number: int) -> "RevokedCertificateBuilder":
+        """
+        Sets the serial number of the certificate being revoked.
+        """
+        if not isinstance(number, int):
+            raise TypeError("Serial number must be of integral type.")
+        if self._serial_number is not None:
+            raise ValueError("The serial number may only be set once.")
+        if number <= 0:
+            raise ValueError("The serial number should be positive")
+
+        # ASN.1 integers are always signed, so most significant bit must be
+        # zero.
+        if number.bit_length() >= 160:  # As defined in RFC 5280
+            raise ValueError(
+                "The serial number should not be more than 159 " "bits."
+            )
+        return RevokedCertificateBuilder(
+            number, self._revocation_date, self._extensions
+        )
+
+    def revocation_date(
+        self, time: datetime.datetime
+    ) -> "RevokedCertificateBuilder":
+        """
+        Sets the date the certificate was revoked.
+        """
+        if not isinstance(time, datetime.datetime):
+            raise TypeError("Expecting datetime object.")
+        if self._revocation_date is not None:
+            raise ValueError("The revocation date may only be set once.")
+        # Normalize to naive UTC so the range check below is consistent.
+        time = _convert_to_naive_utc_time(time)
+        if time < _EARLIEST_UTC_TIME:
+            raise ValueError(
+                "The revocation date must be on or after" " 1950 January 1."
+            )
+        return RevokedCertificateBuilder(
+            self._serial_number, time, self._extensions
+        )
+
+    def add_extension(
+        self, extval: ExtensionType, critical: bool
+    ) -> "RevokedCertificateBuilder":
+        """
+        Adds a CRL entry extension to this revoked certificate.
+        """
+        if not isinstance(extval, ExtensionType):
+            raise TypeError("extension must be an ExtensionType")
+
+        extension = Extension(extval.oid, critical, extval)
+        # Duplicate OIDs are rejected.
+        _reject_duplicate_extension(extension, self._extensions)
+        return RevokedCertificateBuilder(
+            self._serial_number,
+            self._revocation_date,
+            self._extensions + [extension],
+        )
+
+    def build(self, backend: typing.Any = None) -> RevokedCertificate:
+        """
+        Builds the RevokedCertificate; serial number and revocation date
+        are required.
+        """
+        if self._serial_number is None:
+            raise ValueError("A revoked certificate must have a serial number")
+        if self._revocation_date is None:
+            raise ValueError(
+                "A revoked certificate must have a revocation date"
+            )
+        return _RawRevokedCertificate(
+            self._serial_number,
+            self._revocation_date,
+            Extensions(self._extensions),
+        )
+
+
+def random_serial_number() -> int:
+    """
+    Return a random certificate serial number.
+
+    Draws 20 random bytes (160 bits) and shifts right by one so the
+    result fits in 159 bits and is non-negative — ASN.1 integers are
+    signed, and RFC 5280 caps serial numbers at 20 octets.
+    """
+    return int.from_bytes(os.urandom(20), "big") >> 1
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/x509/certificate_transparency.py b/Voltage Vikings/lib/python3.9/site-packages/cryptography/x509/certificate_transparency.py
new file mode 100644
index 00000000..a6770986
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/x509/certificate_transparency.py
@@ -0,0 +1,96 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+
+import abc
+import datetime
+
+from cryptography import utils
+from cryptography.hazmat.bindings._rust import x509 as rust_x509
+from cryptography.hazmat.primitives.hashes import HashAlgorithm
+
+
+class LogEntryType(utils.Enum):
+    """
+    The type of entry an SCT covers: a final certificate or a
+    pre-certificate.
+    """
+
+    X509_CERTIFICATE = 0
+    PRE_CERTIFICATE = 1
+
+
+class Version(utils.Enum):
+    """
+    SCT structure version numbers.
+    """
+
+    v1 = 0
+
+
+class SignatureAlgorithm(utils.Enum):
+    """
+    Signature algorithms that are valid for SCTs.
+
+    These are exactly the same as SignatureAlgorithm in RFC 5246 (TLS 1.2).
+
+    See: <https://datatracker.ietf.org/doc/html/rfc5246#section-7.4.1.4.1>
+    """
+
+    ANONYMOUS = 0
+    RSA = 1
+    DSA = 2
+    ECDSA = 3
+
+
+class SignedCertificateTimestamp(metaclass=abc.ABCMeta):
+    """
+    Abstract interface for a Signed Certificate Timestamp (SCT) as
+    embedded in certificates or delivered by Certificate Transparency
+    logs. Concrete implementations register themselves as virtual
+    subclasses.
+    """
+
+    @property
+    @abc.abstractmethod
+    def version(self) -> Version:
+        """
+        Returns the SCT version.
+        """
+
+    @property
+    @abc.abstractmethod
+    def log_id(self) -> bytes:
+        """
+        Returns an identifier indicating which log this SCT is for.
+        """
+
+    @property
+    @abc.abstractmethod
+    def timestamp(self) -> datetime.datetime:
+        """
+        Returns the timestamp for this SCT.
+        """
+
+    @property
+    @abc.abstractmethod
+    def entry_type(self) -> LogEntryType:
+        """
+        Returns whether this is an SCT for a certificate or pre-certificate.
+        """
+
+    @property
+    @abc.abstractmethod
+    def signature_hash_algorithm(self) -> HashAlgorithm:
+        """
+        Returns the hash algorithm used for the SCT's signature.
+        """
+
+    @property
+    @abc.abstractmethod
+    def signature_algorithm(self) -> SignatureAlgorithm:
+        """
+        Returns the signing algorithm used for the SCT's signature.
+        """
+
+    @property
+    @abc.abstractmethod
+    def signature(self) -> bytes:
+        """
+        Returns the signature for this SCT.
+        """
+
+    @property
+    @abc.abstractmethod
+    def extension_bytes(self) -> bytes:
+        """
+        Returns the raw bytes of any extensions for this SCT.
+        """
+
+
+SignedCertificateTimestamp.register(rust_x509.Sct)
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/x509/extensions.py b/Voltage Vikings/lib/python3.9/site-packages/cryptography/x509/extensions.py
new file mode 100644
index 00000000..551887b4
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/x509/extensions.py
@@ -0,0 +1,2126 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+
+import abc
+import datetime
+import hashlib
+import ipaddress
+import typing
+
+from cryptography import utils
+from cryptography.hazmat.bindings._rust import asn1
+from cryptography.hazmat.bindings._rust import x509 as rust_x509
+from cryptography.hazmat.primitives import constant_time, serialization
+from cryptography.hazmat.primitives.asymmetric.ec import EllipticCurvePublicKey
+from cryptography.hazmat.primitives.asymmetric.rsa import RSAPublicKey
+from cryptography.hazmat.primitives.asymmetric.types import (
+ CertificateIssuerPublicKeyTypes,
+ CertificatePublicKeyTypes,
+)
+from cryptography.x509.certificate_transparency import (
+ SignedCertificateTimestamp,
+)
+from cryptography.x509.general_name import (
+ DirectoryName,
+ DNSName,
+ GeneralName,
+ IPAddress,
+ OtherName,
+ RegisteredID,
+ RFC822Name,
+ UniformResourceIdentifier,
+ _IPAddressTypes,
+)
+from cryptography.x509.name import Name, RelativeDistinguishedName
+from cryptography.x509.oid import (
+ CRLEntryExtensionOID,
+ ExtensionOID,
+ ObjectIdentifier,
+ OCSPExtensionOID,
+)
+
+# Covariant TypeVar used by Extensions.get_extension_for_class so the
+# returned Extension is parametrized by the requested subclass.
+ExtensionTypeVar = typing.TypeVar(
+    "ExtensionTypeVar", bound="ExtensionType", covariant=True
+)
+
+
+def _key_identifier_from_public_key(
+    public_key: CertificatePublicKeyTypes,
+) -> bytes:
+    """
+    Return the SHA-1 digest of the encoded public key, used as a key
+    identifier for SKI/AKI extensions.
+
+    RSA keys are encoded as PKCS1 DER and EC keys as X9.62 uncompressed
+    points; any other key type is serialized to SubjectPublicKeyInfo and
+    the inner key data extracted from it.
+    """
+    if isinstance(public_key, RSAPublicKey):
+        data = public_key.public_bytes(
+            serialization.Encoding.DER,
+            serialization.PublicFormat.PKCS1,
+        )
+    elif isinstance(public_key, EllipticCurvePublicKey):
+        data = public_key.public_bytes(
+            serialization.Encoding.X962,
+            serialization.PublicFormat.UncompressedPoint,
+        )
+    else:
+        # This is a very slow way to do this.
+        serialized = public_key.public_bytes(
+            serialization.Encoding.DER,
+            serialization.PublicFormat.SubjectPublicKeyInfo,
+        )
+        data = asn1.parse_spki_for_data(serialized)
+
+    return hashlib.sha1(data).digest()
+
+
+def _make_sequence_methods(field_name: str):
+    """
+    Return (__len__, __iter__, __getitem__) implementations that delegate
+    to the list stored on the instance under *field_name*; lets the
+    container classes below act as read-only sequences.
+    """
+
+    def len_method(self) -> int:
+        return len(getattr(self, field_name))
+
+    def iter_method(self):
+        return iter(getattr(self, field_name))
+
+    def getitem_method(self, idx):
+        return getattr(self, field_name)[idx]
+
+    return len_method, iter_method, getitem_method
+
+
+class DuplicateExtension(Exception):
+    """
+    Raised when two extensions with the same OID are encountered.
+    """
+
+    def __init__(self, msg: str, oid: ObjectIdentifier) -> None:
+        super().__init__(msg)
+        # OID of the duplicated extension, for programmatic inspection.
+        self.oid = oid
+
+
+class ExtensionNotFound(Exception):
+    """
+    Raised when a requested extension is not present.
+    """
+
+    def __init__(self, msg: str, oid: ObjectIdentifier) -> None:
+        super().__init__(msg)
+        # OID of the missing extension, for programmatic inspection.
+        self.oid = oid
+
+
+class ExtensionType(metaclass=abc.ABCMeta):
+    """
+    Abstract base for all X.509 extension value types; subclasses set the
+    class-level ``oid``.
+    """
+
+    oid: typing.ClassVar[ObjectIdentifier]
+
+    def public_bytes(self) -> bytes:
+        """
+        Serializes the extension type to DER.
+        """
+        # Default raises; concrete extension types override this.
+        raise NotImplementedError(
+            "public_bytes is not implemented for extension type {!r}".format(
+                self
+            )
+        )
+
+
+class Extensions:
+ def __init__(
+ self, extensions: typing.Iterable["Extension[ExtensionType]"]
+ ) -> None:
+ self._extensions = list(extensions)
+
+ def get_extension_for_oid(
+ self, oid: ObjectIdentifier
+ ) -> "Extension[ExtensionType]":
+ for ext in self:
+ if ext.oid == oid:
+ return ext
+
+ raise ExtensionNotFound(f"No {oid} extension was found", oid)
+
+ def get_extension_for_class(
+ self, extclass: typing.Type[ExtensionTypeVar]
+ ) -> "Extension[ExtensionTypeVar]":
+ if extclass is UnrecognizedExtension:
+ raise TypeError(
+ "UnrecognizedExtension can't be used with "
+ "get_extension_for_class because more than one instance of the"
+ " class may be present."
+ )
+
+ for ext in self:
+ if isinstance(ext.value, extclass):
+ return ext
+
+ raise ExtensionNotFound(
+ f"No {extclass} extension was found", extclass.oid
+ )
+
+ __len__, __iter__, __getitem__ = _make_sequence_methods("_extensions")
+
+ def __repr__(self) -> str:
+ return f""
+
+
+class CRLNumber(ExtensionType):
+ oid = ExtensionOID.CRL_NUMBER
+
+ def __init__(self, crl_number: int) -> None:
+ if not isinstance(crl_number, int):
+ raise TypeError("crl_number must be an integer")
+
+ self._crl_number = crl_number
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, CRLNumber):
+ return NotImplemented
+
+ return self.crl_number == other.crl_number
+
+ def __hash__(self) -> int:
+ return hash(self.crl_number)
+
+ def __repr__(self) -> str:
+ return f""
+
+ @property
+ def crl_number(self) -> int:
+ return self._crl_number
+
+ def public_bytes(self) -> bytes:
+ return rust_x509.encode_extension_value(self)
+
+
+class AuthorityKeyIdentifier(ExtensionType):
+ oid = ExtensionOID.AUTHORITY_KEY_IDENTIFIER
+
+ def __init__(
+ self,
+ key_identifier: typing.Optional[bytes],
+ authority_cert_issuer: typing.Optional[typing.Iterable[GeneralName]],
+ authority_cert_serial_number: typing.Optional[int],
+ ) -> None:
+ if (authority_cert_issuer is None) != (
+ authority_cert_serial_number is None
+ ):
+ raise ValueError(
+ "authority_cert_issuer and authority_cert_serial_number "
+ "must both be present or both None"
+ )
+
+ if authority_cert_issuer is not None:
+ authority_cert_issuer = list(authority_cert_issuer)
+ if not all(
+ isinstance(x, GeneralName) for x in authority_cert_issuer
+ ):
+ raise TypeError(
+ "authority_cert_issuer must be a list of GeneralName "
+ "objects"
+ )
+
+ if authority_cert_serial_number is not None and not isinstance(
+ authority_cert_serial_number, int
+ ):
+ raise TypeError("authority_cert_serial_number must be an integer")
+
+ self._key_identifier = key_identifier
+ self._authority_cert_issuer = authority_cert_issuer
+ self._authority_cert_serial_number = authority_cert_serial_number
+
+ # This takes a subset of CertificatePublicKeyTypes because an issuer
+ # cannot have an X25519/X448 key. This introduces some unfortunate
+ # asymmetry that requires typing users to explicitly
+ # narrow their type, but we should make this accurate and not just
+ # convenient.
+ @classmethod
+ def from_issuer_public_key(
+ cls, public_key: CertificateIssuerPublicKeyTypes
+ ) -> "AuthorityKeyIdentifier":
+ digest = _key_identifier_from_public_key(public_key)
+ return cls(
+ key_identifier=digest,
+ authority_cert_issuer=None,
+ authority_cert_serial_number=None,
+ )
+
+ @classmethod
+ def from_issuer_subject_key_identifier(
+ cls, ski: "SubjectKeyIdentifier"
+ ) -> "AuthorityKeyIdentifier":
+ return cls(
+ key_identifier=ski.digest,
+ authority_cert_issuer=None,
+ authority_cert_serial_number=None,
+ )
+
+ def __repr__(self) -> str:
+ return (
+ "".format(self)
+ )
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, AuthorityKeyIdentifier):
+ return NotImplemented
+
+ return (
+ self.key_identifier == other.key_identifier
+ and self.authority_cert_issuer == other.authority_cert_issuer
+ and self.authority_cert_serial_number
+ == other.authority_cert_serial_number
+ )
+
+ def __hash__(self) -> int:
+ if self.authority_cert_issuer is None:
+ aci = None
+ else:
+ aci = tuple(self.authority_cert_issuer)
+ return hash(
+ (self.key_identifier, aci, self.authority_cert_serial_number)
+ )
+
+ @property
+ def key_identifier(self) -> typing.Optional[bytes]:
+ return self._key_identifier
+
+ @property
+ def authority_cert_issuer(
+ self,
+ ) -> typing.Optional[typing.List[GeneralName]]:
+ return self._authority_cert_issuer
+
+ @property
+ def authority_cert_serial_number(self) -> typing.Optional[int]:
+ return self._authority_cert_serial_number
+
+ def public_bytes(self) -> bytes:
+ return rust_x509.encode_extension_value(self)
+
+
+class SubjectKeyIdentifier(ExtensionType):
+ oid = ExtensionOID.SUBJECT_KEY_IDENTIFIER
+
+ def __init__(self, digest: bytes) -> None:
+ self._digest = digest
+
+ @classmethod
+ def from_public_key(
+ cls, public_key: CertificatePublicKeyTypes
+ ) -> "SubjectKeyIdentifier":
+ return cls(_key_identifier_from_public_key(public_key))
+
+ @property
+ def digest(self) -> bytes:
+ return self._digest
+
+ @property
+ def key_identifier(self) -> bytes:
+ return self._digest
+
+ def __repr__(self) -> str:
+ return f""
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, SubjectKeyIdentifier):
+ return NotImplemented
+
+ return constant_time.bytes_eq(self.digest, other.digest)
+
+ def __hash__(self) -> int:
+ return hash(self.digest)
+
+ def public_bytes(self) -> bytes:
+ return rust_x509.encode_extension_value(self)
+
+
+class AuthorityInformationAccess(ExtensionType):
+ oid = ExtensionOID.AUTHORITY_INFORMATION_ACCESS
+
+ def __init__(
+ self, descriptions: typing.Iterable["AccessDescription"]
+ ) -> None:
+ descriptions = list(descriptions)
+ if not all(isinstance(x, AccessDescription) for x in descriptions):
+ raise TypeError(
+ "Every item in the descriptions list must be an "
+ "AccessDescription"
+ )
+
+ self._descriptions = descriptions
+
+ __len__, __iter__, __getitem__ = _make_sequence_methods("_descriptions")
+
+ def __repr__(self) -> str:
+ return f""
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, AuthorityInformationAccess):
+ return NotImplemented
+
+ return self._descriptions == other._descriptions
+
+ def __hash__(self) -> int:
+ return hash(tuple(self._descriptions))
+
+ def public_bytes(self) -> bytes:
+ return rust_x509.encode_extension_value(self)
+
+
+class SubjectInformationAccess(ExtensionType):
+ oid = ExtensionOID.SUBJECT_INFORMATION_ACCESS
+
+ def __init__(
+ self, descriptions: typing.Iterable["AccessDescription"]
+ ) -> None:
+ descriptions = list(descriptions)
+ if not all(isinstance(x, AccessDescription) for x in descriptions):
+ raise TypeError(
+ "Every item in the descriptions list must be an "
+ "AccessDescription"
+ )
+
+ self._descriptions = descriptions
+
+ __len__, __iter__, __getitem__ = _make_sequence_methods("_descriptions")
+
+ def __repr__(self) -> str:
+ return f""
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, SubjectInformationAccess):
+ return NotImplemented
+
+ return self._descriptions == other._descriptions
+
+ def __hash__(self) -> int:
+ return hash(tuple(self._descriptions))
+
+ def public_bytes(self) -> bytes:
+ return rust_x509.encode_extension_value(self)
+
+
+class AccessDescription:
+ def __init__(
+ self, access_method: ObjectIdentifier, access_location: GeneralName
+ ) -> None:
+ if not isinstance(access_method, ObjectIdentifier):
+ raise TypeError("access_method must be an ObjectIdentifier")
+
+ if not isinstance(access_location, GeneralName):
+ raise TypeError("access_location must be a GeneralName")
+
+ self._access_method = access_method
+ self._access_location = access_location
+
+ def __repr__(self) -> str:
+ return (
+ "".format(self)
+ )
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, AccessDescription):
+ return NotImplemented
+
+ return (
+ self.access_method == other.access_method
+ and self.access_location == other.access_location
+ )
+
+ def __hash__(self) -> int:
+ return hash((self.access_method, self.access_location))
+
+ @property
+ def access_method(self) -> ObjectIdentifier:
+ return self._access_method
+
+ @property
+ def access_location(self) -> GeneralName:
+ return self._access_location
+
+
+class BasicConstraints(ExtensionType):
+ oid = ExtensionOID.BASIC_CONSTRAINTS
+
+ def __init__(self, ca: bool, path_length: typing.Optional[int]) -> None:
+ if not isinstance(ca, bool):
+ raise TypeError("ca must be a boolean value")
+
+ if path_length is not None and not ca:
+ raise ValueError("path_length must be None when ca is False")
+
+ if path_length is not None and (
+ not isinstance(path_length, int) or path_length < 0
+ ):
+ raise TypeError(
+ "path_length must be a non-negative integer or None"
+ )
+
+ self._ca = ca
+ self._path_length = path_length
+
+ @property
+ def ca(self) -> bool:
+ return self._ca
+
+ @property
+ def path_length(self) -> typing.Optional[int]:
+ return self._path_length
+
+ def __repr__(self) -> str:
+ return (
+ ""
+ ).format(self)
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, BasicConstraints):
+ return NotImplemented
+
+ return self.ca == other.ca and self.path_length == other.path_length
+
+ def __hash__(self) -> int:
+ return hash((self.ca, self.path_length))
+
+ def public_bytes(self) -> bytes:
+ return rust_x509.encode_extension_value(self)
+
+
+class DeltaCRLIndicator(ExtensionType):
+ oid = ExtensionOID.DELTA_CRL_INDICATOR
+
+ def __init__(self, crl_number: int) -> None:
+ if not isinstance(crl_number, int):
+ raise TypeError("crl_number must be an integer")
+
+ self._crl_number = crl_number
+
+ @property
+ def crl_number(self) -> int:
+ return self._crl_number
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, DeltaCRLIndicator):
+ return NotImplemented
+
+ return self.crl_number == other.crl_number
+
+ def __hash__(self) -> int:
+ return hash(self.crl_number)
+
+ def __repr__(self) -> str:
+ return f""
+
+ def public_bytes(self) -> bytes:
+ return rust_x509.encode_extension_value(self)
+
+
+class CRLDistributionPoints(ExtensionType):
+ oid = ExtensionOID.CRL_DISTRIBUTION_POINTS
+
+ def __init__(
+ self, distribution_points: typing.Iterable["DistributionPoint"]
+ ) -> None:
+ distribution_points = list(distribution_points)
+ if not all(
+ isinstance(x, DistributionPoint) for x in distribution_points
+ ):
+ raise TypeError(
+ "distribution_points must be a list of DistributionPoint "
+ "objects"
+ )
+
+ self._distribution_points = distribution_points
+
+ __len__, __iter__, __getitem__ = _make_sequence_methods(
+ "_distribution_points"
+ )
+
+ def __repr__(self) -> str:
+ return f""
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, CRLDistributionPoints):
+ return NotImplemented
+
+ return self._distribution_points == other._distribution_points
+
+ def __hash__(self) -> int:
+ return hash(tuple(self._distribution_points))
+
+ def public_bytes(self) -> bytes:
+ return rust_x509.encode_extension_value(self)
+
+
+class FreshestCRL(ExtensionType):
+ oid = ExtensionOID.FRESHEST_CRL
+
+ def __init__(
+ self, distribution_points: typing.Iterable["DistributionPoint"]
+ ) -> None:
+ distribution_points = list(distribution_points)
+ if not all(
+ isinstance(x, DistributionPoint) for x in distribution_points
+ ):
+ raise TypeError(
+ "distribution_points must be a list of DistributionPoint "
+ "objects"
+ )
+
+ self._distribution_points = distribution_points
+
+ __len__, __iter__, __getitem__ = _make_sequence_methods(
+ "_distribution_points"
+ )
+
+ def __repr__(self) -> str:
+ return f""
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, FreshestCRL):
+ return NotImplemented
+
+ return self._distribution_points == other._distribution_points
+
+ def __hash__(self) -> int:
+ return hash(tuple(self._distribution_points))
+
+ def public_bytes(self) -> bytes:
+ return rust_x509.encode_extension_value(self)
+
+
+class DistributionPoint:
+ def __init__(
+ self,
+ full_name: typing.Optional[typing.Iterable[GeneralName]],
+ relative_name: typing.Optional[RelativeDistinguishedName],
+ reasons: typing.Optional[typing.FrozenSet["ReasonFlags"]],
+ crl_issuer: typing.Optional[typing.Iterable[GeneralName]],
+ ) -> None:
+ if full_name and relative_name:
+ raise ValueError(
+ "You cannot provide both full_name and relative_name, at "
+ "least one must be None."
+ )
+ if not full_name and not relative_name and not crl_issuer:
+ raise ValueError(
+ "Either full_name, relative_name or crl_issuer must be "
+ "provided."
+ )
+
+ if full_name is not None:
+ full_name = list(full_name)
+ if not all(isinstance(x, GeneralName) for x in full_name):
+ raise TypeError(
+ "full_name must be a list of GeneralName objects"
+ )
+
+ if relative_name:
+ if not isinstance(relative_name, RelativeDistinguishedName):
+ raise TypeError(
+ "relative_name must be a RelativeDistinguishedName"
+ )
+
+ if crl_issuer is not None:
+ crl_issuer = list(crl_issuer)
+ if not all(isinstance(x, GeneralName) for x in crl_issuer):
+ raise TypeError(
+ "crl_issuer must be None or a list of general names"
+ )
+
+ if reasons and (
+ not isinstance(reasons, frozenset)
+ or not all(isinstance(x, ReasonFlags) for x in reasons)
+ ):
+ raise TypeError("reasons must be None or frozenset of ReasonFlags")
+
+ if reasons and (
+ ReasonFlags.unspecified in reasons
+ or ReasonFlags.remove_from_crl in reasons
+ ):
+ raise ValueError(
+ "unspecified and remove_from_crl are not valid reasons in a "
+ "DistributionPoint"
+ )
+
+ self._full_name = full_name
+ self._relative_name = relative_name
+ self._reasons = reasons
+ self._crl_issuer = crl_issuer
+
+ def __repr__(self) -> str:
+ return (
+ "".format(self)
+ )
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, DistributionPoint):
+ return NotImplemented
+
+ return (
+ self.full_name == other.full_name
+ and self.relative_name == other.relative_name
+ and self.reasons == other.reasons
+ and self.crl_issuer == other.crl_issuer
+ )
+
+ def __hash__(self) -> int:
+ if self.full_name is not None:
+ fn: typing.Optional[typing.Tuple[GeneralName, ...]] = tuple(
+ self.full_name
+ )
+ else:
+ fn = None
+
+ if self.crl_issuer is not None:
+ crl_issuer: typing.Optional[
+ typing.Tuple[GeneralName, ...]
+ ] = tuple(self.crl_issuer)
+ else:
+ crl_issuer = None
+
+ return hash((fn, self.relative_name, self.reasons, crl_issuer))
+
+ @property
+ def full_name(self) -> typing.Optional[typing.List[GeneralName]]:
+ return self._full_name
+
+ @property
+ def relative_name(self) -> typing.Optional[RelativeDistinguishedName]:
+ return self._relative_name
+
+ @property
+ def reasons(self) -> typing.Optional[typing.FrozenSet["ReasonFlags"]]:
+ return self._reasons
+
+ @property
+ def crl_issuer(self) -> typing.Optional[typing.List[GeneralName]]:
+ return self._crl_issuer
+
+
+class ReasonFlags(utils.Enum):
+    """
+    Revocation reasons, used both in CRL entries and in the
+    DistributionPoint reasons field.
+    """
+
+    unspecified = "unspecified"
+    key_compromise = "keyCompromise"
+    ca_compromise = "cACompromise"
+    affiliation_changed = "affiliationChanged"
+    superseded = "superseded"
+    cessation_of_operation = "cessationOfOperation"
+    certificate_hold = "certificateHold"
+    privilege_withdrawn = "privilegeWithdrawn"
+    aa_compromise = "aACompromise"
+    remove_from_crl = "removeFromCRL"
+
+
+# These are distribution point bit string mappings. Not to be confused with
+# CRLReason reason flags bit string mappings.
+# ReasonFlags ::= BIT STRING {
+# unused (0),
+# keyCompromise (1),
+# cACompromise (2),
+# affiliationChanged (3),
+# superseded (4),
+# cessationOfOperation (5),
+# certificateHold (6),
+# privilegeWithdrawn (7),
+# aACompromise (8) }
# Bit position -> ReasonFlags member (bit 0, "unused", has no member, and
# unspecified/remove_from_crl are not representable in this bit string).
_REASON_BIT_MAPPING = {
    1: ReasonFlags.key_compromise,
    2: ReasonFlags.ca_compromise,
    3: ReasonFlags.affiliation_changed,
    4: ReasonFlags.superseded,
    5: ReasonFlags.cessation_of_operation,
    6: ReasonFlags.certificate_hold,
    7: ReasonFlags.privilege_withdrawn,
    8: ReasonFlags.aa_compromise,
}
+
# Inverse of _REASON_BIT_MAPPING: ReasonFlags member -> bit position.
_CRLREASONFLAGS = {
    ReasonFlags.key_compromise: 1,
    ReasonFlags.ca_compromise: 2,
    ReasonFlags.affiliation_changed: 3,
    ReasonFlags.superseded: 4,
    ReasonFlags.cessation_of_operation: 5,
    ReasonFlags.certificate_hold: 6,
    ReasonFlags.privilege_withdrawn: 7,
    ReasonFlags.aa_compromise: 8,
}
+
+
class PolicyConstraints(ExtensionType):
    """X.509 Policy Constraints extension (RFC 5280 section 4.2.1.11).

    Holds two optional skip counts; at least one must be non-None.
    """

    oid = ExtensionOID.POLICY_CONSTRAINTS

    def __init__(
        self,
        require_explicit_policy: typing.Optional[int],
        inhibit_policy_mapping: typing.Optional[int],
    ) -> None:
        if require_explicit_policy is not None and not isinstance(
            require_explicit_policy, int
        ):
            raise TypeError(
                "require_explicit_policy must be a non-negative integer or "
                "None"
            )

        if inhibit_policy_mapping is not None and not isinstance(
            inhibit_policy_mapping, int
        ):
            raise TypeError(
                "inhibit_policy_mapping must be a non-negative integer or None"
            )

        if inhibit_policy_mapping is None and require_explicit_policy is None:
            raise ValueError(
                "At least one of require_explicit_policy and "
                "inhibit_policy_mapping must not be None"
            )

        self._require_explicit_policy = require_explicit_policy
        self._inhibit_policy_mapping = inhibit_policy_mapping

    def __repr__(self) -> str:
        # BUG FIX: repr previously returned an empty string; restore the
        # informative upstream format.
        return (
            "<PolicyConstraints(require_explicit_policy={0.require_explicit"
            "_policy}, inhibit_policy_mapping={0.inhibit_policy_"
            "mapping})>".format(self)
        )

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, PolicyConstraints):
            return NotImplemented

        return (
            self.require_explicit_policy == other.require_explicit_policy
            and self.inhibit_policy_mapping == other.inhibit_policy_mapping
        )

    def __hash__(self) -> int:
        return hash(
            (self.require_explicit_policy, self.inhibit_policy_mapping)
        )

    @property
    def require_explicit_policy(self) -> typing.Optional[int]:
        return self._require_explicit_policy

    @property
    def inhibit_policy_mapping(self) -> typing.Optional[int]:
        return self._inhibit_policy_mapping

    def public_bytes(self) -> bytes:
        # DER-encode via the Rust backend.
        return rust_x509.encode_extension_value(self)
+
+
class CertificatePolicies(ExtensionType):
    """X.509 Certificate Policies extension: a sequence of PolicyInformation."""

    oid = ExtensionOID.CERTIFICATE_POLICIES

    def __init__(self, policies: typing.Iterable["PolicyInformation"]) -> None:
        policies = list(policies)
        if not all(isinstance(x, PolicyInformation) for x in policies):
            raise TypeError(
                "Every item in the policies list must be a "
                "PolicyInformation"
            )

        self._policies = policies

    __len__, __iter__, __getitem__ = _make_sequence_methods("_policies")

    def __repr__(self) -> str:
        # BUG FIX: repr previously returned an empty string.
        return f"<CertificatePolicies({self._policies})>"

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, CertificatePolicies):
            return NotImplemented

        return self._policies == other._policies

    def __hash__(self) -> int:
        return hash(tuple(self._policies))

    def public_bytes(self) -> bytes:
        return rust_x509.encode_extension_value(self)
+
+
class PolicyInformation:
    """One certificate policy: an OID plus optional qualifiers.

    Qualifiers are either CPS URI strings or UserNotice objects.
    """

    def __init__(
        self,
        policy_identifier: ObjectIdentifier,
        policy_qualifiers: typing.Optional[
            typing.Iterable[typing.Union[str, "UserNotice"]]
        ],
    ) -> None:
        if not isinstance(policy_identifier, ObjectIdentifier):
            raise TypeError("policy_identifier must be an ObjectIdentifier")

        self._policy_identifier = policy_identifier

        if policy_qualifiers is not None:
            policy_qualifiers = list(policy_qualifiers)
            if not all(
                isinstance(x, (str, UserNotice)) for x in policy_qualifiers
            ):
                raise TypeError(
                    "policy_qualifiers must be a list of strings and/or "
                    "UserNotice objects or None"
                )

        self._policy_qualifiers = policy_qualifiers

    def __repr__(self) -> str:
        # BUG FIX: repr previously returned an empty string.
        return (
            "<PolicyInformation(policy_identifier={0.policy_identifier}, "
            "policy_qualifiers={0.policy_qualifiers})>".format(self)
        )

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, PolicyInformation):
            return NotImplemented

        return (
            self.policy_identifier == other.policy_identifier
            and self.policy_qualifiers == other.policy_qualifiers
        )

    def __hash__(self) -> int:
        # The qualifier list is unhashable; freeze it (preserving None).
        if self.policy_qualifiers is not None:
            pq: typing.Optional[
                typing.Tuple[typing.Union[str, "UserNotice"], ...]
            ] = tuple(self.policy_qualifiers)
        else:
            pq = None

        return hash((self.policy_identifier, pq))

    @property
    def policy_identifier(self) -> ObjectIdentifier:
        return self._policy_identifier

    @property
    def policy_qualifiers(
        self,
    ) -> typing.Optional[typing.List[typing.Union[str, "UserNotice"]]]:
        return self._policy_qualifiers
+
+
class UserNotice:
    """Certificate-policy user notice: optional reference plus optional text."""

    def __init__(
        self,
        notice_reference: typing.Optional["NoticeReference"],
        explicit_text: typing.Optional[str],
    ) -> None:
        if notice_reference and not isinstance(
            notice_reference, NoticeReference
        ):
            raise TypeError(
                "notice_reference must be None or a NoticeReference"
            )

        self._notice_reference = notice_reference
        self._explicit_text = explicit_text

    def __repr__(self) -> str:
        # BUG FIX: repr previously returned an empty string.
        return (
            "<UserNotice(notice_reference={0.notice_reference}, "
            "explicit_text={0.explicit_text!r})>".format(self)
        )

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, UserNotice):
            return NotImplemented

        return (
            self.notice_reference == other.notice_reference
            and self.explicit_text == other.explicit_text
        )

    def __hash__(self) -> int:
        return hash((self.notice_reference, self.explicit_text))

    @property
    def notice_reference(self) -> typing.Optional["NoticeReference"]:
        return self._notice_reference

    @property
    def explicit_text(self) -> typing.Optional[str]:
        return self._explicit_text
+
+
class NoticeReference:
    """Reference to a notice in an organization's document, by number."""

    def __init__(
        self,
        organization: typing.Optional[str],
        notice_numbers: typing.Iterable[int],
    ) -> None:
        self._organization = organization
        notice_numbers = list(notice_numbers)
        if not all(isinstance(x, int) for x in notice_numbers):
            raise TypeError("notice_numbers must be a list of integers")

        self._notice_numbers = notice_numbers

    def __repr__(self) -> str:
        # BUG FIX: repr previously returned an empty string.
        return (
            "<NoticeReference(organization={0.organization!r}, "
            "notice_numbers={0.notice_numbers})>".format(self)
        )

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, NoticeReference):
            return NotImplemented

        return (
            self.organization == other.organization
            and self.notice_numbers == other.notice_numbers
        )

    def __hash__(self) -> int:
        # notice_numbers is a list; freeze it for hashing.
        return hash((self.organization, tuple(self.notice_numbers)))

    @property
    def organization(self) -> typing.Optional[str]:
        return self._organization

    @property
    def notice_numbers(self) -> typing.List[int]:
        return self._notice_numbers
+
+
class ExtendedKeyUsage(ExtensionType):
    """X.509 Extended Key Usage extension: a sequence of purpose OIDs."""

    oid = ExtensionOID.EXTENDED_KEY_USAGE

    def __init__(self, usages: typing.Iterable[ObjectIdentifier]) -> None:
        usages = list(usages)
        if not all(isinstance(x, ObjectIdentifier) for x in usages):
            raise TypeError(
                "Every item in the usages list must be an ObjectIdentifier"
            )

        self._usages = usages

    __len__, __iter__, __getitem__ = _make_sequence_methods("_usages")

    def __repr__(self) -> str:
        # BUG FIX: repr previously returned an empty string.
        return f"<ExtendedKeyUsage({self._usages})>"

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, ExtendedKeyUsage):
            return NotImplemented

        return self._usages == other._usages

    def __hash__(self) -> int:
        return hash(tuple(self._usages))

    def public_bytes(self) -> bytes:
        return rust_x509.encode_extension_value(self)
+
+
class OCSPNoCheck(ExtensionType):
    """OCSP No Check extension (RFC 6960): a valueless marker, so all
    instances compare equal."""

    oid = ExtensionOID.OCSP_NO_CHECK

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, OCSPNoCheck):
            return NotImplemented

        return True

    def __hash__(self) -> int:
        # All instances are interchangeable; hash on the class itself.
        return hash(OCSPNoCheck)

    def __repr__(self) -> str:
        # BUG FIX: repr previously returned an empty string.
        return "<OCSPNoCheck()>"

    def public_bytes(self) -> bytes:
        return rust_x509.encode_extension_value(self)
+
+
class PrecertPoison(ExtensionType):
    """CT precertificate poison extension (RFC 6962): a valueless marker,
    so all instances compare equal."""

    oid = ExtensionOID.PRECERT_POISON

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, PrecertPoison):
            return NotImplemented

        return True

    def __hash__(self) -> int:
        # All instances are interchangeable; hash on the class itself.
        return hash(PrecertPoison)

    def __repr__(self) -> str:
        # BUG FIX: repr previously returned an empty string.
        return "<PrecertPoison()>"

    def public_bytes(self) -> bytes:
        return rust_x509.encode_extension_value(self)
+
+
class TLSFeature(ExtensionType):
    """TLS Feature extension (RFC 7633), e.g. OCSP Must-Staple."""

    oid = ExtensionOID.TLS_FEATURE

    def __init__(self, features: typing.Iterable["TLSFeatureType"]) -> None:
        features = list(features)
        # NOTE: an empty list raises TypeError (not ValueError) to preserve
        # the established public behavior.
        if (
            not all(isinstance(x, TLSFeatureType) for x in features)
            or len(features) == 0
        ):
            raise TypeError(
                "features must be a list of elements from the TLSFeatureType "
                "enum"
            )

        self._features = features

    __len__, __iter__, __getitem__ = _make_sequence_methods("_features")

    def __repr__(self) -> str:
        # BUG FIX: repr previously returned an empty string.
        return f"<TLSFeature(features={self._features})>"

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, TLSFeature):
            return NotImplemented

        return self._features == other._features

    def __hash__(self) -> int:
        return hash(tuple(self._features))

    def public_bytes(self) -> bytes:
        return rust_x509.encode_extension_value(self)
+
+
class TLSFeatureType(utils.Enum):
    """TLS extension code points usable in the TLS Feature extension."""

    # status_request is defined in RFC 6066 and is used for what is commonly
    # called OCSP Must-Staple when present in the TLS Feature extension in an
    # X.509 certificate.
    status_request = 5
    # status_request_v2 is defined in RFC 6961 and allows multiple OCSP
    # responses to be provided. It is not currently in use by clients or
    # servers.
    status_request_v2 = 17
+
+
# Reverse lookup table: wire value -> TLSFeatureType member.
_TLS_FEATURE_TYPE_TO_ENUM = {x.value: x for x in TLSFeatureType}
+
+
class InhibitAnyPolicy(ExtensionType):
    """X.509 Inhibit anyPolicy extension (RFC 5280 section 4.2.1.14)."""

    oid = ExtensionOID.INHIBIT_ANY_POLICY

    def __init__(self, skip_certs: int) -> None:
        if not isinstance(skip_certs, int):
            raise TypeError("skip_certs must be an integer")

        if skip_certs < 0:
            raise ValueError("skip_certs must be a non-negative integer")

        self._skip_certs = skip_certs

    def __repr__(self) -> str:
        # BUG FIX: repr previously returned an empty string.
        return f"<InhibitAnyPolicy(skip_certs={self.skip_certs})>"

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, InhibitAnyPolicy):
            return NotImplemented

        return self.skip_certs == other.skip_certs

    def __hash__(self) -> int:
        return hash(self.skip_certs)

    @property
    def skip_certs(self) -> int:
        return self._skip_certs

    def public_bytes(self) -> bytes:
        return rust_x509.encode_extension_value(self)
+
+
class KeyUsage(ExtensionType):
    """X.509 Key Usage extension (RFC 5280 section 4.2.1.3).

    Records which cryptographic operations the certified key may perform.
    encipher_only/decipher_only are only meaningful when key_agreement is
    true; this is enforced at construction and on attribute access.
    """

    oid = ExtensionOID.KEY_USAGE

    def __init__(
        self,
        digital_signature: bool,
        content_commitment: bool,
        key_encipherment: bool,
        data_encipherment: bool,
        key_agreement: bool,
        key_cert_sign: bool,
        crl_sign: bool,
        encipher_only: bool,
        decipher_only: bool,
    ) -> None:
        if not key_agreement and (encipher_only or decipher_only):
            raise ValueError(
                "encipher_only and decipher_only can only be true when "
                "key_agreement is true"
            )

        self._digital_signature = digital_signature
        self._content_commitment = content_commitment
        self._key_encipherment = key_encipherment
        self._data_encipherment = data_encipherment
        self._key_agreement = key_agreement
        self._key_cert_sign = key_cert_sign
        self._crl_sign = crl_sign
        self._encipher_only = encipher_only
        self._decipher_only = decipher_only

    @property
    def digital_signature(self) -> bool:
        return self._digital_signature

    @property
    def content_commitment(self) -> bool:
        return self._content_commitment

    @property
    def key_encipherment(self) -> bool:
        return self._key_encipherment

    @property
    def data_encipherment(self) -> bool:
        return self._data_encipherment

    @property
    def key_agreement(self) -> bool:
        return self._key_agreement

    @property
    def key_cert_sign(self) -> bool:
        return self._key_cert_sign

    @property
    def crl_sign(self) -> bool:
        return self._crl_sign

    @property
    def encipher_only(self) -> bool:
        # Undefined without key_agreement (RFC 5280); raise rather than
        # return a misleading False.
        if not self.key_agreement:
            raise ValueError(
                "encipher_only is undefined unless key_agreement is true"
            )
        else:
            return self._encipher_only

    @property
    def decipher_only(self) -> bool:
        if not self.key_agreement:
            raise ValueError(
                "decipher_only is undefined unless key_agreement is true"
            )
        else:
            return self._decipher_only

    def __repr__(self) -> str:
        try:
            encipher_only = self.encipher_only
            decipher_only = self.decipher_only
        except ValueError:
            # Users found None confusing because even though encipher/decipher
            # have no meaning unless key_agreement is true, to construct an
            # instance of the class you still need to pass False.
            encipher_only = False
            decipher_only = False

        # BUG FIX: repr previously returned an empty string; restore the
        # informative upstream format.
        return (
            "<KeyUsage(digital_signature={0.digital_signature}, "
            "content_commitment={0.content_commitment}, "
            "key_encipherment={0.key_encipherment}, "
            "data_encipherment={0.data_encipherment}, "
            "key_agreement={0.key_agreement}, "
            "key_cert_sign={0.key_cert_sign}, crl_sign={0.crl_sign}, "
            "encipher_only={1}, decipher_only={2})>"
        ).format(self, encipher_only, decipher_only)

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, KeyUsage):
            return NotImplemented

        # Compare the raw encipher/decipher flags so comparison never raises.
        return (
            self.digital_signature == other.digital_signature
            and self.content_commitment == other.content_commitment
            and self.key_encipherment == other.key_encipherment
            and self.data_encipherment == other.data_encipherment
            and self.key_agreement == other.key_agreement
            and self.key_cert_sign == other.key_cert_sign
            and self.crl_sign == other.crl_sign
            and self._encipher_only == other._encipher_only
            and self._decipher_only == other._decipher_only
        )

    def __hash__(self) -> int:
        return hash(
            (
                self.digital_signature,
                self.content_commitment,
                self.key_encipherment,
                self.data_encipherment,
                self.key_agreement,
                self.key_cert_sign,
                self.crl_sign,
                self._encipher_only,
                self._decipher_only,
            )
        )

    def public_bytes(self) -> bytes:
        return rust_x509.encode_extension_value(self)
+
+
class NameConstraints(ExtensionType):
    """X.509 Name Constraints extension (RFC 5280 section 4.2.1.10).

    Restricts the name space of certificates below a CA. At least one of
    permitted_subtrees / excluded_subtrees must be a non-empty list.
    """

    oid = ExtensionOID.NAME_CONSTRAINTS

    def __init__(
        self,
        permitted_subtrees: typing.Optional[typing.Iterable[GeneralName]],
        excluded_subtrees: typing.Optional[typing.Iterable[GeneralName]],
    ) -> None:
        if permitted_subtrees is not None:
            permitted_subtrees = list(permitted_subtrees)
            if not permitted_subtrees:
                raise ValueError(
                    "permitted_subtrees must be a non-empty list or None"
                )
            if not all(isinstance(x, GeneralName) for x in permitted_subtrees):
                raise TypeError(
                    "permitted_subtrees must be a list of GeneralName objects "
                    "or None"
                )

            self._validate_tree(permitted_subtrees)

        if excluded_subtrees is not None:
            excluded_subtrees = list(excluded_subtrees)
            if not excluded_subtrees:
                raise ValueError(
                    "excluded_subtrees must be a non-empty list or None"
                )
            if not all(isinstance(x, GeneralName) for x in excluded_subtrees):
                raise TypeError(
                    "excluded_subtrees must be a list of GeneralName objects "
                    "or None"
                )

            self._validate_tree(excluded_subtrees)

        if permitted_subtrees is None and excluded_subtrees is None:
            raise ValueError(
                "At least one of permitted_subtrees and excluded_subtrees "
                "must not be None"
            )

        self._permitted_subtrees = permitted_subtrees
        self._excluded_subtrees = excluded_subtrees

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, NameConstraints):
            return NotImplemented

        return (
            self.excluded_subtrees == other.excluded_subtrees
            and self.permitted_subtrees == other.permitted_subtrees
        )

    def _validate_tree(self, tree: typing.Iterable[GeneralName]) -> None:
        # Apply the extra per-type constraints on subtree entries.
        self._validate_ip_name(tree)
        self._validate_dns_name(tree)

    def _validate_ip_name(self, tree: typing.Iterable[GeneralName]) -> None:
        # IPAddress constraints must be networks (CIDR ranges), not hosts.
        if any(
            isinstance(name, IPAddress)
            and not isinstance(
                name.value, (ipaddress.IPv4Network, ipaddress.IPv6Network)
            )
            for name in tree
        ):
            raise TypeError(
                "IPAddress name constraints must be an IPv4Network or"
                " IPv6Network object"
            )

    def _validate_dns_name(self, tree: typing.Iterable[GeneralName]) -> None:
        # RFC 5280 DNS constraints match by suffix; wildcards are invalid.
        if any(
            isinstance(name, DNSName) and "*" in name.value for name in tree
        ):
            raise ValueError(
                "DNSName name constraints must not contain the '*' wildcard"
                " character"
            )

    def __repr__(self) -> str:
        # BUG FIX: repr previously returned an empty string.
        return (
            "<NameConstraints(permitted_subtrees={0.permitted_subtrees}, "
            "excluded_subtrees={0.excluded_subtrees})>".format(self)
        )

    def __hash__(self) -> int:
        # Freeze the subtree lists (preserving None) so they are hashable.
        if self.permitted_subtrees is not None:
            ps: typing.Optional[typing.Tuple[GeneralName, ...]] = tuple(
                self.permitted_subtrees
            )
        else:
            ps = None

        if self.excluded_subtrees is not None:
            es: typing.Optional[typing.Tuple[GeneralName, ...]] = tuple(
                self.excluded_subtrees
            )
        else:
            es = None

        return hash((ps, es))

    @property
    def permitted_subtrees(
        self,
    ) -> typing.Optional[typing.List[GeneralName]]:
        return self._permitted_subtrees

    @property
    def excluded_subtrees(
        self,
    ) -> typing.Optional[typing.List[GeneralName]]:
        return self._excluded_subtrees

    def public_bytes(self) -> bytes:
        return rust_x509.encode_extension_value(self)
+
+
class Extension(typing.Generic[ExtensionTypeVar]):
    """An extension as it appears in a certificate: OID, criticality flag,
    and the decoded extension value."""

    def __init__(
        self, oid: ObjectIdentifier, critical: bool, value: ExtensionTypeVar
    ) -> None:
        if not isinstance(oid, ObjectIdentifier):
            raise TypeError(
                "oid argument must be an ObjectIdentifier instance."
            )

        if not isinstance(critical, bool):
            raise TypeError("critical must be a boolean value")

        self._oid = oid
        self._critical = critical
        self._value = value

    @property
    def oid(self) -> ObjectIdentifier:
        return self._oid

    @property
    def critical(self) -> bool:
        return self._critical

    @property
    def value(self) -> ExtensionTypeVar:
        return self._value

    def __repr__(self) -> str:
        # BUG FIX: repr previously returned an empty string.
        return (
            "<Extension(oid={0.oid}, critical={0.critical}, "
            "value={0.value})>"
        ).format(self)

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, Extension):
            return NotImplemented

        return (
            self.oid == other.oid
            and self.critical == other.critical
            and self.value == other.value
        )

    def __hash__(self) -> int:
        return hash((self.oid, self.critical, self.value))
+
+
class GeneralNames:
    """An ordered sequence of GeneralName objects with typed value lookup."""

    def __init__(self, general_names: typing.Iterable[GeneralName]) -> None:
        general_names = list(general_names)
        if not all(isinstance(x, GeneralName) for x in general_names):
            raise TypeError(
                "Every item in the general_names list must be an "
                "object conforming to the GeneralName interface"
            )

        self._general_names = general_names

    __len__, __iter__, __getitem__ = _make_sequence_methods("_general_names")

    @typing.overload
    def get_values_for_type(
        self,
        type: typing.Union[
            typing.Type[DNSName],
            typing.Type[UniformResourceIdentifier],
            typing.Type[RFC822Name],
        ],
    ) -> typing.List[str]:
        ...

    @typing.overload
    def get_values_for_type(
        self,
        type: typing.Type[DirectoryName],
    ) -> typing.List[Name]:
        ...

    @typing.overload
    def get_values_for_type(
        self,
        type: typing.Type[RegisteredID],
    ) -> typing.List[ObjectIdentifier]:
        ...

    @typing.overload
    def get_values_for_type(
        self, type: typing.Type[IPAddress]
    ) -> typing.List[_IPAddressTypes]:
        ...

    @typing.overload
    def get_values_for_type(
        self, type: typing.Type[OtherName]
    ) -> typing.List[OtherName]:
        ...

    def get_values_for_type(
        self,
        type: typing.Union[
            typing.Type[DNSName],
            typing.Type[DirectoryName],
            typing.Type[IPAddress],
            typing.Type[OtherName],
            typing.Type[RFC822Name],
            typing.Type[RegisteredID],
            typing.Type[UniformResourceIdentifier],
        ],
    ) -> typing.Union[
        typing.List[_IPAddressTypes],
        typing.List[str],
        typing.List[OtherName],
        typing.List[Name],
        typing.List[ObjectIdentifier],
    ]:
        """Return the .value of every entry of the given type.

        OtherName entries are returned whole, because they carry two
        significant properties (type_id and value), not just one value.
        """
        objs = (i for i in self if isinstance(i, type))
        if type != OtherName:
            return [i.value for i in objs]
        return list(objs)

    def __repr__(self) -> str:
        # BUG FIX: repr previously returned an empty string.
        return f"<GeneralNames({self._general_names})>"

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, GeneralNames):
            return NotImplemented

        return self._general_names == other._general_names

    def __hash__(self) -> int:
        return hash(tuple(self._general_names))
+
+
class SubjectAlternativeName(ExtensionType):
    """X.509 Subject Alternative Name extension: wraps a GeneralNames
    sequence and delegates lookup to it."""

    oid = ExtensionOID.SUBJECT_ALTERNATIVE_NAME

    def __init__(self, general_names: typing.Iterable[GeneralName]) -> None:
        self._general_names = GeneralNames(general_names)

    __len__, __iter__, __getitem__ = _make_sequence_methods("_general_names")

    @typing.overload
    def get_values_for_type(
        self,
        type: typing.Union[
            typing.Type[DNSName],
            typing.Type[UniformResourceIdentifier],
            typing.Type[RFC822Name],
        ],
    ) -> typing.List[str]:
        ...

    @typing.overload
    def get_values_for_type(
        self,
        type: typing.Type[DirectoryName],
    ) -> typing.List[Name]:
        ...

    @typing.overload
    def get_values_for_type(
        self,
        type: typing.Type[RegisteredID],
    ) -> typing.List[ObjectIdentifier]:
        ...

    @typing.overload
    def get_values_for_type(
        self, type: typing.Type[IPAddress]
    ) -> typing.List[_IPAddressTypes]:
        ...

    @typing.overload
    def get_values_for_type(
        self, type: typing.Type[OtherName]
    ) -> typing.List[OtherName]:
        ...

    def get_values_for_type(
        self,
        type: typing.Union[
            typing.Type[DNSName],
            typing.Type[DirectoryName],
            typing.Type[IPAddress],
            typing.Type[OtherName],
            typing.Type[RFC822Name],
            typing.Type[RegisteredID],
            typing.Type[UniformResourceIdentifier],
        ],
    ) -> typing.Union[
        typing.List[_IPAddressTypes],
        typing.List[str],
        typing.List[OtherName],
        typing.List[Name],
        typing.List[ObjectIdentifier],
    ]:
        # Delegate to the wrapped GeneralNames sequence.
        return self._general_names.get_values_for_type(type)

    def __repr__(self) -> str:
        # BUG FIX: repr previously returned an empty string.
        return f"<SubjectAlternativeName({self._general_names})>"

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, SubjectAlternativeName):
            return NotImplemented

        return self._general_names == other._general_names

    def __hash__(self) -> int:
        return hash(self._general_names)

    def public_bytes(self) -> bytes:
        return rust_x509.encode_extension_value(self)
+
+
class IssuerAlternativeName(ExtensionType):
    """X.509 Issuer Alternative Name extension: wraps a GeneralNames
    sequence and delegates lookup to it."""

    oid = ExtensionOID.ISSUER_ALTERNATIVE_NAME

    def __init__(self, general_names: typing.Iterable[GeneralName]) -> None:
        self._general_names = GeneralNames(general_names)

    __len__, __iter__, __getitem__ = _make_sequence_methods("_general_names")

    @typing.overload
    def get_values_for_type(
        self,
        type: typing.Union[
            typing.Type[DNSName],
            typing.Type[UniformResourceIdentifier],
            typing.Type[RFC822Name],
        ],
    ) -> typing.List[str]:
        ...

    @typing.overload
    def get_values_for_type(
        self,
        type: typing.Type[DirectoryName],
    ) -> typing.List[Name]:
        ...

    @typing.overload
    def get_values_for_type(
        self,
        type: typing.Type[RegisteredID],
    ) -> typing.List[ObjectIdentifier]:
        ...

    @typing.overload
    def get_values_for_type(
        self, type: typing.Type[IPAddress]
    ) -> typing.List[_IPAddressTypes]:
        ...

    @typing.overload
    def get_values_for_type(
        self, type: typing.Type[OtherName]
    ) -> typing.List[OtherName]:
        ...

    def get_values_for_type(
        self,
        type: typing.Union[
            typing.Type[DNSName],
            typing.Type[DirectoryName],
            typing.Type[IPAddress],
            typing.Type[OtherName],
            typing.Type[RFC822Name],
            typing.Type[RegisteredID],
            typing.Type[UniformResourceIdentifier],
        ],
    ) -> typing.Union[
        typing.List[_IPAddressTypes],
        typing.List[str],
        typing.List[OtherName],
        typing.List[Name],
        typing.List[ObjectIdentifier],
    ]:
        # Delegate to the wrapped GeneralNames sequence.
        return self._general_names.get_values_for_type(type)

    def __repr__(self) -> str:
        # BUG FIX: repr previously returned an empty string.
        return f"<IssuerAlternativeName({self._general_names})>"

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, IssuerAlternativeName):
            return NotImplemented

        return self._general_names == other._general_names

    def __hash__(self) -> int:
        return hash(self._general_names)

    def public_bytes(self) -> bytes:
        return rust_x509.encode_extension_value(self)
+
+
class CertificateIssuer(ExtensionType):
    """CRL entry Certificate Issuer extension: wraps a GeneralNames
    sequence and delegates lookup to it."""

    oid = CRLEntryExtensionOID.CERTIFICATE_ISSUER

    def __init__(self, general_names: typing.Iterable[GeneralName]) -> None:
        self._general_names = GeneralNames(general_names)

    __len__, __iter__, __getitem__ = _make_sequence_methods("_general_names")

    @typing.overload
    def get_values_for_type(
        self,
        type: typing.Union[
            typing.Type[DNSName],
            typing.Type[UniformResourceIdentifier],
            typing.Type[RFC822Name],
        ],
    ) -> typing.List[str]:
        ...

    @typing.overload
    def get_values_for_type(
        self,
        type: typing.Type[DirectoryName],
    ) -> typing.List[Name]:
        ...

    @typing.overload
    def get_values_for_type(
        self,
        type: typing.Type[RegisteredID],
    ) -> typing.List[ObjectIdentifier]:
        ...

    @typing.overload
    def get_values_for_type(
        self, type: typing.Type[IPAddress]
    ) -> typing.List[_IPAddressTypes]:
        ...

    @typing.overload
    def get_values_for_type(
        self, type: typing.Type[OtherName]
    ) -> typing.List[OtherName]:
        ...

    def get_values_for_type(
        self,
        type: typing.Union[
            typing.Type[DNSName],
            typing.Type[DirectoryName],
            typing.Type[IPAddress],
            typing.Type[OtherName],
            typing.Type[RFC822Name],
            typing.Type[RegisteredID],
            typing.Type[UniformResourceIdentifier],
        ],
    ) -> typing.Union[
        typing.List[_IPAddressTypes],
        typing.List[str],
        typing.List[OtherName],
        typing.List[Name],
        typing.List[ObjectIdentifier],
    ]:
        # Delegate to the wrapped GeneralNames sequence.
        return self._general_names.get_values_for_type(type)

    def __repr__(self) -> str:
        # BUG FIX: repr previously returned an empty string.
        return f"<CertificateIssuer({self._general_names})>"

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, CertificateIssuer):
            return NotImplemented

        return self._general_names == other._general_names

    def __hash__(self) -> int:
        return hash(self._general_names)

    def public_bytes(self) -> bytes:
        return rust_x509.encode_extension_value(self)
+
+
class CRLReason(ExtensionType):
    """CRL entry Reason Code extension: why a certificate was revoked."""

    oid = CRLEntryExtensionOID.CRL_REASON

    def __init__(self, reason: ReasonFlags) -> None:
        if not isinstance(reason, ReasonFlags):
            raise TypeError("reason must be an element from ReasonFlags")

        self._reason = reason

    def __repr__(self) -> str:
        # BUG FIX: repr previously returned an empty string.
        return f"<CRLReason(reason={self.reason})>"

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, CRLReason):
            return NotImplemented

        return self.reason == other.reason

    def __hash__(self) -> int:
        return hash(self.reason)

    @property
    def reason(self) -> ReasonFlags:
        return self._reason

    def public_bytes(self) -> bytes:
        return rust_x509.encode_extension_value(self)
+
+
class InvalidityDate(ExtensionType):
    """CRL entry Invalidity Date extension: when the key was believed
    compromised."""

    oid = CRLEntryExtensionOID.INVALIDITY_DATE

    def __init__(self, invalidity_date: datetime.datetime) -> None:
        if not isinstance(invalidity_date, datetime.datetime):
            raise TypeError("invalidity_date must be a datetime.datetime")

        self._invalidity_date = invalidity_date

    def __repr__(self) -> str:
        # BUG FIX: repr previously returned an empty string.
        return "<InvalidityDate(invalidity_date={})>".format(
            self._invalidity_date
        )

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, InvalidityDate):
            return NotImplemented

        return self.invalidity_date == other.invalidity_date

    def __hash__(self) -> int:
        return hash(self.invalidity_date)

    @property
    def invalidity_date(self) -> datetime.datetime:
        return self._invalidity_date

    def public_bytes(self) -> bytes:
        return rust_x509.encode_extension_value(self)
+
+
class PrecertificateSignedCertificateTimestamps(ExtensionType):
    """CT precertificate SCT list extension (RFC 6962)."""

    oid = ExtensionOID.PRECERT_SIGNED_CERTIFICATE_TIMESTAMPS

    def __init__(
        self,
        signed_certificate_timestamps: typing.Iterable[
            SignedCertificateTimestamp
        ],
    ) -> None:
        signed_certificate_timestamps = list(signed_certificate_timestamps)
        if not all(
            isinstance(sct, SignedCertificateTimestamp)
            for sct in signed_certificate_timestamps
        ):
            raise TypeError(
                "Every item in the signed_certificate_timestamps list must be "
                "a SignedCertificateTimestamp"
            )
        self._signed_certificate_timestamps = signed_certificate_timestamps

    __len__, __iter__, __getitem__ = _make_sequence_methods(
        "_signed_certificate_timestamps"
    )

    def __repr__(self) -> str:
        # BUG FIX: repr previously returned an empty string.
        return "<PrecertificateSignedCertificateTimestamps({})>".format(
            list(self)
        )

    def __hash__(self) -> int:
        return hash(tuple(self._signed_certificate_timestamps))

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, PrecertificateSignedCertificateTimestamps):
            return NotImplemented

        return (
            self._signed_certificate_timestamps
            == other._signed_certificate_timestamps
        )

    def public_bytes(self) -> bytes:
        return rust_x509.encode_extension_value(self)
+
+
class SignedCertificateTimestamps(ExtensionType):
    """SCT list extension as it appears in OCSP responses / certificates."""

    oid = ExtensionOID.SIGNED_CERTIFICATE_TIMESTAMPS

    def __init__(
        self,
        signed_certificate_timestamps: typing.Iterable[
            SignedCertificateTimestamp
        ],
    ) -> None:
        signed_certificate_timestamps = list(signed_certificate_timestamps)
        if not all(
            isinstance(sct, SignedCertificateTimestamp)
            for sct in signed_certificate_timestamps
        ):
            raise TypeError(
                "Every item in the signed_certificate_timestamps list must be "
                "a SignedCertificateTimestamp"
            )
        self._signed_certificate_timestamps = signed_certificate_timestamps

    __len__, __iter__, __getitem__ = _make_sequence_methods(
        "_signed_certificate_timestamps"
    )

    def __repr__(self) -> str:
        # BUG FIX: repr previously returned an empty string.
        return f"<SignedCertificateTimestamps({list(self)})>"

    def __hash__(self) -> int:
        return hash(tuple(self._signed_certificate_timestamps))

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, SignedCertificateTimestamps):
            return NotImplemented

        return (
            self._signed_certificate_timestamps
            == other._signed_certificate_timestamps
        )

    def public_bytes(self) -> bytes:
        return rust_x509.encode_extension_value(self)
+
+
class OCSPNonce(ExtensionType):
    """OCSP Nonce extension (RFC 6960): binds a request to its response."""

    oid = OCSPExtensionOID.NONCE

    def __init__(self, nonce: bytes) -> None:
        if not isinstance(nonce, bytes):
            raise TypeError("nonce must be bytes")

        self._nonce = nonce

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, OCSPNonce):
            return NotImplemented

        return self.nonce == other.nonce

    def __hash__(self) -> int:
        return hash(self.nonce)

    def __repr__(self) -> str:
        # BUG FIX: repr previously returned an empty string.
        return f"<OCSPNonce(nonce={self.nonce!r})>"

    @property
    def nonce(self) -> bytes:
        return self._nonce

    def public_bytes(self) -> bytes:
        return rust_x509.encode_extension_value(self)
+
+
class IssuingDistributionPoint(ExtensionType):
    """CRL Issuing Distribution Point extension (RFC 5280 section 5.2.5).

    At most one of the four boolean scope flags may be True, and the
    extension must not be empty (some field must carry a value).
    """

    oid = ExtensionOID.ISSUING_DISTRIBUTION_POINT

    def __init__(
        self,
        full_name: typing.Optional[typing.Iterable[GeneralName]],
        relative_name: typing.Optional[RelativeDistinguishedName],
        only_contains_user_certs: bool,
        only_contains_ca_certs: bool,
        only_some_reasons: typing.Optional[typing.FrozenSet[ReasonFlags]],
        indirect_crl: bool,
        only_contains_attribute_certs: bool,
    ) -> None:
        if full_name is not None:
            full_name = list(full_name)

        if only_some_reasons and (
            not isinstance(only_some_reasons, frozenset)
            or not all(isinstance(x, ReasonFlags) for x in only_some_reasons)
        ):
            raise TypeError(
                "only_some_reasons must be None or frozenset of ReasonFlags"
            )

        # These two flags exist in CRLReason but are not encodable in the
        # distribution-point ReasonFlags bit string.
        if only_some_reasons and (
            ReasonFlags.unspecified in only_some_reasons
            or ReasonFlags.remove_from_crl in only_some_reasons
        ):
            raise ValueError(
                "unspecified and remove_from_crl are not valid reasons in an "
                "IssuingDistributionPoint"
            )

        if not (
            isinstance(only_contains_user_certs, bool)
            and isinstance(only_contains_ca_certs, bool)
            and isinstance(indirect_crl, bool)
            and isinstance(only_contains_attribute_certs, bool)
        ):
            raise TypeError(
                "only_contains_user_certs, only_contains_ca_certs, "
                "indirect_crl and only_contains_attribute_certs "
                "must all be boolean."
            )

        # The four scope flags are mutually exclusive.
        crl_constraints = [
            only_contains_user_certs,
            only_contains_ca_certs,
            indirect_crl,
            only_contains_attribute_certs,
        ]

        if len([x for x in crl_constraints if x]) > 1:
            raise ValueError(
                "Only one of the following can be set to True: "
                "only_contains_user_certs, only_contains_ca_certs, "
                "indirect_crl, only_contains_attribute_certs"
            )

        if not any(
            [
                only_contains_user_certs,
                only_contains_ca_certs,
                indirect_crl,
                only_contains_attribute_certs,
                full_name,
                relative_name,
                only_some_reasons,
            ]
        ):
            raise ValueError(
                "Cannot create empty extension: "
                "if only_contains_user_certs, only_contains_ca_certs, "
                "indirect_crl, and only_contains_attribute_certs are all False"
                ", then either full_name, relative_name, or only_some_reasons "
                "must have a value."
            )

        self._only_contains_user_certs = only_contains_user_certs
        self._only_contains_ca_certs = only_contains_ca_certs
        self._indirect_crl = indirect_crl
        self._only_contains_attribute_certs = only_contains_attribute_certs
        self._only_some_reasons = only_some_reasons
        self._full_name = full_name
        self._relative_name = relative_name

    def __repr__(self) -> str:
        # BUG FIX: repr previously returned an empty string; restore the
        # informative upstream format.
        return (
            "<IssuingDistributionPoint(full_name={0.full_name}, "
            "relative_name={0.relative_name}, "
            "only_contains_user_certs={0.only_contains_user_certs}, "
            "only_contains_ca_certs={0.only_contains_ca_certs}, "
            "only_some_reasons={0.only_some_reasons}, "
            "indirect_crl={0.indirect_crl}, "
            "only_contains_attribute_certs="
            "{0.only_contains_attribute_certs})>".format(self)
        )

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, IssuingDistributionPoint):
            return NotImplemented

        return (
            self.full_name == other.full_name
            and self.relative_name == other.relative_name
            and self.only_contains_user_certs == other.only_contains_user_certs
            and self.only_contains_ca_certs == other.only_contains_ca_certs
            and self.only_some_reasons == other.only_some_reasons
            and self.indirect_crl == other.indirect_crl
            and self.only_contains_attribute_certs
            == other.only_contains_attribute_certs
        )

    def __hash__(self) -> int:
        # NOTE(review): full_name may be a list here, which is unhashable;
        # upstream shares this behavior, so it is preserved unchanged.
        return hash(
            (
                self.full_name,
                self.relative_name,
                self.only_contains_user_certs,
                self.only_contains_ca_certs,
                self.only_some_reasons,
                self.indirect_crl,
                self.only_contains_attribute_certs,
            )
        )

    @property
    def full_name(self) -> typing.Optional[typing.List[GeneralName]]:
        return self._full_name

    @property
    def relative_name(self) -> typing.Optional[RelativeDistinguishedName]:
        return self._relative_name

    @property
    def only_contains_user_certs(self) -> bool:
        return self._only_contains_user_certs

    @property
    def only_contains_ca_certs(self) -> bool:
        return self._only_contains_ca_certs

    @property
    def only_some_reasons(
        self,
    ) -> typing.Optional[typing.FrozenSet[ReasonFlags]]:
        return self._only_some_reasons

    @property
    def indirect_crl(self) -> bool:
        return self._indirect_crl

    @property
    def only_contains_attribute_certs(self) -> bool:
        return self._only_contains_attribute_certs

    def public_bytes(self) -> bytes:
        return rust_x509.encode_extension_value(self)
+
+
class UnrecognizedExtension(ExtensionType):
    """An extension whose OID is not known to this library; the value is
    kept as the raw DER bytes."""

    def __init__(self, oid: ObjectIdentifier, value: bytes) -> None:
        if not isinstance(oid, ObjectIdentifier):
            raise TypeError("oid must be an ObjectIdentifier")
        self._oid = oid
        self._value = value

    @property
    def oid(self) -> ObjectIdentifier:  # type: ignore[override]
        return self._oid

    @property
    def value(self) -> bytes:
        return self._value

    def __repr__(self) -> str:
        # BUG FIX: repr previously returned an empty string.
        return (
            "<UnrecognizedExtension(oid={0.oid}, "
            "value={0.value!r})>".format(self)
        )

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, UnrecognizedExtension):
            return NotImplemented

        return self.oid == other.oid and self.value == other.value

    def __hash__(self) -> int:
        return hash((self.oid, self.value))

    def public_bytes(self) -> bytes:
        # The stored value is already the encoded form.
        return self.value
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/x509/general_name.py b/Voltage Vikings/lib/python3.9/site-packages/cryptography/x509/general_name.py
new file mode 100644
index 00000000..ce8367b0
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/x509/general_name.py
@@ -0,0 +1,284 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+
+import abc
+import ipaddress
+import typing
+from email.utils import parseaddr
+
+from cryptography.x509.name import Name
+from cryptography.x509.oid import ObjectIdentifier
+
+_IPAddressTypes = typing.Union[
+ ipaddress.IPv4Address,
+ ipaddress.IPv6Address,
+ ipaddress.IPv4Network,
+ ipaddress.IPv6Network,
+]
+
+
class UnsupportedGeneralNameType(Exception):
    """Raised when a general-name type is not supported."""
+
+
class GeneralName(metaclass=abc.ABCMeta):
    """Abstract base class for all X.509 GeneralName variants."""

    @property
    @abc.abstractmethod
    def value(self) -> typing.Any:
        """
        Return the value of the object
        """
+
+
class RFC822Name(GeneralName):
    """An email address (rfc822Name) general name.

    The value must be ASCII; unicode values must be pre-encoded as an
    A-label (e.g. with idna) before construction.
    """

    def __init__(self, value: str) -> None:
        if isinstance(value, str):
            try:
                value.encode("ascii")
            except UnicodeEncodeError:
                raise ValueError(
                    "RFC822Name values should be passed as an A-label string. "
                    "This means unicode characters should be encoded via "
                    "a library like idna."
                )
        else:
            raise TypeError("value must be string")

        name, address = parseaddr(value)
        if name or not address:
            # parseaddr found a display name (e.g. "Name <email>") or the
            # entire value is an empty string.
            raise ValueError("Invalid rfc822name value")

        self._value = value

    @property
    def value(self) -> str:
        return self._value

    @classmethod
    def _init_without_validation(cls, value: str) -> "RFC822Name":
        # Internal fast path: bypasses __init__ validation for values that
        # are already known to be valid.
        instance = cls.__new__(cls)
        instance._value = value
        return instance

    def __repr__(self) -> str:
        # Fixed: repr template was stripped to an empty f-string; restored
        # per upstream pyca/cryptography.
        return f"<RFC822Name(value={self.value!r})>"

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, RFC822Name):
            return NotImplemented

        return self.value == other.value

    def __hash__(self) -> int:
        return hash(self.value)
+
+
class DNSName(GeneralName):
    """A DNS name general name.

    The value must be ASCII; unicode hostnames must be pre-encoded as an
    A-label (e.g. with idna) before construction.
    """

    def __init__(self, value: str) -> None:
        if isinstance(value, str):
            try:
                value.encode("ascii")
            except UnicodeEncodeError:
                raise ValueError(
                    "DNSName values should be passed as an A-label string. "
                    "This means unicode characters should be encoded via "
                    "a library like idna."
                )
        else:
            raise TypeError("value must be string")

        self._value = value

    @property
    def value(self) -> str:
        return self._value

    @classmethod
    def _init_without_validation(cls, value: str) -> "DNSName":
        # Internal fast path: bypasses __init__ validation for values that
        # are already known to be valid.
        instance = cls.__new__(cls)
        instance._value = value
        return instance

    def __repr__(self) -> str:
        # Fixed: repr template was stripped to an empty f-string; restored
        # per upstream pyca/cryptography.
        return f"<DNSName(value={self.value!r})>"

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, DNSName):
            return NotImplemented

        return self.value == other.value

    def __hash__(self) -> int:
        return hash(self.value)
+
+
class UniformResourceIdentifier(GeneralName):
    """A URI general name.

    The value must be ASCII; unicode must be pre-encoded as an A-label
    (e.g. with idna) before construction.
    """

    def __init__(self, value: str) -> None:
        if isinstance(value, str):
            try:
                value.encode("ascii")
            except UnicodeEncodeError:
                raise ValueError(
                    "URI values should be passed as an A-label string. "
                    "This means unicode characters should be encoded via "
                    "a library like idna."
                )
        else:
            raise TypeError("value must be string")

        self._value = value

    @property
    def value(self) -> str:
        return self._value

    @classmethod
    def _init_without_validation(
        cls, value: str
    ) -> "UniformResourceIdentifier":
        # Internal fast path: bypasses __init__ validation for values that
        # are already known to be valid.
        instance = cls.__new__(cls)
        instance._value = value
        return instance

    def __repr__(self) -> str:
        # Fixed: repr template was stripped to an empty f-string; restored
        # per upstream pyca/cryptography.
        return f"<UniformResourceIdentifier(value={self.value!r})>"

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, UniformResourceIdentifier):
            return NotImplemented

        return self.value == other.value

    def __hash__(self) -> int:
        return hash(self.value)
+
+
class DirectoryName(GeneralName):
    """A directoryName general name wrapping an X.509 ``Name``."""

    def __init__(self, value: Name) -> None:
        if not isinstance(value, Name):
            raise TypeError("value must be a Name")

        self._value = value

    @property
    def value(self) -> Name:
        return self._value

    def __repr__(self) -> str:
        # Fixed: repr template was stripped to an empty f-string; restored
        # per upstream pyca/cryptography.
        return f"<DirectoryName(value={self.value})>"

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, DirectoryName):
            return NotImplemented

        # Delegates to Name's equality.
        return self.value == other.value

    def __hash__(self) -> int:
        return hash(self.value)
+
+
class RegisteredID(GeneralName):
    """A registeredID general name wrapping an ``ObjectIdentifier``."""

    def __init__(self, value: ObjectIdentifier) -> None:
        if not isinstance(value, ObjectIdentifier):
            raise TypeError("value must be an ObjectIdentifier")

        self._value = value

    @property
    def value(self) -> ObjectIdentifier:
        return self._value

    def __repr__(self) -> str:
        # Fixed: repr template was stripped to an empty f-string; restored
        # per upstream pyca/cryptography.
        return f"<RegisteredID(value={self.value})>"

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, RegisteredID):
            return NotImplemented

        return self.value == other.value

    def __hash__(self) -> int:
        return hash(self.value)
+
+
class IPAddress(GeneralName):
    """An iPAddress general name.

    Accepts a single IPv4/IPv6 address or an IPv4/IPv6 network (the latter
    is used in NameConstraints).
    """

    def __init__(self, value: _IPAddressTypes) -> None:
        if not isinstance(
            value,
            (
                ipaddress.IPv4Address,
                ipaddress.IPv6Address,
                ipaddress.IPv4Network,
                ipaddress.IPv6Network,
            ),
        ):
            raise TypeError(
                "value must be an instance of ipaddress.IPv4Address, "
                "ipaddress.IPv6Address, ipaddress.IPv4Network, or "
                "ipaddress.IPv6Network"
            )

        self._value = value

    @property
    def value(self) -> _IPAddressTypes:
        return self._value

    def _packed(self) -> bytes:
        # Single addresses encode as their packed bytes; networks encode as
        # network address bytes followed by netmask bytes.
        if isinstance(
            self.value, (ipaddress.IPv4Address, ipaddress.IPv6Address)
        ):
            return self.value.packed
        else:
            return (
                self.value.network_address.packed + self.value.netmask.packed
            )

    def __repr__(self) -> str:
        # Fixed: repr template was stripped to an empty f-string; restored
        # per upstream pyca/cryptography.
        return f"<IPAddress(value={self.value})>"

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, IPAddress):
            return NotImplemented

        return self.value == other.value

    def __hash__(self) -> int:
        return hash(self.value)
+
+
class OtherName(GeneralName):
    """An otherName general name: an OID plus opaque DER ``value`` bytes."""

    def __init__(self, type_id: ObjectIdentifier, value: bytes) -> None:
        if not isinstance(type_id, ObjectIdentifier):
            raise TypeError("type_id must be an ObjectIdentifier")
        if not isinstance(value, bytes):
            raise TypeError("value must be a binary string")

        self._type_id = type_id
        self._value = value

    @property
    def type_id(self) -> ObjectIdentifier:
        return self._type_id

    @property
    def value(self) -> bytes:
        return self._value

    def __repr__(self) -> str:
        # Fixed: the repr template had been stripped to an empty string;
        # restored per upstream pyca/cryptography.
        return "<OtherName(type_id={}, value={!r})>".format(
            self.type_id, self.value
        )

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, OtherName):
            return NotImplemented

        return self.type_id == other.type_id and self.value == other.value

    def __hash__(self) -> int:
        return hash((self.type_id, self.value))
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/x509/name.py b/Voltage Vikings/lib/python3.9/site-packages/cryptography/x509/name.py
new file mode 100644
index 00000000..fd078202
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/x509/name.py
@@ -0,0 +1,460 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import binascii
+import re
+import sys
+import typing
+import warnings
+
+from cryptography import utils
+from cryptography.hazmat.bindings._rust import x509 as rust_x509
+from cryptography.x509.oid import NameOID, ObjectIdentifier
+
+
class _ASN1Type(utils.Enum):
    """ASN.1 universal tag numbers for Name attribute value string types."""

    BitString = 3
    OctetString = 4
    UTF8String = 12
    NumericString = 18
    PrintableString = 19
    T61String = 20
    IA5String = 22
    UTCTime = 23
    GeneralizedTime = 24
    VisibleString = 26
    UniversalString = 28
    BMPString = 30
+
+
# Reverse lookup: ASN.1 tag number -> _ASN1Type member.
_ASN1_TYPE_TO_ENUM = {i.value: i for i in _ASN1Type}
# OIDs whose attribute values default to a non-UTF8String ASN.1 type; all
# other OIDs default to UTF8String (see NameAttribute.__init__).
_NAMEOID_DEFAULT_TYPE: typing.Dict[ObjectIdentifier, _ASN1Type] = {
    NameOID.COUNTRY_NAME: _ASN1Type.PrintableString,
    NameOID.JURISDICTION_COUNTRY_NAME: _ASN1Type.PrintableString,
    NameOID.SERIAL_NUMBER: _ASN1Type.PrintableString,
    NameOID.DN_QUALIFIER: _ASN1Type.PrintableString,
    NameOID.EMAIL_ADDRESS: _ASN1Type.IA5String,
    NameOID.DOMAIN_COMPONENT: _ASN1Type.IA5String,
}

# Type aliases for OID <-> short-name mapping arguments.
_OidNameMap = typing.Mapping[ObjectIdentifier, str]
_NameOidMap = typing.Mapping[str, ObjectIdentifier]

#: Short attribute names from RFC 4514:
#: https://tools.ietf.org/html/rfc4514#page-7
_NAMEOID_TO_NAME: _OidNameMap = {
    NameOID.COMMON_NAME: "CN",
    NameOID.LOCALITY_NAME: "L",
    NameOID.STATE_OR_PROVINCE_NAME: "ST",
    NameOID.ORGANIZATION_NAME: "O",
    NameOID.ORGANIZATIONAL_UNIT_NAME: "OU",
    NameOID.COUNTRY_NAME: "C",
    NameOID.STREET_ADDRESS: "STREET",
    NameOID.DOMAIN_COMPONENT: "DC",
    NameOID.USER_ID: "UID",
}
# Inverse mapping, used when parsing RFC 4514 strings.
_NAME_TO_NAMEOID = {v: k for k, v in _NAMEOID_TO_NAME.items()}
+
+
+def _escape_dn_value(val: typing.Union[str, bytes]) -> str:
+ """Escape special characters in RFC4514 Distinguished Name value."""
+
+ if not val:
+ return ""
+
+ # RFC 4514 Section 2.4 defines the value as being the # (U+0023) character
+ # followed by the hexadecimal encoding of the octets.
+ if isinstance(val, bytes):
+ return "#" + binascii.hexlify(val).decode("utf8")
+
+ # See https://tools.ietf.org/html/rfc4514#section-2.4
+ val = val.replace("\\", "\\\\")
+ val = val.replace('"', '\\"')
+ val = val.replace("+", "\\+")
+ val = val.replace(",", "\\,")
+ val = val.replace(";", "\\;")
+ val = val.replace("<", "\\<")
+ val = val.replace(">", "\\>")
+ val = val.replace("\0", "\\00")
+
+ if val[0] in ("#", " "):
+ val = "\\" + val
+ if val[-1] == " ":
+ val = val[:-1] + "\\ "
+
+ return val
+
+
def _unescape_dn_value(val: str) -> str:
    """Undo RFC 4514 escaping in a Distinguished Name value string."""
    if not val:
        return ""

    # See https://tools.ietf.org/html/rfc4514#section-3
    # special = escaped / SPACE / SHARP / EQUALS
    # escaped = DQUOTE / PLUS / COMMA / SEMI / LANGLE / RANGLE
    def _replace(match: "re.Match") -> str:
        token = match.group(1)
        if len(token) == 1:
            # A single escaped special character: keep it verbatim.
            return token
        # A two-hex-digit escape: decode to the corresponding character.
        return chr(int(token, 16))

    return _RFC4514NameParser._PAIR_RE.sub(_replace, val)
+
+
class NameAttribute:
    """A single attribute-value assertion (AVA) in an X.509 name.

    Pairs an ``ObjectIdentifier`` with a string (or, for BitString types,
    bytes) value and an ASN.1 string type.
    """

    def __init__(
        self,
        oid: ObjectIdentifier,
        value: typing.Union[str, bytes],
        _type: typing.Optional[_ASN1Type] = None,
        *,
        _validate: bool = True,
    ) -> None:
        if not isinstance(oid, ObjectIdentifier):
            raise TypeError(
                "oid argument must be an ObjectIdentifier instance."
            )
        if _type == _ASN1Type.BitString:
            # BitString values are only valid for X500 unique identifiers
            # and must be raw bytes.
            if oid != NameOID.X500_UNIQUE_IDENTIFIER:
                raise TypeError(
                    "oid must be X500_UNIQUE_IDENTIFIER for BitString type."
                )
            if not isinstance(value, bytes):
                raise TypeError("value must be bytes for BitString")
        else:
            if not isinstance(value, str):
                raise TypeError("value argument must be a str")

        if (
            oid == NameOID.COUNTRY_NAME
            or oid == NameOID.JURISDICTION_COUNTRY_NAME
        ):
            assert isinstance(value, str)
            c_len = len(value.encode("utf8"))
            if c_len != 2 and _validate is True:
                raise ValueError(
                    "Country name must be a 2 character country code"
                )
            elif c_len != 2:
                # _validate=False: only warn about non-conforming lengths.
                warnings.warn(
                    "Country names should be two characters, but the "
                    "attribute is {} characters in length.".format(c_len),
                    stacklevel=2,
                )

        # The appropriate ASN1 string type varies by OID and is defined across
        # multiple RFCs including 2459, 3280, and 5280. In general UTF8String
        # is preferred (2459), but 3280 and 5280 specify several OIDs with
        # alternate types. This means when we see the sentinel value we need
        # to look up whether the OID has a non-UTF8 type. If it does, set it
        # to that. Otherwise, UTF8!
        if _type is None:
            _type = _NAMEOID_DEFAULT_TYPE.get(oid, _ASN1Type.UTF8String)

        if not isinstance(_type, _ASN1Type):
            raise TypeError("_type must be from the _ASN1Type enum")

        self._oid = oid
        self._value = value
        self._type = _type

    @property
    def oid(self) -> ObjectIdentifier:
        return self._oid

    @property
    def value(self) -> typing.Union[str, bytes]:
        return self._value

    @property
    def rfc4514_attribute_name(self) -> str:
        """
        The short attribute name (for example "CN") if available,
        otherwise the OID dotted string.
        """
        return _NAMEOID_TO_NAME.get(self.oid, self.oid.dotted_string)

    def rfc4514_string(
        self, attr_name_overrides: typing.Optional[_OidNameMap] = None
    ) -> str:
        """
        Format as RFC4514 Distinguished Name string.

        Use short attribute name if available, otherwise fall back to OID
        dotted string.
        """
        attr_name = (
            attr_name_overrides.get(self.oid) if attr_name_overrides else None
        )
        if attr_name is None:
            attr_name = self.rfc4514_attribute_name

        return f"{attr_name}={_escape_dn_value(self.value)}"

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, NameAttribute):
            return NotImplemented

        return self.oid == other.oid and self.value == other.value

    def __hash__(self) -> int:
        return hash((self.oid, self.value))

    def __repr__(self) -> str:
        # Fixed: the repr template had been stripped to an empty string;
        # restored per upstream pyca/cryptography.
        return "<NameAttribute(oid={0.oid}, value={0.value!r})>".format(self)
+
+
class RelativeDistinguishedName:
    """An ordered, duplicate-free set of ``NameAttribute`` objects (one RDN)."""

    def __init__(self, attributes: typing.Iterable[NameAttribute]):
        attributes = list(attributes)
        if not attributes:
            raise ValueError("a relative distinguished name cannot be empty")
        if not all(isinstance(x, NameAttribute) for x in attributes):
            raise TypeError("attributes must be an iterable of NameAttribute")

        # Keep list and frozenset to preserve attribute order where it matters
        self._attributes = attributes
        self._attribute_set = frozenset(attributes)

        if len(self._attribute_set) != len(attributes):
            raise ValueError("duplicate attributes are not allowed")

    def get_attributes_for_oid(
        self, oid: ObjectIdentifier
    ) -> typing.List[NameAttribute]:
        """Return all attributes in this RDN with the given OID."""
        return [i for i in self if i.oid == oid]

    def rfc4514_string(
        self, attr_name_overrides: typing.Optional[_OidNameMap] = None
    ) -> str:
        """
        Format as RFC4514 Distinguished Name string.

        Within each RDN, attributes are joined by '+', although that is rarely
        used in certificates.
        """
        return "+".join(
            attr.rfc4514_string(attr_name_overrides)
            for attr in self._attributes
        )

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, RelativeDistinguishedName):
            return NotImplemented

        # Order-insensitive comparison via the frozenset.
        return self._attribute_set == other._attribute_set

    def __hash__(self) -> int:
        return hash(self._attribute_set)

    def __iter__(self) -> typing.Iterator[NameAttribute]:
        return iter(self._attributes)

    def __len__(self) -> int:
        return len(self._attributes)

    def __repr__(self) -> str:
        # Fixed: repr template was stripped to an empty f-string; restored
        # per upstream pyca/cryptography.
        return f"<RelativeDistinguishedName({self.rfc4514_string()})>"
+
+
class Name:
    """An X.509 Name: an ordered sequence of RelativeDistinguishedNames."""

    @typing.overload
    def __init__(self, attributes: typing.Iterable[NameAttribute]) -> None:
        ...

    @typing.overload
    def __init__(
        self, attributes: typing.Iterable[RelativeDistinguishedName]
    ) -> None:
        ...

    def __init__(
        self,
        attributes: typing.Iterable[
            typing.Union[NameAttribute, RelativeDistinguishedName]
        ],
    ) -> None:
        attributes = list(attributes)
        if all(isinstance(x, NameAttribute) for x in attributes):
            # A flat list of attributes: wrap each in a single-element RDN.
            self._attributes = [
                RelativeDistinguishedName([typing.cast(NameAttribute, x)])
                for x in attributes
            ]
        elif all(isinstance(x, RelativeDistinguishedName) for x in attributes):
            self._attributes = typing.cast(
                typing.List[RelativeDistinguishedName], attributes
            )
        else:
            raise TypeError(
                "attributes must be a list of NameAttribute"
                " or a list RelativeDistinguishedName"
            )

    @classmethod
    def from_rfc4514_string(
        cls,
        data: str,
        attr_name_overrides: typing.Optional[_NameOidMap] = None,
    ) -> "Name":
        """Parse an RFC 4514 string (e.g. 'CN=a,O=b') into a Name."""
        return _RFC4514NameParser(data, attr_name_overrides or {}).parse()

    def rfc4514_string(
        self, attr_name_overrides: typing.Optional[_OidNameMap] = None
    ) -> str:
        """
        Format as RFC4514 Distinguished Name string.
        For example 'CN=foobar.com,O=Foo Corp,C=US'

        An X.509 name is a two-level structure: a list of sets of attributes.
        Each list element is separated by ',' and within each list element, set
        elements are separated by '+'. The latter is almost never used in
        real world certificates. According to RFC4514 section 2.1 the
        RDNSequence must be reversed when converting to string representation.
        """
        return ",".join(
            attr.rfc4514_string(attr_name_overrides)
            for attr in reversed(self._attributes)
        )

    def get_attributes_for_oid(
        self, oid: ObjectIdentifier
    ) -> typing.List[NameAttribute]:
        """Return all attributes across all RDNs with the given OID."""
        return [i for i in self if i.oid == oid]

    @property
    def rdns(self) -> typing.List[RelativeDistinguishedName]:
        return self._attributes

    def public_bytes(self, backend: typing.Any = None) -> bytes:
        # 'backend' is accepted for backward compatibility but unused; the
        # encoding is delegated to the Rust binding.
        return rust_x509.encode_name_bytes(self)

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, Name):
            return NotImplemented

        return self._attributes == other._attributes

    def __hash__(self) -> int:
        # TODO: this is relatively expensive, if this looks like a bottleneck
        # for you, consider optimizing!
        return hash(tuple(self._attributes))

    def __iter__(self) -> typing.Iterator[NameAttribute]:
        # Iterates individual attributes (AVAs), flattening the RDN structure.
        for rdn in self._attributes:
            for ava in rdn:
                yield ava

    def __len__(self) -> int:
        return sum(len(rdn) for rdn in self._attributes)

    def __repr__(self) -> str:
        # Fixed: repr template was stripped to an empty f-string; restored
        # per upstream pyca/cryptography.
        rdns = ",".join(attr.rfc4514_string() for attr in self._attributes)
        return f"<Name({rdns})>"
+
+
class _RFC4514NameParser:
    """Recursive-descent parser for RFC 4514 Distinguished Name strings."""

    # OID in dotted-decimal form, e.g. "2.5.4.3".
    _OID_RE = re.compile(r"(0|([1-9]\d*))(\.(0|([1-9]\d*)))+")
    # Attribute descriptor (short name), e.g. "CN".
    _DESCR_RE = re.compile(r"[a-zA-Z][a-zA-Z\d-]*")

    # Escape pair: backslash followed by a special char or two hex digits.
    # NOTE(review): the hex class is [\da-zA-Z]{2}, which also matches
    # non-hex letters (g-z); chr(int(..., 16)) would then raise — presumably
    # matching upstream; confirm before tightening.
    _PAIR = r"\\([\\ #=\"\+,;<>]|[\da-zA-Z]{2})"
    _PAIR_RE = re.compile(_PAIR)
    # Character classes for leading / middle / trailing value characters,
    # per the RFC 4514 grammar (section 3).
    _LUTF1 = r"[\x01-\x1f\x21\x24-\x2A\x2D-\x3A\x3D\x3F-\x5B\x5D-\x7F]"
    _SUTF1 = r"[\x01-\x21\x23-\x2A\x2D-\x3A\x3D\x3F-\x5B\x5D-\x7F]"
    _TUTF1 = r"[\x01-\x1F\x21\x23-\x2A\x2D-\x3A\x3D\x3F-\x5B\x5D-\x7F]"
    _UTFMB = rf"[\x80-{chr(sys.maxunicode)}]"
    _LEADCHAR = rf"{_LUTF1}|{_UTFMB}"
    _STRINGCHAR = rf"{_SUTF1}|{_UTFMB}"
    _TRAILCHAR = rf"{_TUTF1}|{_UTFMB}"
    _STRING_RE = re.compile(
        rf"""
        (
            ({_LEADCHAR}|{_PAIR})
            (
                ({_STRINGCHAR}|{_PAIR})*
                ({_TRAILCHAR}|{_PAIR})
            )?
        )?
        """,
        re.VERBOSE,
    )
    # '#' followed by pairs of hex digits (same permissive class as _PAIR).
    _HEXSTRING_RE = re.compile(r"#([\da-zA-Z]{2})+")

    def __init__(self, data: str, attr_name_overrides: _NameOidMap) -> None:
        self._data = data
        # Current read position within _data.
        self._idx = 0

        self._attr_name_overrides = attr_name_overrides

    def _has_data(self) -> bool:
        # True while there is unconsumed input.
        return self._idx < len(self._data)

    def _peek(self) -> typing.Optional[str]:
        # Next character without consuming it, or None at end of input.
        if self._has_data():
            return self._data[self._idx]
        return None

    def _read_char(self, ch: str) -> None:
        # Consume exactly the expected character or fail.
        if self._peek() != ch:
            raise ValueError
        self._idx += 1

    def _read_re(self, pat) -> str:
        # Consume and return the text matched by `pat` at the current
        # position; fails if the pattern does not match here.
        match = pat.match(self._data, pos=self._idx)
        if match is None:
            raise ValueError
        val = match.group()
        self._idx += len(val)
        return val

    def parse(self) -> Name:
        """
        Parses the `data` string and converts it to a Name.

        According to RFC4514 section 2.1 the RDNSequence must be
        reversed when converting to string representation. So, when
        we parse it, we need to reverse again to get the RDNs on the
        correct order.
        """
        rdns = [self._parse_rdn()]

        while self._has_data():
            self._read_char(",")
            rdns.append(self._parse_rdn())

        return Name(reversed(rdns))

    def _parse_rdn(self) -> RelativeDistinguishedName:
        # One RDN: attribute assertions joined by '+'.
        nas = [self._parse_na()]
        while self._peek() == "+":
            self._read_char("+")
            nas.append(self._parse_na())

        return RelativeDistinguishedName(nas)

    def _parse_na(self) -> NameAttribute:
        # One attribute assertion: (OID | short name) '=' value.
        try:
            oid_value = self._read_re(self._OID_RE)
        except ValueError:
            # Not a dotted OID: try a descriptor name, resolved via the
            # caller's overrides first, then the built-in short-name table.
            name = self._read_re(self._DESCR_RE)
            oid = self._attr_name_overrides.get(
                name, _NAME_TO_NAMEOID.get(name)
            )
            if oid is None:
                raise ValueError
        else:
            oid = ObjectIdentifier(oid_value)

        self._read_char("=")
        if self._peek() == "#":
            # Hex-encoded value: strip '#', unhexlify, decode.
            # NOTE(review): decoded with the default codec (UTF-8) — confirm
            # this is intended for non-UTF8 ASN.1 string types.
            value = self._read_re(self._HEXSTRING_RE)
            value = binascii.unhexlify(value[1:]).decode()
        else:
            raw_value = self._read_re(self._STRING_RE)
            value = _unescape_dn_value(raw_value)

        return NameAttribute(oid, value)
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/x509/ocsp.py b/Voltage Vikings/lib/python3.9/site-packages/cryptography/x509/ocsp.py
new file mode 100644
index 00000000..857e75af
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/x509/ocsp.py
@@ -0,0 +1,621 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+
+import abc
+import datetime
+import typing
+
+from cryptography import utils, x509
+from cryptography.hazmat.bindings._rust import ocsp
+from cryptography.hazmat.primitives import hashes, serialization
+from cryptography.hazmat.primitives.asymmetric.types import (
+ CertificateIssuerPrivateKeyTypes,
+)
+from cryptography.x509.base import (
+ _EARLIEST_UTC_TIME,
+ _convert_to_naive_utc_time,
+ _reject_duplicate_extension,
+)
+
+
class OCSPResponderEncoding(utils.Enum):
    """How the responder identifies itself in an OCSP response."""

    HASH = "By Hash"
    NAME = "By Name"
+
+
class OCSPResponseStatus(utils.Enum):
    """Top-level OCSP response status codes (note: 4 is unassigned)."""

    SUCCESSFUL = 0
    MALFORMED_REQUEST = 1
    INTERNAL_ERROR = 2
    TRY_LATER = 3
    SIG_REQUIRED = 5
    UNAUTHORIZED = 6
+
+
# Hash algorithm classes accepted for OCSP issuer name/key hashes.
_ALLOWED_HASHES = (
    hashes.SHA1,
    hashes.SHA224,
    hashes.SHA256,
    hashes.SHA384,
    hashes.SHA512,
)


def _verify_algorithm(algorithm: hashes.HashAlgorithm) -> None:
    """Raise ValueError if ``algorithm`` is not an OCSP-permitted hash."""
    if isinstance(algorithm, _ALLOWED_HASHES):
        return
    raise ValueError(
        "Algorithm must be SHA1, SHA224, SHA256, SHA384, or SHA512"
    )
+
+
class OCSPCertStatus(utils.Enum):
    """Certificate status values carried in a SingleResponse."""

    GOOD = 0
    REVOKED = 1
    UNKNOWN = 2
+
+
class _SingleResponse:
    """Internal, validated container for one OCSP SingleResponse entry.

    Performs all type/consistency checks up front; the Rust binding later
    reads the private attributes when encoding the response.
    """

    def __init__(
        self,
        cert: x509.Certificate,
        issuer: x509.Certificate,
        algorithm: hashes.HashAlgorithm,
        cert_status: OCSPCertStatus,
        this_update: datetime.datetime,
        next_update: typing.Optional[datetime.datetime],
        revocation_time: typing.Optional[datetime.datetime],
        revocation_reason: typing.Optional[x509.ReasonFlags],
    ):
        if not isinstance(cert, x509.Certificate) or not isinstance(
            issuer, x509.Certificate
        ):
            raise TypeError("cert and issuer must be a Certificate")

        _verify_algorithm(algorithm)
        if not isinstance(this_update, datetime.datetime):
            raise TypeError("this_update must be a datetime object")
        if next_update is not None and not isinstance(
            next_update, datetime.datetime
        ):
            raise TypeError("next_update must be a datetime object or None")

        self._cert = cert
        self._issuer = issuer
        self._algorithm = algorithm
        self._this_update = this_update
        self._next_update = next_update

        if not isinstance(cert_status, OCSPCertStatus):
            raise TypeError(
                "cert_status must be an item from the OCSPCertStatus enum"
            )
        if cert_status is not OCSPCertStatus.REVOKED:
            # Revocation details are only meaningful for REVOKED status.
            if revocation_time is not None:
                raise ValueError(
                    "revocation_time can only be provided if the certificate "
                    "is revoked"
                )
            if revocation_reason is not None:
                raise ValueError(
                    "revocation_reason can only be provided if the certificate"
                    " is revoked"
                )
        else:
            if not isinstance(revocation_time, datetime.datetime):
                raise TypeError("revocation_time must be a datetime object")

            # Normalize to naive UTC before range-checking.
            revocation_time = _convert_to_naive_utc_time(revocation_time)
            if revocation_time < _EARLIEST_UTC_TIME:
                raise ValueError(
                    "The revocation_time must be on or after"
                    " 1950 January 1."
                )

            if revocation_reason is not None and not isinstance(
                revocation_reason, x509.ReasonFlags
            ):
                raise TypeError(
                    "revocation_reason must be an item from the ReasonFlags "
                    "enum or None"
                )

        self._cert_status = cert_status
        self._revocation_time = revocation_time
        self._revocation_reason = revocation_reason
+ self._revocation_reason = revocation_reason
+
+
class OCSPRequest(metaclass=abc.ABCMeta):
    """Abstract interface for a parsed or built OCSP request."""

    @property
    @abc.abstractmethod
    def issuer_key_hash(self) -> bytes:
        """
        The hash of the issuer public key
        """

    @property
    @abc.abstractmethod
    def issuer_name_hash(self) -> bytes:
        """
        The hash of the issuer name
        """

    @property
    @abc.abstractmethod
    def hash_algorithm(self) -> hashes.HashAlgorithm:
        """
        The hash algorithm used in the issuer name and key hashes
        """

    @property
    @abc.abstractmethod
    def serial_number(self) -> int:
        """
        The serial number of the cert whose status is being checked
        """

    @abc.abstractmethod
    def public_bytes(self, encoding: serialization.Encoding) -> bytes:
        """
        Serializes the request to DER
        """

    @property
    @abc.abstractmethod
    def extensions(self) -> x509.Extensions:
        """
        The list of request extensions. Not single request extensions.
        """
+
+
class OCSPSingleResponse(metaclass=abc.ABCMeta):
    """Abstract interface for one SingleResponse within an OCSP response."""

    @property
    @abc.abstractmethod
    def certificate_status(self) -> OCSPCertStatus:
        """
        The status of the certificate (an element from the OCSPCertStatus enum)
        """

    @property
    @abc.abstractmethod
    def revocation_time(self) -> typing.Optional[datetime.datetime]:
        """
        The date of when the certificate was revoked or None if not
        revoked.
        """

    @property
    @abc.abstractmethod
    def revocation_reason(self) -> typing.Optional[x509.ReasonFlags]:
        """
        The reason the certificate was revoked or None if not specified or
        not revoked.
        """

    @property
    @abc.abstractmethod
    def this_update(self) -> datetime.datetime:
        """
        The most recent time at which the status being indicated is known by
        the responder to have been correct
        """

    @property
    @abc.abstractmethod
    def next_update(self) -> typing.Optional[datetime.datetime]:
        """
        The time when newer information will be available
        """

    @property
    @abc.abstractmethod
    def issuer_key_hash(self) -> bytes:
        """
        The hash of the issuer public key
        """

    @property
    @abc.abstractmethod
    def issuer_name_hash(self) -> bytes:
        """
        The hash of the issuer name
        """

    @property
    @abc.abstractmethod
    def hash_algorithm(self) -> hashes.HashAlgorithm:
        """
        The hash algorithm used in the issuer name and key hashes
        """

    @property
    @abc.abstractmethod
    def serial_number(self) -> int:
        """
        The serial number of the cert whose status is being checked
        """
+
+
class OCSPResponse(metaclass=abc.ABCMeta):
    """Abstract interface for a parsed or built OCSP response.

    The certificate-status accessors mirror OCSPSingleResponse for the
    common single-response case; `responses` iterates all entries.
    """

    @property
    @abc.abstractmethod
    def responses(self) -> typing.Iterator[OCSPSingleResponse]:
        """
        An iterator over the individual SINGLERESP structures in the
        response
        """

    @property
    @abc.abstractmethod
    def response_status(self) -> OCSPResponseStatus:
        """
        The status of the response. This is a value from the OCSPResponseStatus
        enumeration
        """

    @property
    @abc.abstractmethod
    def signature_algorithm_oid(self) -> x509.ObjectIdentifier:
        """
        The ObjectIdentifier of the signature algorithm
        """

    @property
    @abc.abstractmethod
    def signature_hash_algorithm(
        self,
    ) -> typing.Optional[hashes.HashAlgorithm]:
        """
        Returns a HashAlgorithm corresponding to the type of the digest signed
        """

    @property
    @abc.abstractmethod
    def signature(self) -> bytes:
        """
        The signature bytes
        """

    @property
    @abc.abstractmethod
    def tbs_response_bytes(self) -> bytes:
        """
        The tbsResponseData bytes
        """

    @property
    @abc.abstractmethod
    def certificates(self) -> typing.List[x509.Certificate]:
        """
        A list of certificates used to help build a chain to verify the OCSP
        response. This situation occurs when the OCSP responder uses a delegate
        certificate.
        """

    @property
    @abc.abstractmethod
    def responder_key_hash(self) -> typing.Optional[bytes]:
        """
        The responder's key hash or None
        """

    @property
    @abc.abstractmethod
    def responder_name(self) -> typing.Optional[x509.Name]:
        """
        The responder's Name or None
        """

    @property
    @abc.abstractmethod
    def produced_at(self) -> datetime.datetime:
        """
        The time the response was produced
        """

    @property
    @abc.abstractmethod
    def certificate_status(self) -> OCSPCertStatus:
        """
        The status of the certificate (an element from the OCSPCertStatus enum)
        """

    @property
    @abc.abstractmethod
    def revocation_time(self) -> typing.Optional[datetime.datetime]:
        """
        The date of when the certificate was revoked or None if not
        revoked.
        """

    @property
    @abc.abstractmethod
    def revocation_reason(self) -> typing.Optional[x509.ReasonFlags]:
        """
        The reason the certificate was revoked or None if not specified or
        not revoked.
        """

    @property
    @abc.abstractmethod
    def this_update(self) -> datetime.datetime:
        """
        The most recent time at which the status being indicated is known by
        the responder to have been correct
        """

    @property
    @abc.abstractmethod
    def next_update(self) -> typing.Optional[datetime.datetime]:
        """
        The time when newer information will be available
        """

    @property
    @abc.abstractmethod
    def issuer_key_hash(self) -> bytes:
        """
        The hash of the issuer public key
        """

    @property
    @abc.abstractmethod
    def issuer_name_hash(self) -> bytes:
        """
        The hash of the issuer name
        """

    @property
    @abc.abstractmethod
    def hash_algorithm(self) -> hashes.HashAlgorithm:
        """
        The hash algorithm used in the issuer name and key hashes
        """

    @property
    @abc.abstractmethod
    def serial_number(self) -> int:
        """
        The serial number of the cert whose status is being checked
        """

    @property
    @abc.abstractmethod
    def extensions(self) -> x509.Extensions:
        """
        The list of response extensions. Not single response extensions.
        """

    @property
    @abc.abstractmethod
    def single_extensions(self) -> x509.Extensions:
        """
        The list of single response extensions. Not response extensions.
        """

    @abc.abstractmethod
    def public_bytes(self, encoding: serialization.Encoding) -> bytes:
        """
        Serializes the response to DER
        """
+
+
class OCSPRequestBuilder:
    """Immutable builder for OCSP requests.

    Each mutator returns a new builder instance; exactly one certificate
    (by object or by hash) must be added before building.
    """

    def __init__(
        self,
        request: typing.Optional[
            typing.Tuple[
                x509.Certificate, x509.Certificate, hashes.HashAlgorithm
            ]
        ] = None,
        request_hash: typing.Optional[
            typing.Tuple[bytes, bytes, int, hashes.HashAlgorithm]
        ] = None,
        # NOTE(review): mutable default argument; safe here because the list
        # is never mutated in place (mutators build new lists), but a tuple
        # or None default would be more conventional.
        extensions: typing.List[x509.Extension[x509.ExtensionType]] = [],
    ) -> None:
        self._request = request
        self._request_hash = request_hash
        self._extensions = extensions

    def add_certificate(
        self,
        cert: x509.Certificate,
        issuer: x509.Certificate,
        algorithm: hashes.HashAlgorithm,
    ) -> "OCSPRequestBuilder":
        """Return a new builder targeting ``cert`` (validated against types)."""
        if self._request is not None or self._request_hash is not None:
            raise ValueError("Only one certificate can be added to a request")

        _verify_algorithm(algorithm)
        if not isinstance(cert, x509.Certificate) or not isinstance(
            issuer, x509.Certificate
        ):
            raise TypeError("cert and issuer must be a Certificate")

        return OCSPRequestBuilder(
            (cert, issuer, algorithm), self._request_hash, self._extensions
        )

    def add_certificate_by_hash(
        self,
        issuer_name_hash: bytes,
        issuer_key_hash: bytes,
        serial_number: int,
        algorithm: hashes.HashAlgorithm,
    ) -> "OCSPRequestBuilder":
        """Return a new builder targeting a cert via precomputed hashes."""
        if self._request is not None or self._request_hash is not None:
            raise ValueError("Only one certificate can be added to a request")

        if not isinstance(serial_number, int):
            raise TypeError("serial_number must be an integer")

        _verify_algorithm(algorithm)
        utils._check_bytes("issuer_name_hash", issuer_name_hash)
        utils._check_bytes("issuer_key_hash", issuer_key_hash)
        # Both hashes must match the chosen algorithm's digest size.
        if algorithm.digest_size != len(
            issuer_name_hash
        ) or algorithm.digest_size != len(issuer_key_hash):
            raise ValueError(
                "issuer_name_hash and issuer_key_hash must be the same length "
                "as the digest size of the algorithm"
            )

        return OCSPRequestBuilder(
            self._request,
            (issuer_name_hash, issuer_key_hash, serial_number, algorithm),
            self._extensions,
        )

    def add_extension(
        self, extval: x509.ExtensionType, critical: bool
    ) -> "OCSPRequestBuilder":
        """Return a new builder with an additional (non-duplicate) extension."""
        if not isinstance(extval, x509.ExtensionType):
            raise TypeError("extension must be an ExtensionType")

        extension = x509.Extension(extval.oid, critical, extval)
        _reject_duplicate_extension(extension, self._extensions)

        return OCSPRequestBuilder(
            self._request, self._request_hash, self._extensions + [extension]
        )

    def build(self) -> OCSPRequest:
        """Build the request (delegates encoding to the Rust binding)."""
        if self._request is None and self._request_hash is None:
            raise ValueError("You must add a certificate before building")

        return ocsp.create_ocsp_request(self)
+
+
class OCSPResponseBuilder:
    """Immutable builder for OCSP responses.

    Each mutator returns a new builder; a response entry and a responder_id
    are required before signing.
    """

    def __init__(
        self,
        response: typing.Optional[_SingleResponse] = None,
        responder_id: typing.Optional[
            typing.Tuple[x509.Certificate, OCSPResponderEncoding]
        ] = None,
        certs: typing.Optional[typing.List[x509.Certificate]] = None,
        # NOTE(review): mutable default argument; safe here because the list
        # is never mutated in place (mutators build new lists), but a tuple
        # or None default would be more conventional.
        extensions: typing.List[x509.Extension[x509.ExtensionType]] = [],
    ):
        self._response = response
        self._responder_id = responder_id
        self._certs = certs
        self._extensions = extensions

    def add_response(
        self,
        cert: x509.Certificate,
        issuer: x509.Certificate,
        algorithm: hashes.HashAlgorithm,
        cert_status: OCSPCertStatus,
        this_update: datetime.datetime,
        next_update: typing.Optional[datetime.datetime],
        revocation_time: typing.Optional[datetime.datetime],
        revocation_reason: typing.Optional[x509.ReasonFlags],
    ) -> "OCSPResponseBuilder":
        """Return a new builder with the (single) certificate status entry."""
        if self._response is not None:
            raise ValueError("Only one response per OCSPResponse.")

        # _SingleResponse performs all argument validation.
        singleresp = _SingleResponse(
            cert,
            issuer,
            algorithm,
            cert_status,
            this_update,
            next_update,
            revocation_time,
            revocation_reason,
        )
        return OCSPResponseBuilder(
            singleresp,
            self._responder_id,
            self._certs,
            self._extensions,
        )

    def responder_id(
        self, encoding: OCSPResponderEncoding, responder_cert: x509.Certificate
    ) -> "OCSPResponseBuilder":
        """Return a new builder with the responder identity set."""
        if self._responder_id is not None:
            raise ValueError("responder_id can only be set once")
        if not isinstance(responder_cert, x509.Certificate):
            raise TypeError("responder_cert must be a Certificate")
        if not isinstance(encoding, OCSPResponderEncoding):
            raise TypeError(
                "encoding must be an element from OCSPResponderEncoding"
            )

        return OCSPResponseBuilder(
            self._response,
            (responder_cert, encoding),
            self._certs,
            self._extensions,
        )

    def certificates(
        self, certs: typing.Iterable[x509.Certificate]
    ) -> "OCSPResponseBuilder":
        """Return a new builder with additional chain certificates attached."""
        if self._certs is not None:
            raise ValueError("certificates may only be set once")
        certs = list(certs)
        if len(certs) == 0:
            raise ValueError("certs must not be an empty list")
        if not all(isinstance(x, x509.Certificate) for x in certs):
            raise TypeError("certs must be a list of Certificates")
        return OCSPResponseBuilder(
            self._response,
            self._responder_id,
            certs,
            self._extensions,
        )

    def add_extension(
        self, extval: x509.ExtensionType, critical: bool
    ) -> "OCSPResponseBuilder":
        """Return a new builder with an additional (non-duplicate) extension."""
        if not isinstance(extval, x509.ExtensionType):
            raise TypeError("extension must be an ExtensionType")

        extension = x509.Extension(extval.oid, critical, extval)
        _reject_duplicate_extension(extension, self._extensions)

        return OCSPResponseBuilder(
            self._response,
            self._responder_id,
            self._certs,
            self._extensions + [extension],
        )

    def sign(
        self,
        private_key: CertificateIssuerPrivateKeyTypes,
        algorithm: typing.Optional[hashes.HashAlgorithm],
    ) -> OCSPResponse:
        """Sign and encode a SUCCESSFUL response (via the Rust binding)."""
        if self._response is None:
            raise ValueError("You must add a response before signing")
        if self._responder_id is None:
            raise ValueError("You must add a responder_id before signing")

        return ocsp.create_ocsp_response(
            OCSPResponseStatus.SUCCESSFUL, self, private_key, algorithm
        )

    @classmethod
    def build_unsuccessful(
        cls, response_status: OCSPResponseStatus
    ) -> OCSPResponse:
        """Build an unsigned, non-SUCCESSFUL response (no response data)."""
        if not isinstance(response_status, OCSPResponseStatus):
            raise TypeError(
                "response_status must be an item from OCSPResponseStatus"
            )
        if response_status is OCSPResponseStatus.SUCCESSFUL:
            raise ValueError("response_status cannot be SUCCESSFUL")

        return ocsp.create_ocsp_response(response_status, None, None, None)
+
+
def load_der_ocsp_request(data: bytes) -> OCSPRequest:
    """Deserialize a DER-encoded OCSP request (delegates to the Rust binding)."""
    return ocsp.load_der_ocsp_request(data)
+
+
def load_der_ocsp_response(data: bytes) -> OCSPResponse:
    """Deserialize a DER-encoded OCSP response (delegates to the Rust binding)."""
    return ocsp.load_der_ocsp_response(data)
diff --git a/Voltage Vikings/lib/python3.9/site-packages/cryptography/x509/oid.py b/Voltage Vikings/lib/python3.9/site-packages/cryptography/x509/oid.py
new file mode 100644
index 00000000..0d91a546
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/cryptography/x509/oid.py
@@ -0,0 +1,31 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from cryptography.hazmat._oid import (
+ AttributeOID,
+ AuthorityInformationAccessOID,
+ CertificatePoliciesOID,
+ CRLEntryExtensionOID,
+ ExtendedKeyUsageOID,
+ ExtensionOID,
+ NameOID,
+ ObjectIdentifier,
+ OCSPExtensionOID,
+ SignatureAlgorithmOID,
+ SubjectInformationAccessOID,
+)
+
+# Public API of this module: every name re-exported from
+# cryptography.hazmat._oid above.  Keep this list in sync with the import.
+__all__ = [
+    "AttributeOID",
+    "AuthorityInformationAccessOID",
+    "CRLEntryExtensionOID",
+    "CertificatePoliciesOID",
+    "ExtendedKeyUsageOID",
+    "ExtensionOID",
+    "NameOID",
+    "OCSPExtensionOID",
+    "ObjectIdentifier",
+    "SignatureAlgorithmOID",
+    "SubjectInformationAccessOID",
+]
diff --git a/Voltage Vikings/lib/python3.9/site-packages/defusedxml-0.7.1.dist-info/INSTALLER b/Voltage Vikings/lib/python3.9/site-packages/defusedxml-0.7.1.dist-info/INSTALLER
new file mode 100644
index 00000000..a1b589e3
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/defusedxml-0.7.1.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/Voltage Vikings/lib/python3.9/site-packages/defusedxml-0.7.1.dist-info/LICENSE b/Voltage Vikings/lib/python3.9/site-packages/defusedxml-0.7.1.dist-info/LICENSE
new file mode 100644
index 00000000..311690c6
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/defusedxml-0.7.1.dist-info/LICENSE
@@ -0,0 +1,49 @@
+PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
+--------------------------------------------
+
+1. This LICENSE AGREEMENT is between the Python Software Foundation
+("PSF"), and the Individual or Organization ("Licensee") accessing and
+otherwise using this software ("Python") in source or binary form and
+its associated documentation.
+
+2. Subject to the terms and conditions of this License Agreement, PSF
+hereby grants Licensee a nonexclusive, royalty-free, world-wide
+license to reproduce, analyze, test, perform and/or display publicly,
+prepare derivative works, distribute, and otherwise use Python
+alone or in any derivative version, provided, however, that PSF's
+License Agreement and PSF's notice of copyright, i.e., "Copyright (c)
+2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 Python Software Foundation;
+All Rights Reserved" are retained in Python alone or in any derivative
+version prepared by Licensee.
+
+3. In the event Licensee prepares a derivative work that is based on
+or incorporates Python or any part thereof, and wants to make
+the derivative work available to others as provided herein, then
+Licensee hereby agrees to include in any such work a brief summary of
+the changes made to Python.
+
+4. PSF is making Python available to Licensee on an "AS IS"
+basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
+DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
+INFRINGE ANY THIRD PARTY RIGHTS.
+
+5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
+FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
+A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
+OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+6. This License Agreement will automatically terminate upon a material
+breach of its terms and conditions.
+
+7. Nothing in this License Agreement shall be deemed to create any
+relationship of agency, partnership, or joint venture between PSF and
+Licensee. This License Agreement does not grant permission to use PSF
+trademarks or trade name in a trademark sense to endorse or promote
+products or services of Licensee, or any third party.
+
+8. By copying, installing or otherwise using Python, Licensee
+agrees to be bound by the terms and conditions of this License
+Agreement.
+
diff --git a/Voltage Vikings/lib/python3.9/site-packages/defusedxml-0.7.1.dist-info/METADATA b/Voltage Vikings/lib/python3.9/site-packages/defusedxml-0.7.1.dist-info/METADATA
new file mode 100644
index 00000000..f916e891
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/defusedxml-0.7.1.dist-info/METADATA
@@ -0,0 +1,978 @@
+Metadata-Version: 2.1
+Name: defusedxml
+Version: 0.7.1
+Summary: XML bomb protection for Python stdlib modules
+Home-page: https://github.com/tiran/defusedxml
+Author: Christian Heimes
+Author-email: christian@python.org
+Maintainer: Christian Heimes
+Maintainer-email: christian@python.org
+License: PSFL
+Download-URL: https://pypi.python.org/pypi/defusedxml
+Keywords: xml bomb DoS
+Platform: all
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Python Software Foundation License
+Classifier: Natural Language :: English
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Topic :: Text Processing :: Markup :: XML
+Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*
+
+===================================================
+defusedxml -- defusing XML bombs and other exploits
+===================================================
+
+.. image:: https://img.shields.io/pypi/v/defusedxml.svg
+ :target: https://pypi.org/project/defusedxml/
+ :alt: Latest Version
+
+.. image:: https://img.shields.io/pypi/pyversions/defusedxml.svg
+ :target: https://pypi.org/project/defusedxml/
+ :alt: Supported Python versions
+
+.. image:: https://travis-ci.org/tiran/defusedxml.svg?branch=master
+ :target: https://travis-ci.org/tiran/defusedxml
+ :alt: Travis CI
+
+.. image:: https://codecov.io/github/tiran/defusedxml/coverage.svg?branch=master
+ :target: https://codecov.io/github/tiran/defusedxml?branch=master
+ :alt: codecov
+
+.. image:: https://img.shields.io/pypi/dm/defusedxml.svg
+ :target: https://pypistats.org/packages/defusedxml
+ :alt: PyPI downloads
+
+.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
+ :target: https://github.com/psf/black
+ :alt: Code style: black
+
+..
+
+ "It's just XML, what could probably go wrong?"
+
+Christian Heimes
+
+Synopsis
+========
+
+The results of an attack on a vulnerable XML library can be fairly dramatic.
+With just a few hundred **Bytes** of XML data an attacker can occupy several
+**Gigabytes** of memory within **seconds**. An attacker can also keep
+CPUs busy for a long time with a small to medium size request. Under some
+circumstances it is even possible to access local files on your
+server, to circumvent a firewall, or to abuse services to rebound attacks to
+third parties.
+
+The attacks use and abuse less common features of XML and its parsers. The
+majority of developers are unacquainted with features such as processing
+instructions and entity expansions that XML inherited from SGML. At best
+they know about ``<!DOCTYPE>`` from experience with HTML but they are not
+aware that a document type definition (DTD) can generate an HTTP request
+or load a file from the file system.
+
+None of the issues is new. They have been known for a long time. Billion
+laughs was first reported in 2003. Nevertheless some XML libraries and
+applications are still vulnerable and even heavy users of XML are
+surprised by these features. It's hard to say whom to blame for the
+situation. It's too short sighted to shift all blame on XML parsers and
+XML libraries for using insecure default settings. After all they
+properly implement XML specifications. Application developers must not rely
+on a library being configured, by default, to guard against potentially
+harmful data.
+
+
+.. contents:: Table of Contents
+ :depth: 2
+
+
+Attack vectors
+==============
+
+billion laughs / exponential entity expansion
+---------------------------------------------
+
+The `Billion Laughs`_ attack -- also known as exponential entity expansion --
+uses multiple levels of nested entities. The original example uses 9 levels
+of 10 expansions in each level to expand the string ``lol`` to a string of
+3 * 10 :sup:`9` bytes, hence the name "billion laughs". The resulting string
+occupies 3 GB (2.79 GiB) of memory; intermediate strings require additional
+memory. Because most parsers don't cache the intermediate step for every
+expansion it is repeated over and over again. It increases the CPU load even
+more.
+
+An XML document of just a few hundred bytes can disrupt all services on a
+machine within seconds.
+
+Example XML::
+
+
+
+
+
+ ]>
+ &d;
+
+
+quadratic blowup entity expansion
+---------------------------------
+
+A quadratic blowup attack is similar to a `Billion Laughs`_ attack; it abuses
+entity expansion, too. Instead of nested entities it repeats one large entity
+with a couple of thousand chars over and over again. The attack isn't as
+efficient as the exponential case but it avoids triggering countermeasures of
+parsers against heavily nested entities. Some parsers limit the depth and
+breadth of a single entity but not the total amount of expanded text
+throughout an entire XML document.
+
+A medium-sized XML document with a couple of hundred kilobytes can require a
+couple of hundred MB to several GB of memory. When the attack is combined
+with some level of nested expansion an attacker is able to achieve a higher
+ratio of success.
+
+::
+
+
+ ]>
+ &a;&a;&a;... repeat
+
+
+external entity expansion (remote)
+----------------------------------
+
+Entity declarations can contain more than just text for replacement. They can
+also point to external resources by public identifiers or system identifiers.
+System identifiers are standard URIs. When the URI is a URL (e.g. a
+``http://`` locator) some parsers download the resource from the remote
+location and embed them into the XML document verbatim.
+
+Simple example of a parsed external entity::
+
+
+ ]>
+ ⅇ
+
+The case of parsed external entities works only for valid XML content. The
+XML standard also supports unparsed external entities with a
+``NData declaration``.
+
+External entity expansion opens the door to plenty of exploits. An attacker
+can abuse a vulnerable XML library and application to rebound and forward
+network requests with the IP address of the server. It highly depends
+on the parser and the application what kind of exploit is possible. For
+example:
+
+* An attacker can circumvent firewalls and gain access to restricted
+ resources as all the requests are made from an internal and trustworthy
+ IP address, not from the outside.
+* An attacker can abuse a service to attack, spy on or DoS your servers but
+ also third party services. The attack is disguised with the IP address of
+ the server and the attacker is able to utilize the high bandwidth of a big
+ machine.
+* An attacker can exhaust additional resources on the machine, e.g. with
+ requests to a service that doesn't respond or responds with very large
+ files.
+* An attacker may gain knowledge, when, how often and from which IP address
+ an XML document is accessed.
+* An attacker could send mail from inside your network if the URL handler
+ supports ``smtp://`` URIs.
+
+
+external entity expansion (local file)
+--------------------------------------
+
+External entities with references to local files are a sub-case of external
+entity expansion. It's listed as an extra attack because it deserves extra
+attention. Some XML libraries such as lxml disable network access by default
+but still allow entity expansion with local file access by default. Local
+files are either referenced with a ``file://`` URL or by a file path (either
+relative or absolute).
+
+An attacker may be able to access and download all files that can be read by
+the application process. This may include critical configuration files, too.
+
+::
+
+
+ ]>
+ ⅇ
+
+
+DTD retrieval
+-------------
+
+This case is similar to external entity expansion, too. Some XML libraries
+like Python's xml.dom.pulldom retrieve document type definitions from remote
+or local locations. Several attack scenarios from the external entity case
+apply to this issue as well.
+
+::
+
+
+
+
+
+ text
+
+
+
+Python XML Libraries
+====================
+
+.. csv-table:: vulnerabilities and features
+ :header: "kind", "sax", "etree", "minidom", "pulldom", "xmlrpc", "lxml", "genshi"
+ :widths: 24, 7, 8, 8, 7, 8, 8, 8
+ :stub-columns: 0
+
+ "billion laughs", "**True**", "**True**", "**True**", "**True**", "**True**", "False (1)", "False (5)"
+ "quadratic blowup", "**True**", "**True**", "**True**", "**True**", "**True**", "**True**", "False (5)"
+ "external entity expansion (remote)", "**True**", "False (3)", "False (4)", "**True**", "false", "False (1)", "False (5)"
+ "external entity expansion (local file)", "**True**", "False (3)", "False (4)", "**True**", "false", "**True**", "False (5)"
+ "DTD retrieval", "**True**", "False", "False", "**True**", "false", "False (1)", "False"
+ "gzip bomb", "False", "False", "False", "False", "**True**", "**partly** (2)", "False"
+ "xpath support (7)", "False", "False", "False", "False", "False", "**True**", "False"
+ "xsl(t) support (7)", "False", "False", "False", "False", "False", "**True**", "False"
+ "xinclude support (7)", "False", "**True** (6)", "False", "False", "False", "**True** (6)", "**True**"
+ "C library", "expat", "expat", "expat", "expat", "expat", "libxml2", "expat"
+
+1. Lxml is protected against billion laughs attacks and doesn't do network
+ lookups by default.
+2. libxml2 and lxml are not directly vulnerable to gzip decompression bombs
+ but they don't protect you against them either.
+3. xml.etree doesn't expand entities and raises a ParserError when an entity
+ occurs.
+4. minidom doesn't expand entities and simply returns the unexpanded entity
+ verbatim.
+5. genshi.input of genshi 0.6 doesn't support entity expansion and raises a
+ ParserError when an entity occurs.
+6. Library has (limited) XInclude support but requires an additional step to
+ process inclusion.
+7. These are features but they may introduce exploitable holes, see
+ `Other things to consider`_
+
+
+Settings in standard library
+----------------------------
+
+
+xml.sax.handler Features
+........................
+
+feature_external_ges (http://xml.org/sax/features/external-general-entities)
+ disables external entity expansion
+
+feature_external_pes (http://xml.org/sax/features/external-parameter-entities)
+ the option is ignored and doesn't modify any functionality
+
+DOM xml.dom.xmlbuilder.Options
+..............................
+
+external_parameter_entities
+ ignored
+
+external_general_entities
+ ignored
+
+external_dtd_subset
+ ignored
+
+entities
+ unsure
+
+
+defusedxml
+==========
+
+The `defusedxml package`_ (`defusedxml on PyPI`_)
+contains several Python-only workarounds and fixes
+for denial of service and other vulnerabilities in Python's XML libraries.
+In order to benefit from the protection you just have to import and use the
+listed functions / classes from the right defusedxml module instead of the
+original module. Merely `defusedxml.xmlrpc`_ is implemented as monkey patch.
+
+Instead of::
+
+ >>> from xml.etree.ElementTree import parse
+ >>> et = parse(xmlfile)
+
+alter code to::
+
+ >>> from defusedxml.ElementTree import parse
+ >>> et = parse(xmlfile)
+
+Additionally the package has an **untested** function to monkey patch
+all stdlib modules with ``defusedxml.defuse_stdlib()``.
+
+All functions and parser classes accept three additional keyword arguments.
+They return either the same objects as the original functions or compatible
+subclasses.
+
+forbid_dtd (default: False)
+ disallow XML with a ``<!DOCTYPE>`` processing instruction and raise a
+ *DTDForbidden* exception when a DTD processing instruction is found.
+
+forbid_entities (default: True)
+ disallow XML with ``<!ENTITY>`` declarations inside the DTD and raise an
+ *EntitiesForbidden* exception when an entity is declared.
+
+forbid_external (default: True)
+ disallow any access to remote or local resources in external entities
+ or DTD and raising an *ExternalReferenceForbidden* exception when a DTD
+ or entity references an external resource.
+
+
+defusedxml (package)
+--------------------
+
+DefusedXmlException, DTDForbidden, EntitiesForbidden,
+ExternalReferenceForbidden, NotSupportedError
+
+defuse_stdlib() (*experimental*)
+
+
+defusedxml.cElementTree
+-----------------------
+
+**NOTE** ``defusedxml.cElementTree`` is deprecated and will be removed in a
+future release. Import from ``defusedxml.ElementTree`` instead.
+
+parse(), iterparse(), fromstring(), XMLParser
+
+
+defusedxml.ElementTree
+-----------------------
+
+parse(), iterparse(), fromstring(), XMLParser
+
+
+defusedxml.expatreader
+----------------------
+
+create_parser(), DefusedExpatParser
+
+
+defusedxml.sax
+--------------
+
+parse(), parseString(), make_parser()
+
+
+defusedxml.expatbuilder
+-----------------------
+
+parse(), parseString(), DefusedExpatBuilder, DefusedExpatBuilderNS
+
+
+defusedxml.minidom
+------------------
+
+parse(), parseString()
+
+
+defusedxml.pulldom
+------------------
+
+parse(), parseString()
+
+
+defusedxml.xmlrpc
+-----------------
+
+The fix is implemented as monkey patch for the stdlib's xmlrpc package (3.x)
+or xmlrpclib module (2.x). The function `monkey_patch()` enables the fixes,
+`unmonkey_patch()` removes the patch and puts the code in its former state.
+
+The monkey patch protects against XML related attacks as well as
+decompression bombs and excessively large requests or responses. The default
+setting is 30 MB for requests, responses and gzip decompression. You can
+modify the default by changing the module variable `MAX_DATA`. A value of
+`-1` disables the limit.
+
+
+defusedxml.lxml
+---------------
+
+**DEPRECATED** The module is deprecated and will be removed in a future
+release.
+
+The module acts as an *example* how you could protect code that uses
+lxml.etree. It implements a custom Element class that filters out
+Entity instances, a custom parser factory and a thread local storage for
+parser instances. It also has a check_docinfo() function which inspects
+a tree for internal or external DTDs and entity declarations. In order to
+check for entities lxml > 3.0 is required.
+
+parse(), fromstring()
+RestrictedElement, GlobalParserTLS, getDefaultParser(), check_docinfo()
+
+
+defusedexpat
+============
+
+The `defusedexpat package`_ (`defusedexpat on PyPI`_)
+comes with binary extensions and a
+`modified expat`_ library instead of the standard `expat parser`_. It's
+basically a stand-alone version of the patches for Python's standard
+library C extensions.
+
+Modifications in expat
+----------------------
+
+new definitions::
+
+ XML_BOMB_PROTECTION
+ XML_DEFAULT_MAX_ENTITY_INDIRECTIONS
+ XML_DEFAULT_MAX_ENTITY_EXPANSIONS
+ XML_DEFAULT_RESET_DTD
+
+new XML_FeatureEnum members::
+
+ XML_FEATURE_MAX_ENTITY_INDIRECTIONS
+ XML_FEATURE_MAX_ENTITY_EXPANSIONS
+ XML_FEATURE_IGNORE_DTD
+
+new XML_Error members::
+
+ XML_ERROR_ENTITY_INDIRECTIONS
+ XML_ERROR_ENTITY_EXPANSION
+
+new API functions::
+
+ int XML_GetFeature(XML_Parser parser,
+ enum XML_FeatureEnum feature,
+ long *value);
+ int XML_SetFeature(XML_Parser parser,
+ enum XML_FeatureEnum feature,
+ long value);
+ int XML_GetFeatureDefault(enum XML_FeatureEnum feature,
+ long *value);
+ int XML_SetFeatureDefault(enum XML_FeatureEnum feature,
+ long value);
+
+XML_FEATURE_MAX_ENTITY_INDIRECTIONS
+ Limit the amount of indirections that are allowed to occur during the
+ expansion of a nested entity. A counter starts when an entity reference
+ is encountered. It resets after the entity is fully expanded. The limit
+ protects the parser against exponential entity expansion attacks (aka
+ billion laughs attack). When the limit is exceeded the parser stops and
+ fails with `XML_ERROR_ENTITY_INDIRECTIONS`.
+ A value of 0 disables the protection.
+
+ Supported range
+ 0 .. UINT_MAX
+ Default
+ 40
+
+XML_FEATURE_MAX_ENTITY_EXPANSIONS
+ Limit the total length of all entity expansions throughout the entire
+ document. The lengths of all entities are accumulated in a parser variable.
+ The setting protects against quadratic blowup attacks (lots of expansions
+ of a large entity declaration). When the sum of all entities exceeds
+ the limit, the parser stops and fails with `XML_ERROR_ENTITY_EXPANSION`.
+ A value of 0 disables the protection.
+
+ Supported range
+ 0 .. UINT_MAX
+ Default
+ 8 MiB
+
+XML_FEATURE_RESET_DTD
+ Reset all DTD information after the block has been parsed. When
+ the flag is set (default: false) all DTD information after the
+ endDoctypeDeclHandler has been called. The flag can be set inside the
+ endDoctypeDeclHandler. Without DTD information any entity reference in
+ the document body leads to `XML_ERROR_UNDEFINED_ENTITY`.
+
+ Supported range
+ 0, 1
+ Default
+ 0
+
+
+How to avoid XML vulnerabilities
+================================
+
+Best practices
+--------------
+
+* Don't allow DTDs
+* Don't expand entities
+* Don't resolve externals
+* Limit parse depth
+* Limit total input size
+* Limit parse time
+* Favor a SAX or iterparse-like parser for potential large data
+* Validate and properly quote arguments to XSL transformations and
+ XPath queries
+* Don't use XPath expression from untrusted sources
+* Don't apply XSL transformations that come from untrusted sources
+
+(based on Brad Hill's `Attacking XML Security`_)
+
+
+Other things to consider
+========================
+
+XML, XML parsers and processing libraries have more features and possible
+issue that could lead to DoS vulnerabilities or security exploits in
+applications. I have compiled an incomplete list of theoretical issues that
+need further research and more attention. The list is deliberately pessimistic
+and a bit paranoid, too. It contains things that might go wrong under daffy
+circumstances.
+
+
+attribute blowup / hash collision attack
+----------------------------------------
+
+XML parsers may use an algorithm with quadratic runtime O(n :sup:`2`) to
+handle attributes and namespaces. If it uses hash tables (dictionaries) to
+store attributes and namespaces the implementation may be vulnerable to
+hash collision attacks, thus reducing the performance to O(n :sup:`2`) again.
+In either case an attacker is able to forge a denial of service attack with
+an XML document that contains thousands upon thousands of attributes in
+a single node.
+
+I haven't researched yet if expat, pyexpat or libxml2 are vulnerable.
+
+
+decompression bomb
+------------------
+
+The issue of decompression bombs (aka `ZIP bomb`_) apply to all XML libraries
+that can parse compressed XML stream like gzipped HTTP streams or LZMA-ed
+files. For an attacker it can reduce the amount of transmitted data by three
+magnitudes or more. Gzip is able to compress 1 GiB zeros to roughly 1 MB,
+lzma is even better::
+
+ $ dd if=/dev/zero bs=1M count=1024 | gzip > zeros.gz
+ $ dd if=/dev/zero bs=1M count=1024 | lzma -z > zeros.xy
+ $ ls -sh zeros.*
+ 1020K zeros.gz
+ 148K zeros.xy
+
+None of Python's standard XML libraries decompress streams except for
+``xmlrpclib``. The module is vulnerable
+to decompression bombs.
+
+lxml can load and process compressed data through libxml2 transparently.
+libxml2 can handle even very large blobs of compressed data efficiently
+without using too much memory. But it doesn't protect applications from
+decompression bombs. A carefully written SAX or iterparse-like approach can
+be safe.
+
+
+Processing Instruction
+----------------------
+
+`PI`_'s like::
+
+
+
+may impose more threats for XML processing. It depends if and how a
+processor handles processing instructions. The issue of URL retrieval with
+network or local file access apply to processing instructions, too.
+
+
+Other DTD features
+------------------
+
+`DTD`_ has more features like ``<!NOTATION>``. I haven't researched how
+these features may be a security threat.
+
+
+XPath
+-----
+
+XPath statements may introduce DoS vulnerabilities. Code should never execute
+queries from untrusted sources. An attacker may also be able to create an XML
+document that makes certain XPath queries costly or resource hungry.
+
+
+XPath injection attacks
+-----------------------
+
+XPath injection attacks pretty much work like SQL injection attacks.
+Arguments to XPath queries must be quoted and validated properly, especially
+when they are taken from the user. The page `Avoid the dangers of XPath injection`_
+list some ramifications of XPath injections.
+
+Python's standard library doesn't have XPath support. Lxml supports
+parameterized XPath queries which does proper quoting. You just have to use
+its xpath() method correctly::
+
+ # DON'T
+ >>> tree.xpath("/tag[@id='%s']" % value)
+
+ # instead do
+ >>> tree.xpath("/tag[@id=$tagid]", tagid=name)
+
+
+XInclude
+--------
+
+`XML Inclusion`_ is another way to load and include external files::
+
+
+
+
+
+This feature should be disabled when XML files from an untrusted source are
+processed. Some Python XML libraries and libxml2 support XInclude but don't
+have an option to sandbox inclusion and limit it to allowed directories.
+
+
+XMLSchema location
+------------------
+
+A validating XML parser may download schema files from the information in a
+``xsi:schemaLocation`` attribute.
+
+::
+
+
+
+
+
+XSL Transformation
+------------------
+
+You should keep in mind that XSLT is a Turing complete language. Never
+process XSLT code from unknown or untrusted source! XSLT processors may
+allow you to interact with external resources in ways you can't even imagine.
+Some processors even support extensions that allow read/write access to file
+system, access to JRE objects or scripting with Jython.
+
+Example from `Attacking XML Security`_ for Xalan-J::
+
+
+
+
+
+
+
+
+
+
+
+Related CVEs
+============
+
+CVE-2013-1664
+ Unrestricted entity expansion induces DoS vulnerabilities in Python XML
+ libraries (XML bomb)
+
+CVE-2013-1665
+ External entity expansion in Python XML libraries inflicts potential
+ security flaws and DoS vulnerabilities
+
+
+Other languages / frameworks
+=============================
+
+Several other programming languages and frameworks are vulnerable as well. A
+couple of them are affected by the fact that libxml2 up to 2.9.0 has no
+protection against quadratic blowup attacks. Most of them have potential
+dangerous default settings for entity expansion and external entities, too.
+
+Perl
+----
+
+Perl's XML::Simple is vulnerable to quadratic entity expansion and external
+entity expansion (both local and remote).
+
+
+Ruby
+----
+
+Ruby's REXML document parser is vulnerable to entity expansion attacks
+(both quadratic and exponential) but it doesn't do external entity
+expansion by default. In order to counteract entity expansion you have to
+disable the feature::
+
+ REXML::Document.entity_expansion_limit = 0
+
+libxml-ruby and hpricot don't expand entities in their default configuration.
+
+
+PHP
+---
+
+PHP's SimpleXML API is vulnerable to quadratic entity expansion and loads
+entities from local and remote resources. The option ``LIBXML_NONET`` disables
+network access but still allows local file access. ``LIBXML_NOENT`` seems to
+have no effect on entity expansion in PHP 5.4.6.
+
+
+C# / .NET / Mono
+----------------
+
+Information in `XML DoS and Defenses (MSDN)`_ suggest that .NET is
+vulnerable with its default settings. The article contains code snippets
+how to create a secure XML reader::
+
+ XmlReaderSettings settings = new XmlReaderSettings();
+ settings.ProhibitDtd = false;
+ settings.MaxCharactersFromEntities = 1024;
+ settings.XmlResolver = null;
+ XmlReader reader = XmlReader.Create(stream, settings);
+
+
+Java
+----
+
+Untested. The documentation of Xerces and its `Xerces SecurityMananger`_
+sounds like Xerces is also vulnerable to billion laugh attacks with its
+default settings. It also does entity resolving when an
+``org.xml.sax.EntityResolver`` is configured. I'm not yet sure about the
+default setting here.
+
+Java specialists suggest to have a custom builder factory::
+
+ DocumentBuilderFactory builderFactory = DocumentBuilderFactory.newInstance();
+ builderFactory.setXIncludeAware(False);
+ builderFactory.setExpandEntityReferences(False);
+ builderFactory.setFeature(XMLConstants.FEATURE_SECURE_PROCESSING, True);
+ # either
+ builderFactory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", True);
+ # or if you need DTDs
+ builderFactory.setFeature("http://xml.org/sax/features/external-general-entities", False);
+ builderFactory.setFeature("http://xml.org/sax/features/external-parameter-entities", False);
+ builderFactory.setFeature("http://apache.org/xml/features/nonvalidating/load-external-dtd", False);
+ builderFactory.setFeature("http://apache.org/xml/features/nonvalidating/load-dtd-grammar", False);
+
+
+TODO
+====
+
+* DOM: Use xml.dom.xmlbuilder options for entity handling
+* SAX: take feature_external_ges and feature_external_pes (?) into account
+* test experimental monkey patching of stdlib modules
+* improve documentation
+
+
+License
+=======
+
+Copyright (c) 2013-2017 by Christian Heimes
+
+Licensed to PSF under a Contributor Agreement.
+
+See https://www.python.org/psf/license for licensing details.
+
+
+Acknowledgements
+================
+
+Brett Cannon (Python Core developer)
+ review and code cleanup
+
+Antoine Pitrou (Python Core developer)
+ code review
+
+Aaron Patterson, Ben Murphy and Michael Koziarski (Ruby community)
+ Many thanks to Aaron, Ben and Michael from the Ruby community for their
+ report and assistance.
+
+Thierry Carrez (OpenStack)
+ Many thanks to Thierry for his report to the Python Security Response
+ Team on behalf of the OpenStack security team.
+
+Carl Meyer (Django)
+ Many thanks to Carl for his report to PSRT on behalf of the Django security
+ team.
+
+Daniel Veillard (libxml2)
+ Many thanks to Daniel for his insight and assistance with libxml2.
+
+semantics GmbH (https://www.semantics.de/)
+ Many thanks to my employer semantics for letting me work on the issue
+ during working hours as part of semantics's open source initiative.
+
+
+References
+==========
+
+* `XML DoS and Defenses (MSDN)`_
+* `Billion Laughs`_ on Wikipedia
+* `ZIP bomb`_ on Wikipedia
+* `Configure SAX parsers for secure processing`_
+* `Testing for XML Injection`_
+
+.. _defusedxml package: https://github.com/tiran/defusedxml
+.. _defusedxml on PyPI: https://pypi.python.org/pypi/defusedxml
+.. _defusedexpat package: https://github.com/tiran/defusedexpat
+.. _defusedexpat on PyPI: https://pypi.python.org/pypi/defusedexpat
+.. _modified expat: https://github.com/tiran/expat
+.. _expat parser: http://expat.sourceforge.net/
+.. _Attacking XML Security: https://www.isecpartners.com/media/12976/iSEC-HILL-Attacking-XML-Security-bh07.pdf
+.. _Billion Laughs: https://en.wikipedia.org/wiki/Billion_laughs
+.. _XML DoS and Defenses (MSDN): https://msdn.microsoft.com/en-us/magazine/ee335713.aspx
+.. _ZIP bomb: https://en.wikipedia.org/wiki/Zip_bomb
+.. _DTD: https://en.wikipedia.org/wiki/Document_Type_Definition
+.. _PI: https://en.wikipedia.org/wiki/Processing_Instruction
+.. _Avoid the dangers of XPath injection: http://www.ibm.com/developerworks/xml/library/x-xpathinjection/index.html
+.. _Configure SAX parsers for secure processing: http://www.ibm.com/developerworks/xml/library/x-tipcfsx/index.html
+.. _Testing for XML Injection: https://www.owasp.org/index.php/Testing_for_XML_Injection_(OWASP-DV-008)
+.. _Xerces SecurityMananger: https://xerces.apache.org/xerces2-j/javadocs/xerces2/org/apache/xerces/util/SecurityManager.html
+.. _XML Inclusion: https://www.w3.org/TR/xinclude/#include_element
+
+Changelog
+=========
+
+defusedxml 0.7.1
+---------------------
+
+*Release date: 08-Mar-2021*
+
+- Fix regression ``defusedxml.ElementTree.ParseError`` (#63)
+ The ``ParseError`` exception is now the same class object as
+ ``xml.etree.ElementTree.ParseError`` again.
+
+
+defusedxml 0.7.0
+----------------
+
+*Release date: 4-Mar-2021*
+
+- No changes
+
+
+defusedxml 0.7.0rc2
+-------------------
+
+*Release date: 12-Jan-2021*
+
+- Re-add and deprecate ``defusedxml.cElementTree``
+- Use GitHub Actions instead of TravisCI
+- Restore ``ElementTree`` attribute of ``xml.etree`` module after patching
+
+defusedxml 0.7.0rc1
+-------------------
+
+*Release date: 04-May-2020*
+
+- Add support for Python 3.9
+- ``defusedxml.cElementTree`` is not available with Python 3.9.
+- Python 2 is deprecated. Support for Python 2 will be removed in 0.8.0.
+
+
+defusedxml 0.6.0
+----------------
+
+*Release date: 17-Apr-2019*
+
+- Increase test coverage.
+- Add badges to README.
+
+
+defusedxml 0.6.0rc1
+-------------------
+
+*Release date: 14-Apr-2019*
+
+- Test on Python 3.7 stable and 3.8-dev
+- Drop support for Python 3.4
+- No longer pass *html* argument to XMLParse. It has been deprecated and
+ ignored for a long time. The DefusedXMLParser still takes a html argument.
+ A deprecation warning is issued when the argument is False and a TypeError
+ when it's True.
+- defusedxml now fails early when pyexpat stdlib module is not available or
+ broken.
+- defusedxml.ElementTree.__all__ now lists ParseError as public attribute.
+- The defusedxml.ElementTree and defusedxml.cElementTree modules had a typo
+ and used XMLParse instead of XMLParser as an alias for DefusedXMLParser.
+ Both the old and fixed name are now available.
+
+
+defusedxml 0.5.0
+----------------
+
+*Release date: 07-Feb-2017*
+
+- No changes
+
+
+defusedxml 0.5.0.rc1
+--------------------
+
+*Release date: 28-Jan-2017*
+
+- Add compatibility with Python 3.6
+- Drop support for Python 2.6, 3.1, 3.2, 3.3
+- Fix lxml tests (XMLSyntaxError: Detected an entity reference loop)
+
+
+defusedxml 0.4.1
+----------------
+
+*Release date: 28-Mar-2013*
+
+- Add more demo exploits, e.g. python_external.py and Xalan XSLT demos.
+- Improved documentation.
+
+
+defusedxml 0.4
+--------------
+
+*Release date: 25-Feb-2013*
+
+- As per http://seclists.org/oss-sec/2013/q1/340 please REJECT
+ CVE-2013-0278, CVE-2013-0279 and CVE-2013-0280 and use CVE-2013-1664,
+ CVE-2013-1665 for OpenStack/etc.
+- Add missing parser_list argument to sax.make_parser(). The argument is
+ ignored, though. (thanks to Florian Apolloner)
+- Add demo exploit for external entity attack on Python's SAX parser, XML-RPC
+ and WebDAV.
+
+
+defusedxml 0.3
+--------------
+
+*Release date: 19-Feb-2013*
+
+- Improve documentation
+
+
+defusedxml 0.2
+--------------
+
+*Release date: 15-Feb-2013*
+
+- Rename ExternalEntitiesForbidden to ExternalReferenceForbidden
+- Rename defusedxml.lxml.check_dtd() to check_docinfo()
+- Unify argument names in callbacks
+- Add arguments and formatted representation to exceptions
+- Add forbid_external argument to all functions and classes
+- More tests
+- LOTS of documentation
+- Add example code for other languages (Ruby, Perl, PHP) and parsers (Genshi)
+- Add protection against XML and gzip attacks to xmlrpclib
+
+defusedxml 0.1
+--------------
+
+*Release date: 08-Feb-2013*
+
+- Initial and internal release for PSRT review
+
+
diff --git a/Voltage Vikings/lib/python3.9/site-packages/defusedxml-0.7.1.dist-info/RECORD b/Voltage Vikings/lib/python3.9/site-packages/defusedxml-0.7.1.dist-info/RECORD
new file mode 100644
index 00000000..3b4cca42
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/defusedxml-0.7.1.dist-info/RECORD
@@ -0,0 +1,28 @@
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/defusedxml/ElementTree.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/defusedxml/__init__.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/defusedxml/cElementTree.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/defusedxml/common.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/defusedxml/expatbuilder.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/defusedxml/expatreader.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/defusedxml/lxml.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/defusedxml/minidom.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/defusedxml/pulldom.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/defusedxml/sax.cpython-39.pyc,,
+../../../../../Library/Caches/com.apple.python/Users/sagar/Desktop/S06/lib/python3.9/site-packages/defusedxml/xmlrpc.cpython-39.pyc,,
+defusedxml-0.7.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+defusedxml-0.7.1.dist-info/LICENSE,sha256=uAzp2oxCofkQeWJ_u-K_JyEK4Qig_-Xwd9WwjgdsJMg,2409
+defusedxml-0.7.1.dist-info/METADATA,sha256=Np0872SHDa-En7pxHLjQWn7-PI2asPdjrcNAef43i7E,32518
+defusedxml-0.7.1.dist-info/RECORD,,
+defusedxml-0.7.1.dist-info/WHEEL,sha256=kGT74LWyRUZrL4VgLh6_g12IeVl_9u9ZVhadrgXZUEY,110
+defusedxml-0.7.1.dist-info/top_level.txt,sha256=QGHa90F50pVKhWSFlERI0jtSKtqDiGyfeZX7dQNZAAw,11
+defusedxml/ElementTree.py,sha256=GLSqpCz58oXGPGyzf_HylsPS9_dcGVP5SN4dK7yvyPw,4640
+defusedxml/__init__.py,sha256=RczeaVJG64p2Fgy1jlCzbuRdchEPnEaCBrxgk8JJ_pM,1444
+defusedxml/cElementTree.py,sha256=PpaKMh3rU29sY8amAK4fzHQKl8gcAYD0h1LCoW62Rtk,1449
+defusedxml/common.py,sha256=3d26jNW4fNXzgjWhvUfs83Afiz5EVxFDupQbugkSMZc,4036
+defusedxml/expatbuilder.py,sha256=b4Q05vsBMJ5StkiTFf4my2rGGo1gZyEl_hC5MeFTOAA,3732
+defusedxml/expatreader.py,sha256=KOpSrwkSvj5SGOY9pTXOM26Dnz00rsJt33WueVvzpvc,2196
+defusedxml/lxml.py,sha256=HW-LFKdrfMRzHdi0Vcucq4-n8yz7v_OQwEQWFg1JQYA,4940
+defusedxml/minidom.py,sha256=3QcgygVwJqcWDQ3IZ2iol8zsH4cx3BRX70SPcd0bG2g,1884
+defusedxml/pulldom.py,sha256=DYj2D2lc7xoxZ38gfzujXmdznd8ovzDqGFXqyXbtxjk,1170
+defusedxml/sax.py,sha256=-SF08Msc2mWEYAMw62pJ5FMwWccOctFSnQwDLYLLlVE,1477
+defusedxml/xmlrpc.py,sha256=7rZQey3tqXcc1hrrM3RprOICU6fiFny9B9l4nmTioxA,5364
diff --git a/Voltage Vikings/lib/python3.9/site-packages/defusedxml-0.7.1.dist-info/WHEEL b/Voltage Vikings/lib/python3.9/site-packages/defusedxml-0.7.1.dist-info/WHEEL
new file mode 100644
index 00000000..ef99c6cf
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/defusedxml-0.7.1.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.34.2)
+Root-Is-Purelib: true
+Tag: py2-none-any
+Tag: py3-none-any
+
diff --git a/Voltage Vikings/lib/python3.9/site-packages/defusedxml-0.7.1.dist-info/top_level.txt b/Voltage Vikings/lib/python3.9/site-packages/defusedxml-0.7.1.dist-info/top_level.txt
new file mode 100644
index 00000000..36969f2c
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/defusedxml-0.7.1.dist-info/top_level.txt
@@ -0,0 +1 @@
+defusedxml
diff --git a/Voltage Vikings/lib/python3.9/site-packages/defusedxml/ElementTree.py b/Voltage Vikings/lib/python3.9/site-packages/defusedxml/ElementTree.py
new file mode 100644
index 00000000..5ba765f1
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/defusedxml/ElementTree.py
@@ -0,0 +1,154 @@
+# defusedxml
+#
+# Copyright (c) 2013 by Christian Heimes
+# Licensed to PSF under a Contributor Agreement.
+# See https://www.python.org/psf/license for licensing details.
+"""Defused xml.etree.ElementTree facade
+"""
+from __future__ import print_function, absolute_import
+
+import sys
+import warnings
+from xml.etree.ElementTree import ParseError
+from xml.etree.ElementTree import TreeBuilder as _TreeBuilder
+from xml.etree.ElementTree import parse as _parse
+from xml.etree.ElementTree import tostring
+
+from .common import PY3
+
+if PY3:
+ import importlib
+else:
+ from xml.etree.ElementTree import XMLParser as _XMLParser
+ from xml.etree.ElementTree import iterparse as _iterparse
+
+
+from .common import (
+ DTDForbidden,
+ EntitiesForbidden,
+ ExternalReferenceForbidden,
+ _generate_etree_functions,
+)
+
+__origin__ = "xml.etree.ElementTree"
+
+
+def _get_py3_cls():
+    """Python 3.3 hides the pure Python code but defusedxml requires it.
+
+    The code is based on test.support.import_fresh_module().
+
+    Returns a (XMLParser, iterparse) pair taken from a freshly imported,
+    pure-Python copy of xml.etree.ElementTree.
+    """
+    pymodname = "xml.etree.ElementTree"
+    cmodname = "_elementtree"
+
+    # Drop any cached copies so the import below starts from scratch.
+    pymod = sys.modules.pop(pymodname, None)
+    cmod = sys.modules.pop(cmodname, None)
+
+    # Mapping the C accelerator to None makes "import _elementtree" fail,
+    # which forces the pure Python implementation to be loaded.
+    sys.modules[cmodname] = None
+    try:
+        pure_pymod = importlib.import_module(pymodname)
+    finally:
+        # restore module
+        sys.modules[pymodname] = pymod
+        if cmod is not None:
+            sys.modules[cmodname] = cmod
+        else:
+            sys.modules.pop(cmodname, None)
+        # restore attribute on original package
+        etree_pkg = sys.modules["xml.etree"]
+        if pymod is not None:
+            etree_pkg.ElementTree = pymod
+        elif hasattr(etree_pkg, "ElementTree"):
+            del etree_pkg.ElementTree
+
+    _XMLParser = pure_pymod.XMLParser
+    _iterparse = pure_pymod.iterparse
+    # patch pure module to use ParseError from C extension
+    pure_pymod.ParseError = ParseError
+
+    return _XMLParser, _iterparse
+
+
+if PY3:
+ _XMLParser, _iterparse = _get_py3_cls()
+
+
+_sentinel = object()
+
+
+class DefusedXMLParser(_XMLParser):
+    """XMLParser subclass that rejects DTDs, entity declarations and
+    external entity references by installing expat handlers that raise.
+    """
+
+    def __init__(
+        self,
+        html=_sentinel,
+        target=None,
+        encoding=None,
+        forbid_dtd=False,
+        forbid_entities=True,
+        forbid_external=True,
+    ):
+        # Python 2.x old style class
+        _XMLParser.__init__(self, target=target, encoding=encoding)
+        if html is not _sentinel:
+            # the 'html' argument has been deprecated and ignored in all
+            # supported versions of Python. Python 3.8 finally removed it.
+            if html:
+                raise TypeError("'html=True' is no longer supported.")
+            else:
+                warnings.warn(
+                    "'html' keyword argument is no longer supported. Pass "
+                    "in arguments as keyword arguments.",
+                    category=DeprecationWarning,
+                )
+
+        self.forbid_dtd = forbid_dtd
+        self.forbid_entities = forbid_entities
+        self.forbid_external = forbid_external
+        # The underlying expat parser attribute differs between Py3 and Py2.
+        if PY3:
+            parser = self.parser
+        else:
+            parser = self._parser
+        # Install callbacks that raise instead of processing the construct.
+        if self.forbid_dtd:
+            parser.StartDoctypeDeclHandler = self.defused_start_doctype_decl
+        if self.forbid_entities:
+            parser.EntityDeclHandler = self.defused_entity_decl
+            parser.UnparsedEntityDeclHandler = self.defused_unparsed_entity_decl
+        if self.forbid_external:
+            parser.ExternalEntityRefHandler = self.defused_external_entity_ref_handler
+
+    def defused_start_doctype_decl(self, name, sysid, pubid, has_internal_subset):
+        # Any doctype declaration is forbidden when forbid_dtd is set.
+        raise DTDForbidden(name, sysid, pubid)
+
+    def defused_entity_decl(
+        self, name, is_parameter_entity, value, base, sysid, pubid, notation_name
+    ):
+        raise EntitiesForbidden(name, value, base, sysid, pubid, notation_name)
+
+    def defused_unparsed_entity_decl(self, name, base, sysid, pubid, notation_name):
+        # expat 1.2
+        raise EntitiesForbidden(name, None, base, sysid, pubid, notation_name) # pragma: no cover
+
+    def defused_external_entity_ref_handler(self, context, base, sysid, pubid):
+        raise ExternalReferenceForbidden(context, base, sysid, pubid)
+
+
+# aliases
+# XMLParse is a typo, keep it for backwards compatibility
+XMLTreeBuilder = XMLParse = XMLParser = DefusedXMLParser
+
+parse, iterparse, fromstring = _generate_etree_functions(
+ DefusedXMLParser, _TreeBuilder, _parse, _iterparse
+)
+XML = fromstring
+
+
+__all__ = [
+ "ParseError",
+ "XML",
+ "XMLParse",
+ "XMLParser",
+ "XMLTreeBuilder",
+ "fromstring",
+ "iterparse",
+ "parse",
+ "tostring",
+]
diff --git a/Voltage Vikings/lib/python3.9/site-packages/defusedxml/__init__.py b/Voltage Vikings/lib/python3.9/site-packages/defusedxml/__init__.py
new file mode 100644
index 00000000..4b5a2300
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/defusedxml/__init__.py
@@ -0,0 +1,67 @@
+# defusedxml
+#
+# Copyright (c) 2013 by Christian Heimes
+# Licensed to PSF under a Contributor Agreement.
+# See https://www.python.org/psf/license for licensing details.
+"""Defuse XML bomb denial of service vulnerabilities
+"""
+from __future__ import print_function, absolute_import
+
+import warnings
+
+from .common import (
+ DefusedXmlException,
+ DTDForbidden,
+ EntitiesForbidden,
+ ExternalReferenceForbidden,
+ NotSupportedError,
+ _apply_defusing,
+)
+
+
+def defuse_stdlib():
+    """Monkey patch and defuse all stdlib packages
+
+    :warning: The monkey patch is an EXPERIMENTAL feature.
+
+    Returns a dict mapping each defused module to the stdlib module it
+    patched; the value for xmlrpc is None because xmlrpc is monkey
+    patched in place rather than replaced.
+    """
+    defused = {}
+
+    with warnings.catch_warnings():
+        from . import cElementTree
+        from . import ElementTree
+        from . import minidom
+        from . import pulldom
+        from . import sax
+        from . import expatbuilder
+        from . import expatreader
+        from . import xmlrpc
+
+        xmlrpc.monkey_patch()
+        defused[xmlrpc] = None
+
+        defused_mods = [
+            cElementTree,
+            ElementTree,
+            minidom,
+            pulldom,
+            sax,
+            expatbuilder,
+            expatreader,
+        ]
+
+        # Copy the defused public names onto the corresponding stdlib modules.
+        for defused_mod in defused_mods:
+            stdlib_mod = _apply_defusing(defused_mod)
+            defused[defused_mod] = stdlib_mod
+
+    return defused
+
+
+__version__ = "0.7.1"
+
+__all__ = [
+ "DefusedXmlException",
+ "DTDForbidden",
+ "EntitiesForbidden",
+ "ExternalReferenceForbidden",
+ "NotSupportedError",
+]
diff --git a/Voltage Vikings/lib/python3.9/site-packages/defusedxml/cElementTree.py b/Voltage Vikings/lib/python3.9/site-packages/defusedxml/cElementTree.py
new file mode 100644
index 00000000..84670c68
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/defusedxml/cElementTree.py
@@ -0,0 +1,62 @@
+# defusedxml
+#
+# Copyright (c) 2013 by Christian Heimes
+# Licensed to PSF under a Contributor Agreement.
+# See https://www.python.org/psf/license for licensing details.
+"""Defused xml.etree.cElementTree
+"""
+from __future__ import absolute_import
+
+import warnings
+
+from .common import _generate_etree_functions
+
+from xml.etree.cElementTree import TreeBuilder as _TreeBuilder
+from xml.etree.cElementTree import parse as _parse
+from xml.etree.cElementTree import tostring
+
+# iterparse from ElementTree!
+from xml.etree.ElementTree import iterparse as _iterparse
+
+# This module is an alias for ElementTree just like xml.etree.cElementTree
+from .ElementTree import (
+ XML,
+ XMLParse,
+ XMLParser,
+ XMLTreeBuilder,
+ fromstring,
+ iterparse,
+ parse,
+ tostring,
+ DefusedXMLParser,
+ ParseError,
+)
+
+__origin__ = "xml.etree.cElementTree"
+
+
+warnings.warn(
+ "defusedxml.cElementTree is deprecated, import from defusedxml.ElementTree instead.",
+ category=DeprecationWarning,
+ stacklevel=2,
+)
+
+# XMLParse is a typo, keep it for backwards compatibility
+XMLTreeBuilder = XMLParse = XMLParser = DefusedXMLParser
+
+parse, iterparse, fromstring = _generate_etree_functions(
+ DefusedXMLParser, _TreeBuilder, _parse, _iterparse
+)
+XML = fromstring
+
+__all__ = [
+ "ParseError",
+ "XML",
+ "XMLParse",
+ "XMLParser",
+ "XMLTreeBuilder",
+ "fromstring",
+ "iterparse",
+ "parse",
+ "tostring",
+]
diff --git a/Voltage Vikings/lib/python3.9/site-packages/defusedxml/common.py b/Voltage Vikings/lib/python3.9/site-packages/defusedxml/common.py
new file mode 100644
index 00000000..5ceda1fb
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/defusedxml/common.py
@@ -0,0 +1,129 @@
+# defusedxml
+#
+# Copyright (c) 2013 by Christian Heimes
+# Licensed to PSF under a Contributor Agreement.
+# See https://www.python.org/psf/license for licensing details.
+"""Common constants, exceptions and helper functions
+"""
+import sys
+import xml.parsers.expat
+
+PY3 = sys.version_info[0] == 3
+
+# Fail early when pyexpat is not installed correctly
+if not hasattr(xml.parsers.expat, "ParserCreate"):
+ raise ImportError("pyexpat") # pragma: no cover
+
+
+class DefusedXmlException(ValueError):
+    """Base exception"""
+
+    def __repr__(self):
+        # Delegate to __str__ so all subclasses get a readable repr for free.
+        return str(self)
+
+
+class DTDForbidden(DefusedXmlException):
+    """Document type definition is forbidden"""
+
+    def __init__(self, name, sysid, pubid):
+        # name/sysid/pubid identify the offending doctype declaration.
+        super(DTDForbidden, self).__init__()
+        self.name = name
+        self.sysid = sysid
+        self.pubid = pubid
+
+    def __str__(self):
+        tpl = "DTDForbidden(name='{}', system_id={!r}, public_id={!r})"
+        return tpl.format(self.name, self.sysid, self.pubid)
+
+
+class EntitiesForbidden(DefusedXmlException):
+    """Entity definition is forbidden"""
+
+    def __init__(self, name, value, base, sysid, pubid, notation_name):
+        super(EntitiesForbidden, self).__init__()
+        self.name = name
+        self.value = value
+        self.base = base
+        self.sysid = sysid
+        self.pubid = pubid
+        self.notation_name = notation_name
+
+    def __str__(self):
+        # Only name/sysid/pubid appear in the message; value, base and
+        # notation_name remain available as attributes.
+        tpl = "EntitiesForbidden(name='{}', system_id={!r}, public_id={!r})"
+        return tpl.format(self.name, self.sysid, self.pubid)
+
+
+class ExternalReferenceForbidden(DefusedXmlException):
+    """Resolving an external reference is forbidden"""
+
+    def __init__(self, context, base, sysid, pubid):
+        super(ExternalReferenceForbidden, self).__init__()
+        self.context = context
+        self.base = base
+        self.sysid = sysid
+        self.pubid = pubid
+
+    def __str__(self):
+        tpl = "ExternalReferenceForbidden(system_id='{}', public_id={})"
+        return tpl.format(self.sysid, self.pubid)
+
+
+class NotSupportedError(DefusedXmlException):
+ """The operation is not supported"""
+
+
+def _apply_defusing(defused_mod):
+    # Overwrite names on the stdlib module (given by defused_mod.__origin__)
+    # with the defused objects that share the same public name, then return
+    # the patched stdlib module.
+    assert defused_mod is sys.modules[defused_mod.__name__]
+    stdlib_name = defused_mod.__origin__
+    __import__(stdlib_name, {}, {}, ["*"])
+    stdlib_mod = sys.modules[stdlib_name]
+    stdlib_names = set(dir(stdlib_mod))
+    for name, obj in vars(defused_mod).items():
+        # Only replace public names that already exist on the stdlib module.
+        if name.startswith("_") or name not in stdlib_names:
+            continue
+        setattr(stdlib_mod, name, obj)
+    return stdlib_mod
+
+
+def _generate_etree_functions(DefusedXMLParser, _TreeBuilder, _parse, _iterparse):
+    """Factory for functions needed by etree, dependent on whether
+    cElementTree or ElementTree is used.
+
+    Returns a (parse, iterparse, fromstring) triple of closures that build
+    a defused parser when the caller does not supply one.
+    """
+
+    def parse(source, parser=None, forbid_dtd=False, forbid_entities=True, forbid_external=True):
+        # The forbid_* flags only take effect when no explicit parser is given.
+        if parser is None:
+            parser = DefusedXMLParser(
+                target=_TreeBuilder(),
+                forbid_dtd=forbid_dtd,
+                forbid_entities=forbid_entities,
+                forbid_external=forbid_external,
+            )
+        return _parse(source, parser)
+
+    def iterparse(
+        source,
+        events=None,
+        parser=None,
+        forbid_dtd=False,
+        forbid_entities=True,
+        forbid_external=True,
+    ):
+        if parser is None:
+            parser = DefusedXMLParser(
+                target=_TreeBuilder(),
+                forbid_dtd=forbid_dtd,
+                forbid_entities=forbid_entities,
+                forbid_external=forbid_external,
+            )
+        return _iterparse(source, events, parser)
+
+    def fromstring(text, forbid_dtd=False, forbid_entities=True, forbid_external=True):
+        # Always builds a fresh defused parser; no parser argument is accepted.
+        parser = DefusedXMLParser(
+            target=_TreeBuilder(),
+            forbid_dtd=forbid_dtd,
+            forbid_entities=forbid_entities,
+            forbid_external=forbid_external,
+        )
+        parser.feed(text)
+        return parser.close()
+
+    return parse, iterparse, fromstring
diff --git a/Voltage Vikings/lib/python3.9/site-packages/defusedxml/expatbuilder.py b/Voltage Vikings/lib/python3.9/site-packages/defusedxml/expatbuilder.py
new file mode 100644
index 00000000..7bfc57e4
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/defusedxml/expatbuilder.py
@@ -0,0 +1,107 @@
+# defusedxml
+#
+# Copyright (c) 2013 by Christian Heimes
+# Licensed to PSF under a Contributor Agreement.
+# See https://www.python.org/psf/license for licensing details.
+"""Defused xml.dom.expatbuilder
+"""
+from __future__ import print_function, absolute_import
+
+from xml.dom.expatbuilder import ExpatBuilder as _ExpatBuilder
+from xml.dom.expatbuilder import Namespaces as _Namespaces
+
+from .common import DTDForbidden, EntitiesForbidden, ExternalReferenceForbidden
+
+__origin__ = "xml.dom.expatbuilder"
+
+
+class DefusedExpatBuilder(_ExpatBuilder):
+    """Defused document builder"""
+
+    def __init__(
+        self, options=None, forbid_dtd=False, forbid_entities=True, forbid_external=True
+    ):
+        _ExpatBuilder.__init__(self, options)
+        self.forbid_dtd = forbid_dtd
+        self.forbid_entities = forbid_entities
+        self.forbid_external = forbid_external
+
+    def defused_start_doctype_decl(self, name, sysid, pubid, has_internal_subset):
+        raise DTDForbidden(name, sysid, pubid)
+
+    def defused_entity_decl(
+        self, name, is_parameter_entity, value, base, sysid, pubid, notation_name
+    ):
+        raise EntitiesForbidden(name, value, base, sysid, pubid, notation_name)
+
+    def defused_unparsed_entity_decl(self, name, base, sysid, pubid, notation_name):
+        # expat 1.2
+        raise EntitiesForbidden(name, None, base, sysid, pubid, notation_name) # pragma: no cover
+
+    def defused_external_entity_ref_handler(self, context, base, sysid, pubid):
+        raise ExternalReferenceForbidden(context, base, sysid, pubid)
+
+    def install(self, parser):
+        # Let the base class wire up its handlers first, then override the
+        # forbidden ones with callbacks that raise.
+        _ExpatBuilder.install(self, parser)
+
+        if self.forbid_dtd:
+            parser.StartDoctypeDeclHandler = self.defused_start_doctype_decl
+        if self.forbid_entities:
+            # if self._options.entities:
+            parser.EntityDeclHandler = self.defused_entity_decl
+            parser.UnparsedEntityDeclHandler = self.defused_unparsed_entity_decl
+        if self.forbid_external:
+            parser.ExternalEntityRefHandler = self.defused_external_entity_ref_handler
+
+
+class DefusedExpatBuilderNS(_Namespaces, DefusedExpatBuilder):
+    """Defused document builder that supports namespaces."""
+
+    def install(self, parser):
+        # Install the defused handlers, then the namespace handler if enabled.
+        DefusedExpatBuilder.install(self, parser)
+        if self._options.namespace_declarations:
+            parser.StartNamespaceDeclHandler = self.start_namespace_decl_handler
+
+    def reset(self):
+        DefusedExpatBuilder.reset(self)
+        self._initNamespaces()
+
+
+def parse(file, namespaces=True, forbid_dtd=False, forbid_entities=True, forbid_external=True):
+    """Parse a document, returning the resulting Document node.
+
+    'file' may be either a file name or an open file object.
+    """
+    if namespaces:
+        build_builder = DefusedExpatBuilderNS
+    else:
+        build_builder = DefusedExpatBuilder
+    builder = build_builder(
+        forbid_dtd=forbid_dtd, forbid_entities=forbid_entities, forbid_external=forbid_external
+    )
+
+    if isinstance(file, str):
+        # A string argument is treated as a file name and opened in binary mode.
+        fp = open(file, "rb")
+        try:
+            result = builder.parseFile(fp)
+        finally:
+            fp.close()
+    else:
+        result = builder.parseFile(file)
+    return result
+
+
+def parseString(
+    string, namespaces=True, forbid_dtd=False, forbid_entities=True, forbid_external=True
+):
+    """Parse a document from a string, returning the resulting
+    Document node.
+    """
+    # Same builder selection as parse(), without any file handling.
+    if namespaces:
+        build_builder = DefusedExpatBuilderNS
+    else:
+        build_builder = DefusedExpatBuilder
+    builder = build_builder(
+        forbid_dtd=forbid_dtd, forbid_entities=forbid_entities, forbid_external=forbid_external
+    )
+    return builder.parseString(string)
diff --git a/Voltage Vikings/lib/python3.9/site-packages/defusedxml/expatreader.py b/Voltage Vikings/lib/python3.9/site-packages/defusedxml/expatreader.py
new file mode 100644
index 00000000..890e1d16
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/defusedxml/expatreader.py
@@ -0,0 +1,61 @@
+# defusedxml
+#
+# Copyright (c) 2013 by Christian Heimes
+# Licensed to PSF under a Contributor Agreement.
+# See https://www.python.org/psf/license for licensing details.
+"""Defused xml.sax.expatreader
+"""
+from __future__ import print_function, absolute_import
+
+from xml.sax.expatreader import ExpatParser as _ExpatParser
+
+from .common import DTDForbidden, EntitiesForbidden, ExternalReferenceForbidden
+
+__origin__ = "xml.sax.expatreader"
+
+
+class DefusedExpatParser(_ExpatParser):
+    """Defused SAX driver for the pyexpat C module."""
+
+    def __init__(
+        self,
+        namespaceHandling=0,
+        bufsize=2 ** 16 - 20,
+        forbid_dtd=False,
+        forbid_entities=True,
+        forbid_external=True,
+    ):
+        _ExpatParser.__init__(self, namespaceHandling, bufsize)
+        self.forbid_dtd = forbid_dtd
+        self.forbid_entities = forbid_entities
+        self.forbid_external = forbid_external
+
+    def defused_start_doctype_decl(self, name, sysid, pubid, has_internal_subset):
+        raise DTDForbidden(name, sysid, pubid)
+
+    def defused_entity_decl(
+        self, name, is_parameter_entity, value, base, sysid, pubid, notation_name
+    ):
+        raise EntitiesForbidden(name, value, base, sysid, pubid, notation_name)
+
+    def defused_unparsed_entity_decl(self, name, base, sysid, pubid, notation_name):
+        # expat 1.2
+        raise EntitiesForbidden(name, None, base, sysid, pubid, notation_name) # pragma: no cover
+
+    def defused_external_entity_ref_handler(self, context, base, sysid, pubid):
+        raise ExternalReferenceForbidden(context, base, sysid, pubid)
+
+    def reset(self):
+        # Handlers are installed here, after the base reset, so they are
+        # re-applied every time the parser is reset.
+        # NOTE(review): assumes _ExpatParser.reset() (re)creates self._parser
+        # — confirm against xml.sax.expatreader.
+        _ExpatParser.reset(self)
+        parser = self._parser
+        if self.forbid_dtd:
+            parser.StartDoctypeDeclHandler = self.defused_start_doctype_decl
+        if self.forbid_entities:
+            parser.EntityDeclHandler = self.defused_entity_decl
+            parser.UnparsedEntityDeclHandler = self.defused_unparsed_entity_decl
+        if self.forbid_external:
+            parser.ExternalEntityRefHandler = self.defused_external_entity_ref_handler
+
+
+def create_parser(*args, **kwargs):
+    """Return a DefusedExpatParser; arguments are passed through unchanged."""
+    return DefusedExpatParser(*args, **kwargs)
diff --git a/Voltage Vikings/lib/python3.9/site-packages/defusedxml/lxml.py b/Voltage Vikings/lib/python3.9/site-packages/defusedxml/lxml.py
new file mode 100644
index 00000000..99d5be93
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/defusedxml/lxml.py
@@ -0,0 +1,153 @@
+# defusedxml
+#
+# Copyright (c) 2013 by Christian Heimes
+# Licensed to PSF under a Contributor Agreement.
+# See https://www.python.org/psf/license for licensing details.
+"""DEPRECATED Example code for lxml.etree protection
+
+The code has NO protection against decompression bombs.
+"""
+from __future__ import print_function, absolute_import
+
+import threading
+import warnings
+
+from lxml import etree as _etree
+
+from .common import DTDForbidden, EntitiesForbidden, NotSupportedError
+
+LXML3 = _etree.LXML_VERSION[0] >= 3
+
+__origin__ = "lxml.etree"
+
+tostring = _etree.tostring
+
+
+warnings.warn(
+ "defusedxml.lxml is no longer supported and will be removed in a future release.",
+ category=DeprecationWarning,
+ stacklevel=2,
+)
+
+
+class RestrictedElement(_etree.ElementBase):
+    """A restricted Element class that filters out instances of some classes"""
+
+    __slots__ = ()
+    # blacklist = (etree._Entity, etree._ProcessingInstruction, etree._Comment)
+    blacklist = _etree._Entity
+
+    def _filter(self, iterator):
+        # Generator that drops any child whose type is on the blacklist.
+        blacklist = self.blacklist
+        for child in iterator:
+            if isinstance(child, blacklist):
+                continue
+            yield child
+
+    # Every iteration entry point below routes through _filter so entity
+    # nodes never escape to the caller.
+    def __iter__(self):
+        iterator = super(RestrictedElement, self).__iter__()
+        return self._filter(iterator)
+
+    def iterchildren(self, tag=None, reversed=False):
+        iterator = super(RestrictedElement, self).iterchildren(tag=tag, reversed=reversed)
+        return self._filter(iterator)
+
+    def iter(self, tag=None, *tags):
+        iterator = super(RestrictedElement, self).iter(tag=tag, *tags)
+        return self._filter(iterator)
+
+    def iterdescendants(self, tag=None, *tags):
+        iterator = super(RestrictedElement, self).iterdescendants(tag=tag, *tags)
+        return self._filter(iterator)
+
+    def itersiblings(self, tag=None, preceding=False):
+        iterator = super(RestrictedElement, self).itersiblings(tag=tag, preceding=preceding)
+        return self._filter(iterator)
+
+    def getchildren(self):
+        iterator = super(RestrictedElement, self).__iter__()
+        return list(self._filter(iterator))
+
+    def getiterator(self, tag=None):
+        iterator = super(RestrictedElement, self).getiterator(tag)
+        return self._filter(iterator)
+
+
+class GlobalParserTLS(threading.local):
+    """Thread local context for custom parser instances"""
+
+    # Subclassing threading.local gives each thread its own _default_parser.
+    parser_config = {
+        "resolve_entities": False,
+        # 'remove_comments': True,
+        # 'remove_pis': True,
+    }
+
+    element_class = RestrictedElement
+
+    def createDefaultParser(self):
+        parser = _etree.XMLParser(**self.parser_config)
+        element_class = self.element_class
+        # Hook the restricted element class into the parser, if configured.
+        if self.element_class is not None:
+            lookup = _etree.ElementDefaultClassLookup(element=element_class)
+            parser.set_element_class_lookup(lookup)
+        return parser
+
+    def setDefaultParser(self, parser):
+        self._default_parser = parser
+
+    def getDefaultParser(self):
+        # Lazily create and cache one parser per thread.
+        parser = getattr(self, "_default_parser", None)
+        if parser is None:
+            parser = self.createDefaultParser()
+            self.setDefaultParser(parser)
+        return parser
+
+
+_parser_tls = GlobalParserTLS()
+getDefaultParser = _parser_tls.getDefaultParser
+
+
+def check_docinfo(elementtree, forbid_dtd=False, forbid_entities=True):
+    """Check docinfo of an element tree for DTD and entity declarations
+
+    The check for entity declarations needs lxml 3 or newer. lxml 2.x does
+    not support dtd.iterentities().
+
+    Raises DTDForbidden, EntitiesForbidden or NotSupportedError.
+    """
+    docinfo = elementtree.docinfo
+    if docinfo.doctype:
+        if forbid_dtd:
+            raise DTDForbidden(docinfo.doctype, docinfo.system_url, docinfo.public_id)
+        if forbid_entities and not LXML3:
+            # lxml < 3 has no iterentities()
+            raise NotSupportedError("Unable to check for entity declarations " "in lxml 2.x")
+
+    if forbid_entities:
+        # Raise on the first entity found in either the internal or the
+        # external DTD subset.
+        for dtd in docinfo.internalDTD, docinfo.externalDTD:
+            if dtd is None:
+                continue
+            for entity in dtd.iterentities():
+                raise EntitiesForbidden(entity.name, entity.content, None, None, None, None)
+
+
+def parse(source, parser=None, base_url=None, forbid_dtd=False, forbid_entities=True):
+    """Parse with lxml.etree and validate the docinfo afterwards."""
+    if parser is None:
+        parser = getDefaultParser()
+    elementtree = _etree.parse(source, parser, base_url=base_url)
+    # The document is checked only after parsing; see module docstring caveats.
+    check_docinfo(elementtree, forbid_dtd, forbid_entities)
+    return elementtree
+
+
+def fromstring(text, parser=None, base_url=None, forbid_dtd=False, forbid_entities=True):
+    """Parse a string with lxml.etree, validate docinfo, return the root."""
+    if parser is None:
+        parser = getDefaultParser()
+    rootelement = _etree.fromstring(text, parser, base_url=base_url)
+    # docinfo lives on the tree, so fetch it from the root element.
+    elementtree = rootelement.getroottree()
+    check_docinfo(elementtree, forbid_dtd, forbid_entities)
+    return rootelement
+
+
+# Alias matching the stdlib ElementTree API.
+XML = fromstring
+
+
+def iterparse(*args, **kwargs):
+    """Always raises NotSupportedError; no defused iterparse exists for lxml."""
+    raise NotSupportedError("defused lxml.etree.iterparse not available")
diff --git a/Voltage Vikings/lib/python3.9/site-packages/defusedxml/minidom.py b/Voltage Vikings/lib/python3.9/site-packages/defusedxml/minidom.py
new file mode 100644
index 00000000..78033b6c
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/defusedxml/minidom.py
@@ -0,0 +1,63 @@
+# defusedxml
+#
+# Copyright (c) 2013 by Christian Heimes
+# Licensed to PSF under a Contributor Agreement.
+# See https://www.python.org/psf/license for licensing details.
+"""Defused xml.dom.minidom
+"""
+from __future__ import print_function, absolute_import
+
+from xml.dom.minidom import _do_pulldom_parse
+from . import expatbuilder as _expatbuilder
+from . import pulldom as _pulldom
+
+__origin__ = "xml.dom.minidom"
+
+
+def parse(
+    file, parser=None, bufsize=None, forbid_dtd=False, forbid_entities=True, forbid_external=True
+):
+    """Parse a file into a DOM by filename or file object."""
+    # Fast path: with no custom parser and no bufsize, build the DOM with
+    # the defused expatbuilder; otherwise fall back to the pulldom route.
+    if parser is None and not bufsize:
+        return _expatbuilder.parse(
+            file,
+            forbid_dtd=forbid_dtd,
+            forbid_entities=forbid_entities,
+            forbid_external=forbid_external,
+        )
+    else:
+        return _do_pulldom_parse(
+            _pulldom.parse,
+            (file,),
+            {
+                "parser": parser,
+                "bufsize": bufsize,
+                "forbid_dtd": forbid_dtd,
+                "forbid_entities": forbid_entities,
+                "forbid_external": forbid_external,
+            },
+        )
+
+
+def parseString(
+    string, parser=None, forbid_dtd=False, forbid_entities=True, forbid_external=True
+):
+    """Parse a file into a DOM from a string."""
+    # Same branch logic as parse(), minus the bufsize handling.
+    if parser is None:
+        return _expatbuilder.parseString(
+            string,
+            forbid_dtd=forbid_dtd,
+            forbid_entities=forbid_entities,
+            forbid_external=forbid_external,
+        )
+    else:
+        return _do_pulldom_parse(
+            _pulldom.parseString,
+            (string,),
+            {
+                "parser": parser,
+                "forbid_dtd": forbid_dtd,
+                "forbid_entities": forbid_entities,
+                "forbid_external": forbid_external,
+            },
+        )
diff --git a/Voltage Vikings/lib/python3.9/site-packages/defusedxml/pulldom.py b/Voltage Vikings/lib/python3.9/site-packages/defusedxml/pulldom.py
new file mode 100644
index 00000000..e3b10a46
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/defusedxml/pulldom.py
@@ -0,0 +1,41 @@
+# defusedxml
+#
+# Copyright (c) 2013 by Christian Heimes
+# Licensed to PSF under a Contributor Agreement.
+# See https://www.python.org/psf/license for licensing details.
+"""Defused xml.dom.pulldom
+"""
+from __future__ import print_function, absolute_import
+
+from xml.dom.pulldom import parse as _parse
+from xml.dom.pulldom import parseString as _parseString
+from .sax import make_parser
+
+__origin__ = "xml.dom.pulldom"
+
+
+def parse(
+    stream_or_string,
+    parser=None,
+    bufsize=None,
+    forbid_dtd=False,
+    forbid_entities=True,
+    forbid_external=True,
+):
+    """Defused xml.dom.pulldom.parse using the defused SAX parser."""
+    if parser is None:
+        parser = make_parser()
+        # forbid_* attributes are read by DefusedExpatParser.reset().
+        parser.forbid_dtd = forbid_dtd
+        parser.forbid_entities = forbid_entities
+        parser.forbid_external = forbid_external
+    return _parse(stream_or_string, parser, bufsize)
+
+
+def parseString(
+    string, parser=None, forbid_dtd=False, forbid_entities=True, forbid_external=True
+):
+    """Defused xml.dom.pulldom.parseString using the defused SAX parser."""
+    if parser is None:
+        parser = make_parser()
+        # forbid_* flags only apply to the parser built here, not to a
+        # caller-supplied parser.
+        parser.forbid_dtd = forbid_dtd
+        parser.forbid_entities = forbid_entities
+        parser.forbid_external = forbid_external
+    return _parseString(string, parser)
diff --git a/Voltage Vikings/lib/python3.9/site-packages/defusedxml/sax.py b/Voltage Vikings/lib/python3.9/site-packages/defusedxml/sax.py
new file mode 100644
index 00000000..b2786f74
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/defusedxml/sax.py
@@ -0,0 +1,60 @@
+# defusedxml
+#
+# Copyright (c) 2013 by Christian Heimes
+# Licensed to PSF under a Contributor Agreement.
+# See https://www.python.org/psf/license for licensing details.
+"""Defused xml.sax
+"""
+from __future__ import print_function, absolute_import
+
+from xml.sax import InputSource as _InputSource
+from xml.sax import ErrorHandler as _ErrorHandler
+
+from . import expatreader
+
+__origin__ = "xml.sax"
+
+
+def parse(
+    source,
+    handler,
+    errorHandler=_ErrorHandler(),
+    forbid_dtd=False,
+    forbid_entities=True,
+    forbid_external=True,
+):
+    """Defused xml.sax.parse: parse *source*, feeding events to *handler*."""
+    parser = make_parser()
+    parser.setContentHandler(handler)
+    parser.setErrorHandler(errorHandler)
+    # Flags are plain attributes, consumed by DefusedExpatParser.reset().
+    parser.forbid_dtd = forbid_dtd
+    parser.forbid_entities = forbid_entities
+    parser.forbid_external = forbid_external
+    parser.parse(source)
+
+
+def parseString(
+    string,
+    handler,
+    errorHandler=_ErrorHandler(),
+    forbid_dtd=False,
+    forbid_entities=True,
+    forbid_external=True,
+):
+    """Defused xml.sax.parseString: parse a bytes *string* via an InputSource."""
+    from io import BytesIO
+
+    # An explicitly passed errorHandler=None is normalized to the default.
+    if errorHandler is None:
+        errorHandler = _ErrorHandler()
+    parser = make_parser()
+    parser.setContentHandler(handler)
+    parser.setErrorHandler(errorHandler)
+    parser.forbid_dtd = forbid_dtd
+    parser.forbid_entities = forbid_entities
+    parser.forbid_external = forbid_external
+
+    # Wrap the byte string so the SAX parser can consume it as a stream.
+    inpsrc = _InputSource()
+    inpsrc.setByteStream(BytesIO(string))
+    parser.parse(inpsrc)
+
+
+def make_parser(parser_list=[]):
+    # parser_list mirrors the xml.sax.make_parser signature but is ignored;
+    # a DefusedExpatParser is always returned.
+    return expatreader.create_parser()
diff --git a/Voltage Vikings/lib/python3.9/site-packages/defusedxml/xmlrpc.py b/Voltage Vikings/lib/python3.9/site-packages/defusedxml/xmlrpc.py
new file mode 100644
index 00000000..fbc674da
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/defusedxml/xmlrpc.py
@@ -0,0 +1,153 @@
+# defusedxml
+#
+# Copyright (c) 2013 by Christian Heimes
+# Licensed to PSF under a Contributor Agreement.
+# See https://www.python.org/psf/license for licensing details.
+"""Defused xmlrpclib
+
+Also defuses gzip bomb
+"""
+from __future__ import print_function, absolute_import
+
+import io
+
+from .common import DTDForbidden, EntitiesForbidden, ExternalReferenceForbidden, PY3
+
+if PY3:
+ __origin__ = "xmlrpc.client"
+ from xmlrpc.client import ExpatParser
+ from xmlrpc import client as xmlrpc_client
+ from xmlrpc import server as xmlrpc_server
+ from xmlrpc.client import gzip_decode as _orig_gzip_decode
+ from xmlrpc.client import GzipDecodedResponse as _OrigGzipDecodedResponse
+else:
+ __origin__ = "xmlrpclib"
+ from xmlrpclib import ExpatParser
+ import xmlrpclib as xmlrpc_client
+
+ xmlrpc_server = None
+ from xmlrpclib import gzip_decode as _orig_gzip_decode
+ from xmlrpclib import GzipDecodedResponse as _OrigGzipDecodedResponse
+
+try:
+ import gzip
+except ImportError: # pragma: no cover
+ gzip = None
+
+
+# Limit maximum request size to prevent resource exhaustion DoS
+# Also used to limit maximum amount of gzip decoded data in order to prevent
+# decompression bombs
+# A value of -1 or smaller disables the limit
+MAX_DATA = 30 * 1024 * 1024 # 30 MB
+
+
+def defused_gzip_decode(data, limit=None):
+    """gzip encoded data -> unencoded data
+
+    Decode data using the gzip content encoding as described in RFC 1952
+    """
+    if not gzip:  # pragma: no cover
+        raise NotImplementedError
+    if limit is None:
+        limit = MAX_DATA  # default cap: 30 MB of decompressed output
+    f = io.BytesIO(data)
+    gzf = gzip.GzipFile(mode="rb", fileobj=f)
+    try:
+        if limit < 0:  # no limit
+            decoded = gzf.read()
+        else:
+            decoded = gzf.read(limit + 1)  # read one extra byte so an over-limit payload is detectable below
+    except IOError:  # pragma: no cover
+        raise ValueError("invalid data")
+    f.close()
+    gzf.close()
+    if limit >= 0 and len(decoded) > limit:
+        raise ValueError("max gzipped payload length exceeded")  # decompression-bomb guard
+    return decoded
+
+
+class DefusedGzipDecodedResponse(gzip.GzipFile if gzip else object):
+    """a file-like object to decode a response encoded with the gzip
+    method, as described in RFC 1952.
+    """
+
+    def __init__(self, response, limit=None):
+        # response doesn't support tell() and read(), required by
+        # GzipFile
+        if not gzip:  # pragma: no cover
+            raise NotImplementedError
+        self.limit = limit = limit if limit is not None else MAX_DATA  # cap on compressed AND decompressed size
+        if limit < 0:  # no limit
+            data = response.read()
+            self.readlength = None  # unused when unlimited
+        else:
+            data = response.read(limit + 1)  # one extra byte so an over-limit body is detectable
+            self.readlength = 0  # running count of decompressed bytes handed out by read()
+        if limit >= 0 and len(data) > limit:
+            raise ValueError("max payload length exceeded")
+        self.stringio = io.BytesIO(data)  # buffer the whole body; GzipFile needs a seekable fileobj
+        gzip.GzipFile.__init__(self, mode="rb", fileobj=self.stringio)
+
+    def read(self, n):
+        if self.limit >= 0:
+            left = self.limit - self.readlength  # decompressed bytes still allowed under the cap
+            n = min(n, left + 1)  # over-read by one so exceeding the cap is observable
+            data = gzip.GzipFile.read(self, n)
+            self.readlength += len(data)
+            if self.readlength > self.limit:
+                raise ValueError("max payload length exceeded")  # decompression-bomb guard
+            return data
+        else:
+            return gzip.GzipFile.read(self, n)
+
+    def close(self):
+        gzip.GzipFile.close(self)
+        self.stringio.close()  # also release the buffered response body
+
+
+class DefusedExpatParser(ExpatParser):
+    def __init__(self, target, forbid_dtd=False, forbid_entities=True, forbid_external=True):
+        ExpatParser.__init__(self, target)
+        self.forbid_dtd = forbid_dtd
+        self.forbid_entities = forbid_entities
+        self.forbid_external = forbid_external
+        parser = self._parser  # underlying pyexpat parser created by ExpatParser
+        if self.forbid_dtd:
+            parser.StartDoctypeDeclHandler = self.defused_start_doctype_decl  # any DOCTYPE raises
+        if self.forbid_entities:
+            parser.EntityDeclHandler = self.defused_entity_decl  # any entity declaration raises
+            parser.UnparsedEntityDeclHandler = self.defused_unparsed_entity_decl
+        if self.forbid_external:
+            parser.ExternalEntityRefHandler = self.defused_external_entity_ref_handler  # any external ref raises
+
+    def defused_start_doctype_decl(self, name, sysid, pubid, has_internal_subset):
+        raise DTDForbidden(name, sysid, pubid)
+
+    def defused_entity_decl(
+        self, name, is_parameter_entity, value, base, sysid, pubid, notation_name
+    ):
+        raise EntitiesForbidden(name, value, base, sysid, pubid, notation_name)
+
+    def defused_unparsed_entity_decl(self, name, base, sysid, pubid, notation_name):
+        # expat 1.2
+        raise EntitiesForbidden(name, None, base, sysid, pubid, notation_name)  # pragma: no cover
+
+    def defused_external_entity_ref_handler(self, context, base, sysid, pubid):
+        raise ExternalReferenceForbidden(context, base, sysid, pubid)
+
+
+def monkey_patch():
+    xmlrpc_client.FastParser = DefusedExpatParser  # xmlrpc prefers FastParser when it is set
+    xmlrpc_client.GzipDecodedResponse = DefusedGzipDecodedResponse
+    xmlrpc_client.gzip_decode = defused_gzip_decode
+    if xmlrpc_server:  # None on Python 2, where only xmlrpclib was imported above
+        xmlrpc_server.gzip_decode = defused_gzip_decode
+
+
+def unmonkey_patch():
+    xmlrpc_client.FastParser = None  # restore default: no fast parser registered
+    xmlrpc_client.GzipDecodedResponse = _OrigGzipDecodedResponse  # originals captured at import time
+    xmlrpc_client.gzip_decode = _orig_gzip_decode
+    if xmlrpc_server:  # None on Python 2 (see import block above)
+        xmlrpc_server.gzip_decode = _orig_gzip_decode
diff --git a/Voltage Vikings/lib/python3.9/site-packages/distutils-precedence.pth b/Voltage Vikings/lib/python3.9/site-packages/distutils-precedence.pth
new file mode 100644
index 00000000..6de4198f
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/distutils-precedence.pth
@@ -0,0 +1 @@
+import os; var = 'SETUPTOOLS_USE_DISTUTILS'; enabled = os.environ.get(var, 'stdlib') == 'local'; enabled and __import__('_distutils_hack').add_shim();
diff --git a/Voltage Vikings/lib/python3.9/site-packages/django/__init__.py b/Voltage Vikings/lib/python3.9/site-packages/django/__init__.py
new file mode 100644
index 00000000..6f566a00
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/django/__init__.py
@@ -0,0 +1,24 @@
+from django.utils.version import get_version
+
+VERSION = (4, 2, 0, "final", 0)
+
+__version__ = get_version(VERSION)
+
+
+def setup(set_prefix=True):
+    """
+    Configure the settings (this happens as a side effect of accessing the
+    first setting), configure logging and populate the app registry.
+    Set the thread-local urlresolvers script prefix if `set_prefix` is True.
+    """
+    from django.apps import apps
+    from django.conf import settings
+    from django.urls import set_script_prefix
+    from django.utils.log import configure_logging
+
+    configure_logging(settings.LOGGING_CONFIG, settings.LOGGING)  # first settings access triggers configuration
+    if set_prefix:
+        set_script_prefix(
+            "/" if settings.FORCE_SCRIPT_NAME is None else settings.FORCE_SCRIPT_NAME
+        )
+    apps.populate(settings.INSTALLED_APPS)  # idempotent (see Apps.populate), so repeated setup() calls are safe
diff --git a/Voltage Vikings/lib/python3.9/site-packages/django/__main__.py b/Voltage Vikings/lib/python3.9/site-packages/django/__main__.py
new file mode 100644
index 00000000..8b96e91e
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/django/__main__.py
@@ -0,0 +1,9 @@
+"""
+Invokes django-admin when the django module is run as a script.
+
+Example: python -m django check
+"""
+from django.core import management
+
+if __name__ == "__main__":
+ management.execute_from_command_line()
diff --git a/Voltage Vikings/lib/python3.9/site-packages/django/apps/__init__.py b/Voltage Vikings/lib/python3.9/site-packages/django/apps/__init__.py
new file mode 100644
index 00000000..96674be7
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/django/apps/__init__.py
@@ -0,0 +1,4 @@
+from .config import AppConfig
+from .registry import apps
+
+__all__ = ["AppConfig", "apps"]
diff --git a/Voltage Vikings/lib/python3.9/site-packages/django/apps/config.py b/Voltage Vikings/lib/python3.9/site-packages/django/apps/config.py
new file mode 100644
index 00000000..28e50e52
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/django/apps/config.py
@@ -0,0 +1,274 @@
+import inspect
+import os
+from importlib import import_module
+
+from django.core.exceptions import ImproperlyConfigured
+from django.utils.functional import cached_property
+from django.utils.module_loading import import_string, module_has_submodule
+
+APPS_MODULE_NAME = "apps"
+MODELS_MODULE_NAME = "models"
+
+
+class AppConfig:
+    """Class representing a Django application and its configuration."""
+
+    def __init__(self, app_name, app_module):
+        # Full Python path to the application e.g. 'django.contrib.admin'.
+        self.name = app_name
+
+        # Root module for the application, e.g. <module 'django.contrib.admin'>.
+        self.module = app_module
+
+        # Reference to the Apps registry that holds this AppConfig. Set by the
+        # registry when it registers the AppConfig instance.
+        self.apps = None
+
+        # The following attributes could be defined at the class level in a
+        # subclass, hence the test-and-set pattern.
+
+        # Last component of the Python path to the application e.g. 'admin'.
+        # This value must be unique across a Django project.
+        if not hasattr(self, "label"):
+            self.label = app_name.rpartition(".")[2]
+        if not self.label.isidentifier():
+            raise ImproperlyConfigured(
+                "The app label '%s' is not a valid Python identifier." % self.label
+            )
+
+        # Human-readable name for the application e.g. "Admin".
+        if not hasattr(self, "verbose_name"):
+            self.verbose_name = self.label.title()
+
+        # Filesystem path to the application directory e.g.
+        # '/path/to/django/contrib/admin'.
+        if not hasattr(self, "path"):
+            self.path = self._path_from_module(app_module)
+
+        # Module containing models, e.g. <module 'django.contrib.admin.models'>; set by import_models().
+        # None if the application doesn't have a models module.
+        self.models_module = None
+
+        # Mapping of lowercase model names to model classes. Initially set to
+        # None to prevent accidental access before import_models() runs.
+        self.models = None
+
+    def __repr__(self):
+        return "<%s: %s>" % (self.__class__.__name__, self.label)
+
+    @cached_property
+    def default_auto_field(self):
+        from django.conf import settings  # read lazily so settings are only required on first access
+
+        return settings.DEFAULT_AUTO_FIELD
+
+    @property
+    def _is_default_auto_field_overridden(self):
+        return self.__class__.default_auto_field is not AppConfig.default_auto_field  # True if a subclass redefined it
+
+    def _path_from_module(self, module):
+        """Attempt to determine app's filesystem path from its module."""
+        # See #21874 for extended discussion of the behavior of this method in
+        # various cases.
+        # Convert to list because __path__ may not support indexing.
+        paths = list(getattr(module, "__path__", []))
+        if len(paths) != 1:
+            filename = getattr(module, "__file__", None)
+            if filename is not None:
+                paths = [os.path.dirname(filename)]
+            else:
+                # For unknown reasons, sometimes the list returned by __path__
+                # contains duplicates that must be removed (#25246).
+                paths = list(set(paths))
+        if len(paths) > 1:
+            raise ImproperlyConfigured(
+                "The app module %r has multiple filesystem locations (%r); "
+                "you must configure this app with an AppConfig subclass "
+                "with a 'path' class attribute." % (module, paths)
+            )
+        elif not paths:
+            raise ImproperlyConfigured(
+                "The app module %r has no filesystem location, "
+                "you must configure this app with an AppConfig subclass "
+                "with a 'path' class attribute." % module
+            )
+        return paths[0]
+
+    @classmethod
+    def create(cls, entry):
+        """
+        Factory that creates an app config from an entry in INSTALLED_APPS.
+        """
+        # create() eventually returns app_config_class(app_name, app_module).
+        app_config_class = None
+        app_name = None
+        app_module = None
+
+        # If import_module succeeds, entry points to the app module.
+        try:
+            app_module = import_module(entry)
+        except Exception:
+            pass  # entry may instead be a dotted path to an AppConfig class; handled below
+        else:
+            # If app_module has an apps submodule that defines a single
+            # AppConfig subclass, use it automatically.
+            # To prevent this, an AppConfig subclass can declare a class
+            # variable default = False.
+            # If the apps module defines more than one AppConfig subclass,
+            # the default one can declare default = True.
+            if module_has_submodule(app_module, APPS_MODULE_NAME):
+                mod_path = "%s.%s" % (entry, APPS_MODULE_NAME)
+                mod = import_module(mod_path)
+                # Check if there's exactly one AppConfig candidate,
+                # excluding those that explicitly define default = False.
+                app_configs = [
+                    (name, candidate)
+                    for name, candidate in inspect.getmembers(mod, inspect.isclass)
+                    if (
+                        issubclass(candidate, cls)
+                        and candidate is not cls
+                        and getattr(candidate, "default", True)
+                    )
+                ]
+                if len(app_configs) == 1:
+                    app_config_class = app_configs[0][1]
+                else:
+                    # Check if there's exactly one AppConfig subclass,
+                    # among those that explicitly define default = True.
+                    app_configs = [
+                        (name, candidate)
+                        for name, candidate in app_configs
+                        if getattr(candidate, "default", False)
+                    ]
+                    if len(app_configs) > 1:
+                        candidates = [repr(name) for name, _ in app_configs]
+                        raise RuntimeError(
+                            "%r declares more than one default AppConfig: "
+                            "%s." % (mod_path, ", ".join(candidates))
+                        )
+                    elif len(app_configs) == 1:
+                        app_config_class = app_configs[0][1]
+
+            # Use the default app config class if we didn't find anything.
+            if app_config_class is None:
+                app_config_class = cls
+                app_name = entry
+
+        # If import_string succeeds, entry is an app config class.
+        if app_config_class is None:
+            try:
+                app_config_class = import_string(entry)
+            except Exception:
+                pass  # both interpretations failed; error reporting happens below
+        # If both import_module and import_string failed, it means that entry
+        # doesn't have a valid value.
+        if app_module is None and app_config_class is None:
+            # If the last component of entry starts with an uppercase letter,
+            # then it was likely intended to be an app config class; if not,
+            # an app module. Provide a nice error message in both cases.
+            mod_path, _, cls_name = entry.rpartition(".")
+            if mod_path and cls_name[0].isupper():
+                # We could simply re-trigger the string import exception, but
+                # we're going the extra mile and providing a better error
+                # message for typos in INSTALLED_APPS.
+                # This may raise ImportError, which is the best exception
+                # possible if the module at mod_path cannot be imported.
+                mod = import_module(mod_path)
+                candidates = [
+                    repr(name)
+                    for name, candidate in inspect.getmembers(mod, inspect.isclass)
+                    if issubclass(candidate, cls) and candidate is not cls
+                ]
+                msg = "Module '%s' does not contain a '%s' class." % (
+                    mod_path,
+                    cls_name,
+                )
+                if candidates:
+                    msg += " Choices are: %s." % ", ".join(candidates)
+                raise ImportError(msg)
+            else:
+                # Re-trigger the module import exception.
+                import_module(entry)
+
+        # Check for obvious errors. (This check prevents duck typing, but
+        # it could be removed if it became a problem in practice.)
+        if not issubclass(app_config_class, AppConfig):
+            raise ImproperlyConfigured("'%s' isn't a subclass of AppConfig." % entry)
+
+        # Obtain app name here rather than in AppClass.__init__ to keep
+        # all error checking for entries in INSTALLED_APPS in one place.
+        if app_name is None:
+            try:
+                app_name = app_config_class.name
+            except AttributeError:
+                raise ImproperlyConfigured("'%s' must supply a name attribute." % entry)
+
+        # Ensure app_name points to a valid module.
+        try:
+            app_module = import_module(app_name)
+        except ImportError:
+            raise ImproperlyConfigured(
+                "Cannot import '%s'. Check that '%s.%s.name' is correct."
+                % (
+                    app_name,
+                    app_config_class.__module__,
+                    app_config_class.__qualname__,
+                )
+            )
+
+        # Entry is a path to an app config class.
+        return app_config_class(app_name, app_module)
+
+    def get_model(self, model_name, require_ready=True):
+        """
+        Return the model with the given case-insensitive model_name.
+
+        Raise LookupError if no model exists with this name.
+        """
+        if require_ready:
+            self.apps.check_models_ready()
+        else:
+            self.apps.check_apps_ready()  # weaker guarantee: apps imported, models possibly not
+        try:
+            return self.models[model_name.lower()]
+        except KeyError:
+            raise LookupError(
+                "App '%s' doesn't have a '%s' model." % (self.label, model_name)
+            )
+
+    def get_models(self, include_auto_created=False, include_swapped=False):
+        """
+        Return an iterable of models.
+
+        By default, the following models aren't included:
+
+        - auto-created models for many-to-many relations without
+          an explicit intermediate table,
+        - models that have been swapped out.
+
+        Set the corresponding keyword argument to True to include such models.
+        Keyword arguments aren't documented; they're a private API.
+        """
+        self.apps.check_models_ready()
+        for model in self.models.values():
+            if model._meta.auto_created and not include_auto_created:
+                continue
+            if model._meta.swapped and not include_swapped:
+                continue
+            yield model
+
+    def import_models(self):
+        # Dictionary of models for this app, primarily maintained in the
+        # 'all_models' attribute of the Apps this AppConfig is attached to.
+        self.models = self.apps.all_models[self.label]
+
+        if module_has_submodule(self.module, MODELS_MODULE_NAME):
+            models_module_name = "%s.%s" % (self.name, MODELS_MODULE_NAME)
+            self.models_module = import_module(models_module_name)  # importing registers the models
+
+    def ready(self):
+        """
+        Override this method in subclasses to run code when Django starts.
+        """
diff --git a/Voltage Vikings/lib/python3.9/site-packages/django/apps/registry.py b/Voltage Vikings/lib/python3.9/site-packages/django/apps/registry.py
new file mode 100644
index 00000000..4abf828c
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/django/apps/registry.py
@@ -0,0 +1,436 @@
+import functools
+import sys
+import threading
+import warnings
+from collections import Counter, defaultdict
+from functools import partial
+
+from django.core.exceptions import AppRegistryNotReady, ImproperlyConfigured
+
+from .config import AppConfig
+
+
+class Apps:
+    """
+    A registry that stores the configuration of installed applications.
+
+    It also keeps track of models, e.g. to provide reverse relations.
+    """
+
+    def __init__(self, installed_apps=()):
+        # installed_apps is set to None when creating the main registry
+        # because it cannot be populated at that point. Other registries must
+        # provide a list of installed apps and are populated immediately.
+        if installed_apps is None and hasattr(sys.modules[__name__], "apps"):
+            raise RuntimeError("You must supply an installed_apps argument.")
+
+        # Mapping of app labels => model names => model classes. Every time a
+        # model is imported, ModelBase.__new__ calls apps.register_model which
+        # creates an entry in all_models. All imported models are registered,
+        # regardless of whether they're defined in an installed application
+        # and whether the registry has been populated. Since it isn't possible
+        # to reimport a module safely (it could reexecute initialization code)
+        # all_models is never overridden or reset.
+        self.all_models = defaultdict(dict)
+
+        # Mapping of labels to AppConfig instances for installed apps.
+        self.app_configs = {}
+
+        # Stack of app_configs. Used to store the current state in
+        # set_available_apps and set_installed_apps.
+        self.stored_app_configs = []
+
+        # Whether the registry is populated.
+        self.apps_ready = self.models_ready = self.ready = False
+        # For the autoreloader.
+        self.ready_event = threading.Event()
+
+        # Lock for thread-safe population.
+        self._lock = threading.RLock()
+        self.loading = False  # guards against reentrant populate() calls
+
+        # Maps ("app_label", "modelname") tuples to lists of functions to be
+        # called when the corresponding model is ready. Used by this class's
+        # `lazy_model_operation()` and `do_pending_operations()` methods.
+        self._pending_operations = defaultdict(list)
+
+        # Populate apps and models, unless it's the main registry.
+        if installed_apps is not None:
+            self.populate(installed_apps)
+
+    def populate(self, installed_apps=None):
+        """
+        Load application configurations and models.
+
+        Import each application module and then each model module.
+
+        It is thread-safe and idempotent, but not reentrant.
+        """
+        if self.ready:
+            return
+
+        # populate() might be called by two threads in parallel on servers
+        # that create threads before initializing the WSGI callable.
+        with self._lock:
+            if self.ready:
+                return  # another thread finished population while we waited for the lock
+
+            # An RLock prevents other threads from entering this section. The
+            # compare and set operation below is atomic.
+            if self.loading:
+                # Prevent reentrant calls to avoid running AppConfig.ready()
+                # methods twice.
+                raise RuntimeError("populate() isn't reentrant")
+            self.loading = True
+
+            # Phase 1: initialize app configs and import app modules.
+            for entry in installed_apps:
+                if isinstance(entry, AppConfig):
+                    app_config = entry
+                else:
+                    app_config = AppConfig.create(entry)
+                if app_config.label in self.app_configs:
+                    raise ImproperlyConfigured(
+                        "Application labels aren't unique, "
+                        "duplicates: %s" % app_config.label
+                    )
+
+                self.app_configs[app_config.label] = app_config
+                app_config.apps = self  # back-reference used by AppConfig.get_model(s)
+
+            # Check for duplicate app names.
+            counts = Counter(
+                app_config.name for app_config in self.app_configs.values()
+            )
+            duplicates = [name for name, count in counts.most_common() if count > 1]
+            if duplicates:
+                raise ImproperlyConfigured(
+                    "Application names aren't unique, "
+                    "duplicates: %s" % ", ".join(duplicates)
+                )
+
+            self.apps_ready = True
+
+            # Phase 2: import models modules.
+            for app_config in self.app_configs.values():
+                app_config.import_models()
+
+            self.clear_cache()
+
+            self.models_ready = True
+
+            # Phase 3: run ready() methods of app configs.
+            for app_config in self.get_app_configs():
+                app_config.ready()
+
+            self.ready = True
+            self.ready_event.set()  # wakes the autoreloader waiting on this event
+
+    def check_apps_ready(self):
+        """Raise an exception if all apps haven't been imported yet."""
+        if not self.apps_ready:
+            from django.conf import settings
+
+            # If "not ready" is due to unconfigured settings, accessing
+            # INSTALLED_APPS raises a more helpful ImproperlyConfigured
+            # exception.
+            settings.INSTALLED_APPS
+            raise AppRegistryNotReady("Apps aren't loaded yet.")
+
+    def check_models_ready(self):
+        """Raise an exception if all models haven't been imported yet."""
+        if not self.models_ready:
+            raise AppRegistryNotReady("Models aren't loaded yet.")
+
+    def get_app_configs(self):
+        """Import applications and return an iterable of app configs."""
+        self.check_apps_ready()
+        return self.app_configs.values()
+
+    def get_app_config(self, app_label):
+        """
+        Import applications and returns an app config for the given label.
+
+        Raise LookupError if no application exists with this label.
+        """
+        self.check_apps_ready()
+        try:
+            return self.app_configs[app_label]
+        except KeyError:
+            message = "No installed app with label '%s'." % app_label
+            for app_config in self.get_app_configs():
+                if app_config.name == app_label:  # user probably passed the dotted name, not the label
+                    message += " Did you mean '%s'?" % app_config.label
+                    break
+            raise LookupError(message)
+
+    # This method is performance-critical at least for Django's test suite.
+    @functools.lru_cache(maxsize=None)
+    def get_models(self, include_auto_created=False, include_swapped=False):
+        """
+        Return a list of all installed models.
+
+        By default, the following models aren't included:
+
+        - auto-created models for many-to-many relations without
+          an explicit intermediate table,
+        - models that have been swapped out.
+
+        Set the corresponding keyword argument to True to include such models.
+        """
+        self.check_models_ready()
+
+        result = []
+        for app_config in self.app_configs.values():
+            result.extend(app_config.get_models(include_auto_created, include_swapped))
+        return result
+
+    def get_model(self, app_label, model_name=None, require_ready=True):
+        """
+        Return the model matching the given app_label and model_name.
+
+        As a shortcut, app_label may be in the form 'app_label.model_name'.
+
+        model_name is case-insensitive.
+
+        Raise LookupError if no application exists with this label, or no
+        model exists with this name in the application. Raise ValueError if
+        called with a single argument that doesn't contain exactly one dot.
+        """
+        if require_ready:
+            self.check_models_ready()
+        else:
+            self.check_apps_ready()
+
+        if model_name is None:
+            app_label, model_name = app_label.split(".")  # ValueError here if not exactly one dot
+
+        app_config = self.get_app_config(app_label)
+
+        if not require_ready and app_config.models is None:
+            app_config.import_models()
+
+        return app_config.get_model(model_name, require_ready=require_ready)
+
+    def register_model(self, app_label, model):
+        # Since this method is called when models are imported, it cannot
+        # perform imports because of the risk of import loops. It mustn't
+        # call get_app_config().
+        model_name = model._meta.model_name
+        app_models = self.all_models[app_label]
+        if model_name in app_models:
+            if (
+                model.__name__ == app_models[model_name].__name__
+                and model.__module__ == app_models[model_name].__module__
+            ):
+                warnings.warn(
+                    "Model '%s.%s' was already registered. Reloading models is not "
+                    "advised as it can lead to inconsistencies, most notably with "
+                    "related models." % (app_label, model_name),
+                    RuntimeWarning,
+                    stacklevel=2,
+                )
+            else:
+                raise RuntimeError(
+                    "Conflicting '%s' models in application '%s': %s and %s."
+                    % (model_name, app_label, app_models[model_name], model)
+                )
+        app_models[model_name] = model
+        self.do_pending_operations(model)  # run callbacks queued via lazy_model_operation()
+        self.clear_cache()
+
+    def is_installed(self, app_name):
+        """
+        Check whether an application with this name exists in the registry.
+
+        app_name is the full name of the app e.g. 'django.contrib.admin'.
+        """
+        self.check_apps_ready()
+        return any(ac.name == app_name for ac in self.app_configs.values())
+
+    def get_containing_app_config(self, object_name):
+        """
+        Look for an app config containing a given object.
+
+        object_name is the dotted Python path to the object.
+
+        Return the app config for the inner application in case of nesting.
+        Return None if the object isn't in any registered app config.
+        """
+        self.check_apps_ready()
+        candidates = []
+        for app_config in self.app_configs.values():
+            if object_name.startswith(app_config.name):
+                subpath = object_name[len(app_config.name) :]
+                if subpath == "" or subpath[0] == ".":  # exact match or a true submodule boundary
+                    candidates.append(app_config)
+        if candidates:
+            return sorted(candidates, key=lambda ac: -len(ac.name))[0]  # longest name = innermost app
+
+    def get_registered_model(self, app_label, model_name):
+        """
+        Similar to get_model(), but doesn't require that an app exists with
+        the given app_label.
+
+        It's safe to call this method at import time, even while the registry
+        is being populated.
+        """
+        model = self.all_models[app_label].get(model_name.lower())
+        if model is None:
+            raise LookupError("Model '%s.%s' not registered." % (app_label, model_name))
+        return model
+
+    @functools.lru_cache(maxsize=None)
+    def get_swappable_settings_name(self, to_string):
+        """
+        For a given model string (e.g. "auth.User"), return the name of the
+        corresponding settings name if it refers to a swappable model. If the
+        referred model is not swappable, return None.
+
+        This method is decorated with lru_cache because it's performance
+        critical when it comes to migrations. Since the swappable settings don't
+        change after Django has loaded the settings, there is no reason to get
+        the respective settings attribute over and over again.
+        """
+        to_string = to_string.lower()
+        for model in self.get_models(include_swapped=True):
+            swapped = model._meta.swapped
+            # Is this model swapped out for the model given by to_string?
+            if swapped and swapped.lower() == to_string:
+                return model._meta.swappable
+            # Is this model swappable and the one given by to_string?
+            if model._meta.swappable and model._meta.label_lower == to_string:
+                return model._meta.swappable
+        return None
+
+    def set_available_apps(self, available):
+        """
+        Restrict the set of installed apps used by get_app_config[s].
+
+        available must be an iterable of application names.
+
+        set_available_apps() must be balanced with unset_available_apps().
+
+        Primarily used for performance optimization in TransactionTestCase.
+
+        This method is safe in the sense that it doesn't trigger any imports.
+        """
+        available = set(available)
+        installed = {app_config.name for app_config in self.get_app_configs()}
+        if not available.issubset(installed):
+            raise ValueError(
+                "Available apps isn't a subset of installed apps, extra apps: %s"
+                % ", ".join(available - installed)
+            )
+
+        self.stored_app_configs.append(self.app_configs)  # push current state for unset_available_apps()
+        self.app_configs = {
+            label: app_config
+            for label, app_config in self.app_configs.items()
+            if app_config.name in available
+        }
+        self.clear_cache()
+
+    def unset_available_apps(self):
+        """Cancel a previous call to set_available_apps()."""
+        self.app_configs = self.stored_app_configs.pop()
+        self.clear_cache()
+
+    def set_installed_apps(self, installed):
+        """
+        Enable a different set of installed apps for get_app_config[s].
+
+        installed must be an iterable in the same format as INSTALLED_APPS.
+
+        set_installed_apps() must be balanced with unset_installed_apps(),
+        even if it exits with an exception.
+
+        Primarily used as a receiver of the setting_changed signal in tests.
+
+        This method may trigger new imports, which may add new models to the
+        registry of all imported models. They will stay in the registry even
+        after unset_installed_apps(). Since it isn't possible to replay
+        imports safely (e.g. that could lead to registering listeners twice),
+        models are registered when they're imported and never removed.
+        """
+        if not self.ready:
+            raise AppRegistryNotReady("App registry isn't ready yet.")
+        self.stored_app_configs.append(self.app_configs)
+        self.app_configs = {}
+        self.apps_ready = self.models_ready = self.loading = self.ready = False  # reset so populate() reruns fully
+        self.clear_cache()
+        self.populate(installed)
+
+    def unset_installed_apps(self):
+        """Cancel a previous call to set_installed_apps()."""
+        self.app_configs = self.stored_app_configs.pop()
+        self.apps_ready = self.models_ready = self.ready = True
+        self.clear_cache()
+
+    def clear_cache(self):
+        """
+        Clear all internal caches, for methods that alter the app registry.
+
+        This is mostly used in tests.
+        """
+        # Call expire cache on each model. This will purge
+        # the relation tree and the fields cache.
+        self.get_models.cache_clear()
+        if self.ready:
+            # Circumvent self.get_models() to prevent that the cache is refilled.
+            # This particularly prevents that an empty value is cached while cloning.
+            for app_config in self.app_configs.values():
+                for model in app_config.get_models(include_auto_created=True):
+                    model._meta._expire_cache()
+
+    def lazy_model_operation(self, function, *model_keys):
+        """
+        Take a function and a number of ("app_label", "modelname") tuples, and
+        when all the corresponding models have been imported and registered,
+        call the function with the model classes as its arguments.
+
+        The function passed to this method must accept exactly n models as
+        arguments, where n=len(model_keys).
+        """
+        # Base case: no arguments, just execute the function.
+        if not model_keys:
+            function()
+        # Recursive case: take the head of model_keys, wait for the
+        # corresponding model class to be imported and registered, then apply
+        # that argument to the supplied function. Pass the resulting partial
+        # to lazy_model_operation() along with the remaining model args and
+        # repeat until all models are loaded and all arguments are applied.
+        else:
+            next_model, *more_models = model_keys
+
+            # This will be executed after the class corresponding to next_model
+            # has been imported and registered. The `func` attribute provides
+            # duck-type compatibility with partials.
+            def apply_next_model(model):
+                next_function = partial(apply_next_model.func, model)
+                self.lazy_model_operation(next_function, *more_models)
+
+            apply_next_model.func = function
+
+            # If the model has already been imported and registered, partially
+            # apply it to the function now. If not, add it to the list of
+            # pending operations for the model, where it will be executed with
+            # the model class as its sole argument once the model is ready.
+            try:
+                model_class = self.get_registered_model(*next_model)
+            except LookupError:
+                self._pending_operations[next_model].append(apply_next_model)
+            else:
+                apply_next_model(model_class)
+
+    def do_pending_operations(self, model):
+        """
+        Take a newly-prepared model and pass it to each function waiting for
+        it. This is called at the very end of Apps.register_model().
+        """
+        key = model._meta.app_label, model._meta.model_name
+        for function in self._pending_operations.pop(key, []):
+            function(model)
+
+
+apps = Apps(installed_apps=None)  # the master registry; left unpopulated here and filled in by django.setup()
diff --git a/Voltage Vikings/lib/python3.9/site-packages/django/conf/__init__.py b/Voltage Vikings/lib/python3.9/site-packages/django/conf/__init__.py
new file mode 100644
index 00000000..7849abbd
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/django/conf/__init__.py
@@ -0,0 +1,369 @@
+"""
+Settings and configuration for Django.
+
+Read values from the module specified by the DJANGO_SETTINGS_MODULE environment
+variable, and then from django.conf.global_settings; see the global_settings.py
+for a list of all possible variables.
+"""
+
+import importlib
+import os
+import time
+import traceback
+import warnings
+from pathlib import Path
+
+import django
+from django.conf import global_settings
+from django.core.exceptions import ImproperlyConfigured
+from django.utils.deprecation import RemovedInDjango50Warning, RemovedInDjango51Warning
+from django.utils.functional import LazyObject, empty
+
# Name of the environment variable that points at the project's settings module.
ENVIRONMENT_VARIABLE = "DJANGO_SETTINGS_MODULE"
# Aliases into the STORAGES setting for the media and static-files backends.
DEFAULT_STORAGE_ALIAS = "default"
STATICFILES_STORAGE_ALIAS = "staticfiles"
+
+# RemovedInDjango50Warning
+USE_DEPRECATED_PYTZ_DEPRECATED_MSG = (
+ "The USE_DEPRECATED_PYTZ setting, and support for pytz timezones is "
+ "deprecated in favor of the stdlib zoneinfo module. Please update your "
+ "code to use zoneinfo and remove the USE_DEPRECATED_PYTZ setting."
+)
+
+USE_L10N_DEPRECATED_MSG = (
+ "The USE_L10N setting is deprecated. Starting with Django 5.0, localized "
+ "formatting of data will always be enabled. For example Django will "
+ "display numbers and dates using the format of the current locale."
+)
+
+CSRF_COOKIE_MASKED_DEPRECATED_MSG = (
+ "The CSRF_COOKIE_MASKED transitional setting is deprecated. Support for "
+ "it will be removed in Django 5.0."
+)
+
+DEFAULT_FILE_STORAGE_DEPRECATED_MSG = (
+ "The DEFAULT_FILE_STORAGE setting is deprecated. Use STORAGES instead."
+)
+
+STATICFILES_STORAGE_DEPRECATED_MSG = (
+ "The STATICFILES_STORAGE setting is deprecated. Use STORAGES instead."
+)
+
+
class SettingsReference(str):
    """
    String subclass which references a current settings value. It's treated as
    the value in memory but serializes to a settings.NAME attribute reference.
    """

    def __new__(self, value, setting_name):
        # Immutable str: the text content must be fixed in __new__.
        return super().__new__(self, value)

    def __init__(self, value, setting_name):
        # Remember which setting this string came from so serializers can
        # emit "settings.<setting_name>" instead of the literal value.
        self.setting_name = setting_name
+
+
class LazySettings(LazyObject):
    """
    A lazy proxy for either global Django settings or a custom settings object.
    The user can manually configure settings prior to using them. Otherwise,
    Django uses the settings module pointed to by DJANGO_SETTINGS_MODULE.
    """

    def _setup(self, name=None):
        """
        Load the settings module pointed to by the environment variable. This
        is used the first time settings are needed, if the user hasn't
        configured settings manually.

        Raises ImproperlyConfigured when DJANGO_SETTINGS_MODULE is unset.
        """
        settings_module = os.environ.get(ENVIRONMENT_VARIABLE)
        if not settings_module:
            desc = ("setting %s" % name) if name else "settings"
            raise ImproperlyConfigured(
                "Requested %s, but settings are not configured. "
                "You must either define the environment variable %s "
                "or call settings.configure() before accessing settings."
                % (desc, ENVIRONMENT_VARIABLE)
            )

        self._wrapped = Settings(settings_module)

    def __repr__(self):
        # Hardcode the class name as otherwise it yields 'Settings'.
        # BUGFIX: both return values had lost their bracketed text (this copy
        # returned "" / '' % {...}); restored to match Settings.__repr__ and
        # UserSettingsHolder.__repr__ below.
        if self._wrapped is empty:
            return "<LazySettings [Unevaluated]>"
        return '<LazySettings "%(settings_module)s">' % {
            "settings_module": self._wrapped.SETTINGS_MODULE,
        }

    def __getattr__(self, name):
        """Return the value of a setting and cache it in self.__dict__."""
        if (_wrapped := self._wrapped) is empty:
            self._setup(name)
            _wrapped = self._wrapped
        val = getattr(_wrapped, name)

        # Special case some settings which require further modification.
        # This is done here for performance reasons so the modified value is cached.
        if name in {"MEDIA_URL", "STATIC_URL"} and val is not None:
            val = self._add_script_prefix(val)
        elif name == "SECRET_KEY" and not val:
            raise ImproperlyConfigured("The SECRET_KEY setting must not be empty.")

        self.__dict__[name] = val
        return val

    def __setattr__(self, name, value):
        """
        Set the value of setting. Clear all cached values if _wrapped changes
        (@override_settings does this) or clear single values when set.
        """
        if name == "_wrapped":
            self.__dict__.clear()
        else:
            self.__dict__.pop(name, None)
        super().__setattr__(name, value)

    def __delattr__(self, name):
        """Delete a setting and clear it from cache if needed."""
        super().__delattr__(name)
        self.__dict__.pop(name, None)

    def configure(self, default_settings=global_settings, **options):
        """
        Called to manually configure the settings. The 'default_settings'
        parameter sets where to retrieve any unspecified values from (its
        argument must support attribute access (__getattr__)).

        Raises RuntimeError if settings are already configured and TypeError
        for non-uppercase setting names.
        """
        if self._wrapped is not empty:
            raise RuntimeError("Settings already configured.")
        holder = UserSettingsHolder(default_settings)
        for name, value in options.items():
            if not name.isupper():
                raise TypeError("Setting %r must be uppercase." % name)
            setattr(holder, name, value)
        self._wrapped = holder

    @staticmethod
    def _add_script_prefix(value):
        """
        Add SCRIPT_NAME prefix to relative paths.

        Useful when the app is being served at a subpath and manually prefixing
        subpath to STATIC_URL and MEDIA_URL in settings is inconvenient.
        """
        # Don't apply prefix to absolute paths and URLs.
        if value.startswith(("http://", "https://", "/")):
            return value
        # Imported lazily to avoid a circular import at module load time.
        from django.urls import get_script_prefix

        return "%s%s" % (get_script_prefix(), value)

    @property
    def configured(self):
        """Return True if the settings have already been configured."""
        return self._wrapped is not empty

    def _show_deprecation_warning(self, message, category):
        """Emit *message* only when the deprecated setting is read from outside Django."""
        stack = traceback.extract_stack()
        # Show a warning if the setting is used outside of Django.
        # Stack index: -1 this line, -2 the property, -3 the
        # LazyObject __getattribute__(), -4 the caller.
        filename, _, _, _ = stack[-4]
        if not filename.startswith(os.path.dirname(django.__file__)):
            warnings.warn(message, category, stacklevel=2)

    @property
    def USE_L10N(self):
        self._show_deprecation_warning(
            USE_L10N_DEPRECATED_MSG, RemovedInDjango50Warning
        )
        return self.__getattr__("USE_L10N")

    # RemovedInDjango50Warning.
    @property
    def _USE_L10N_INTERNAL(self):
        # Special hook to avoid checking a traceback in internal use on hot
        # paths.
        return self.__getattr__("USE_L10N")

    # RemovedInDjango51Warning.
    @property
    def DEFAULT_FILE_STORAGE(self):
        self._show_deprecation_warning(
            DEFAULT_FILE_STORAGE_DEPRECATED_MSG, RemovedInDjango51Warning
        )
        return self.__getattr__("DEFAULT_FILE_STORAGE")

    # RemovedInDjango51Warning.
    @property
    def STATICFILES_STORAGE(self):
        self._show_deprecation_warning(
            STATICFILES_STORAGE_DEPRECATED_MSG, RemovedInDjango51Warning
        )
        return self.__getattr__("STATICFILES_STORAGE")
+
+
class Settings:
    """
    Concrete settings holder: global_settings defaults overlaid with the
    uppercase attributes of the module named by DJANGO_SETTINGS_MODULE.
    """

    def __init__(self, settings_module):
        """
        Import *settings_module* and copy its uppercase attributes onto self,
        validating list-typed settings and emitting deprecation warnings.

        Note: has import-time side effects — may set os.environ["TZ"] and
        call time.tzset() when TIME_ZONE is configured.
        """
        # update this dict from global settings (but only for ALL_CAPS settings)
        for setting in dir(global_settings):
            if setting.isupper():
                setattr(self, setting, getattr(global_settings, setting))

        # store the settings module in case someone later cares
        self.SETTINGS_MODULE = settings_module

        mod = importlib.import_module(self.SETTINGS_MODULE)

        # Settings that must be sequences; a bare string here is a common
        # user error ("INSTALLED_APPS = 'myapp'") that is rejected early.
        tuple_settings = (
            "ALLOWED_HOSTS",
            "INSTALLED_APPS",
            "TEMPLATE_DIRS",
            "LOCALE_PATHS",
            "SECRET_KEY_FALLBACKS",
        )
        # Track which settings the user module set explicitly (vs. defaults).
        self._explicit_settings = set()
        for setting in dir(mod):
            if setting.isupper():
                setting_value = getattr(mod, setting)

                if setting in tuple_settings and not isinstance(
                    setting_value, (list, tuple)
                ):
                    raise ImproperlyConfigured(
                        "The %s setting must be a list or a tuple." % setting
                    )
                setattr(self, setting, setting_value)
                self._explicit_settings.add(setting)

        # Warn only when USE_TZ was left at its (changing) implicit default.
        if self.USE_TZ is False and not self.is_overridden("USE_TZ"):
            warnings.warn(
                "The default value of USE_TZ will change from False to True "
                "in Django 5.0. Set USE_TZ to False in your project settings "
                "if you want to keep the current default behavior.",
                category=RemovedInDjango50Warning,
            )

        if self.is_overridden("USE_DEPRECATED_PYTZ"):
            warnings.warn(USE_DEPRECATED_PYTZ_DEPRECATED_MSG, RemovedInDjango50Warning)

        if self.is_overridden("CSRF_COOKIE_MASKED"):
            warnings.warn(CSRF_COOKIE_MASKED_DEPRECATED_MSG, RemovedInDjango50Warning)

        if hasattr(time, "tzset") and self.TIME_ZONE:
            # When we can, attempt to validate the timezone. If we can't find
            # this file, no check happens and it's harmless.
            zoneinfo_root = Path("/usr/share/zoneinfo")
            zone_info_file = zoneinfo_root.joinpath(*self.TIME_ZONE.split("/"))
            if zoneinfo_root.exists() and not zone_info_file.exists():
                raise ValueError("Incorrect timezone setting: %s" % self.TIME_ZONE)
            # Move the time zone info into os.environ. See ticket #2315 for why
            # we don't do this unconditionally (breaks Windows).
            os.environ["TZ"] = self.TIME_ZONE
            time.tzset()

        if self.is_overridden("USE_L10N"):
            warnings.warn(USE_L10N_DEPRECATED_MSG, RemovedInDjango50Warning)

        # The legacy single-backend settings and the new STORAGES dict are
        # mutually exclusive; setting both is an error, not a deprecation.
        if self.is_overridden("DEFAULT_FILE_STORAGE"):
            if self.is_overridden("STORAGES"):
                raise ImproperlyConfigured(
                    "DEFAULT_FILE_STORAGE/STORAGES are mutually exclusive."
                )
            warnings.warn(DEFAULT_FILE_STORAGE_DEPRECATED_MSG, RemovedInDjango51Warning)

        if self.is_overridden("STATICFILES_STORAGE"):
            if self.is_overridden("STORAGES"):
                raise ImproperlyConfigured(
                    "STATICFILES_STORAGE/STORAGES are mutually exclusive."
                )
            warnings.warn(STATICFILES_STORAGE_DEPRECATED_MSG, RemovedInDjango51Warning)

    def is_overridden(self, setting):
        """Return True if *setting* was set explicitly by the settings module."""
        return setting in self._explicit_settings

    def __repr__(self):
        return '<%(cls)s "%(settings_module)s">' % {
            "cls": self.__class__.__name__,
            "settings_module": self.SETTINGS_MODULE,
        }
+
+
class UserSettingsHolder:
    """Holder for user configured settings."""

    # SETTINGS_MODULE doesn't make much sense in the manually configured
    # (standalone) case.
    SETTINGS_MODULE = None

    def __init__(self, default_settings):
        """
        Requests for configuration variables not in this class are satisfied
        from the module specified in default_settings (if possible).
        """
        # Write _deleted via __dict__ directly: going through setattr would
        # hit our own __setattr__, which reads self._deleted before it exists.
        self.__dict__["_deleted"] = set()
        self.default_settings = default_settings

    def __getattr__(self, name):
        # Only uppercase names fall through to the defaults; deleted names
        # stay deleted even if the default settings still define them.
        if not name.isupper() or name in self._deleted:
            raise AttributeError
        return getattr(self.default_settings, name)

    def __setattr__(self, name, value):
        # Re-setting a name undoes a prior delattr.
        self._deleted.discard(name)
        if name == "USE_L10N":
            warnings.warn(USE_L10N_DEPRECATED_MSG, RemovedInDjango50Warning)
        if name == "CSRF_COOKIE_MASKED":
            warnings.warn(CSRF_COOKIE_MASKED_DEPRECATED_MSG, RemovedInDjango50Warning)
        if name == "DEFAULT_FILE_STORAGE":
            # NOTE(review): this runs before super().__setattr__ below, so
            # self.DEFAULT_FILE_STORAGE here is the PRE-assignment value (from
            # default_settings or an earlier set) — confirm against upstream
            # intent before changing.
            self.STORAGES[DEFAULT_STORAGE_ALIAS] = {
                "BACKEND": self.DEFAULT_FILE_STORAGE
            }
            warnings.warn(DEFAULT_FILE_STORAGE_DEPRECATED_MSG, RemovedInDjango51Warning)
        if name == "STATICFILES_STORAGE":
            # NOTE(review): same pre-assignment read as DEFAULT_FILE_STORAGE above.
            self.STORAGES[STATICFILES_STORAGE_ALIAS] = {
                "BACKEND": self.STATICFILES_STORAGE
            }
            warnings.warn(STATICFILES_STORAGE_DEPRECATED_MSG, RemovedInDjango51Warning)
        super().__setattr__(name, value)
        if name == "USE_DEPRECATED_PYTZ":
            warnings.warn(USE_DEPRECATED_PYTZ_DEPRECATED_MSG, RemovedInDjango50Warning)
        # RemovedInDjango51Warning.
        if name == "STORAGES":
            # Guarantee both well-known aliases exist in a user-supplied dict.
            self.STORAGES.setdefault(
                DEFAULT_STORAGE_ALIAS,
                {"BACKEND": "django.core.files.storage.FileSystemStorage"},
            )
            self.STORAGES.setdefault(
                STATICFILES_STORAGE_ALIAS,
                {"BACKEND": "django.contrib.staticfiles.storage.StaticFilesStorage"},
            )

    def __delattr__(self, name):
        # Record the deletion so __getattr__ stops falling back to defaults;
        # only remove the local attribute if one was actually set here.
        self._deleted.add(name)
        if hasattr(self, name):
            super().__delattr__(name)

    def __dir__(self):
        # Local names plus the defaults' names, minus anything deleted.
        return sorted(
            s
            for s in [*self.__dict__, *dir(self.default_settings)]
            if s not in self._deleted
        )

    def is_overridden(self, setting):
        """Return True if *setting* was set, deleted, or overridden upstream."""
        deleted = setting in self._deleted
        set_locally = setting in self.__dict__
        set_on_default = getattr(
            self.default_settings, "is_overridden", lambda s: False
        )(setting)
        return deleted or set_locally or set_on_default

    def __repr__(self):
        return "<%(cls)s>" % {
            "cls": self.__class__.__name__,
        }
+
+
+settings = LazySettings()
diff --git a/Voltage Vikings/lib/python3.9/site-packages/django/conf/app_template/__init__.py-tpl b/Voltage Vikings/lib/python3.9/site-packages/django/conf/app_template/__init__.py-tpl
new file mode 100644
index 00000000..e69de29b
diff --git a/Voltage Vikings/lib/python3.9/site-packages/django/conf/app_template/admin.py-tpl b/Voltage Vikings/lib/python3.9/site-packages/django/conf/app_template/admin.py-tpl
new file mode 100644
index 00000000..8c38f3f3
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/django/conf/app_template/admin.py-tpl
@@ -0,0 +1,3 @@
+from django.contrib import admin
+
+# Register your models here.
diff --git a/Voltage Vikings/lib/python3.9/site-packages/django/conf/app_template/apps.py-tpl b/Voltage Vikings/lib/python3.9/site-packages/django/conf/app_template/apps.py-tpl
new file mode 100644
index 00000000..b7053521
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/django/conf/app_template/apps.py-tpl
@@ -0,0 +1,6 @@
+from django.apps import AppConfig
+
+
+class {{ camel_case_app_name }}Config(AppConfig):
+ default_auto_field = 'django.db.models.BigAutoField'
+ name = '{{ app_name }}'
diff --git a/Voltage Vikings/lib/python3.9/site-packages/django/conf/app_template/migrations/__init__.py-tpl b/Voltage Vikings/lib/python3.9/site-packages/django/conf/app_template/migrations/__init__.py-tpl
new file mode 100644
index 00000000..e69de29b
diff --git a/Voltage Vikings/lib/python3.9/site-packages/django/conf/app_template/models.py-tpl b/Voltage Vikings/lib/python3.9/site-packages/django/conf/app_template/models.py-tpl
new file mode 100644
index 00000000..71a83623
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/django/conf/app_template/models.py-tpl
@@ -0,0 +1,3 @@
+from django.db import models
+
+# Create your models here.
diff --git a/Voltage Vikings/lib/python3.9/site-packages/django/conf/app_template/tests.py-tpl b/Voltage Vikings/lib/python3.9/site-packages/django/conf/app_template/tests.py-tpl
new file mode 100644
index 00000000..7ce503c2
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/django/conf/app_template/tests.py-tpl
@@ -0,0 +1,3 @@
+from django.test import TestCase
+
+# Create your tests here.
diff --git a/Voltage Vikings/lib/python3.9/site-packages/django/conf/app_template/views.py-tpl b/Voltage Vikings/lib/python3.9/site-packages/django/conf/app_template/views.py-tpl
new file mode 100644
index 00000000..91ea44a2
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/django/conf/app_template/views.py-tpl
@@ -0,0 +1,3 @@
+from django.shortcuts import render
+
+# Create your views here.
diff --git a/Voltage Vikings/lib/python3.9/site-packages/django/conf/global_settings.py b/Voltage Vikings/lib/python3.9/site-packages/django/conf/global_settings.py
new file mode 100644
index 00000000..c2550262
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/django/conf/global_settings.py
@@ -0,0 +1,681 @@
+"""
+Default Django settings. Override these with settings in the module pointed to
+by the DJANGO_SETTINGS_MODULE environment variable.
+"""
+
+
# This is defined here as a do-nothing function because we can't import
# django.utils.translation -- that module depends on the settings.
def gettext_noop(s):
    """Return *s* unchanged; used below to mark strings as translatable."""
    return s
+
+
+####################
+# CORE #
+####################
+
+DEBUG = False
+
+# Whether the framework should propagate raw exceptions rather than catching
+# them. This is useful under some testing situations and should never be used
+# on a live site.
+DEBUG_PROPAGATE_EXCEPTIONS = False
+
+# People who get code error notifications. In the format
+# [('Full Name', 'email@example.com'), ('Full Name', 'anotheremail@example.com')]
+ADMINS = []
+
+# List of IP addresses, as strings, that:
+# * See debug comments, when DEBUG is true
+# * Receive x-headers
+INTERNAL_IPS = []
+
+# Hosts/domain names that are valid for this site.
+# "*" matches anything, ".example.com" matches example.com and all subdomains
+ALLOWED_HOSTS = []
+
+# Local time zone for this installation. All choices can be found here:
+# https://en.wikipedia.org/wiki/List_of_tz_zones_by_name (although not all
+# systems may support all possibilities). When USE_TZ is True, this is
+# interpreted as the default user time zone.
+TIME_ZONE = "America/Chicago"
+
+# If you set this to True, Django will use timezone-aware datetimes.
+USE_TZ = False
+
+# RemovedInDjango50Warning: It's a transitional setting helpful in migrating
+# from pytz tzinfo to ZoneInfo(). Set True to continue using pytz tzinfo
+# objects during the Django 4.x release cycle.
+USE_DEPRECATED_PYTZ = False
+
+# Language code for this installation. All choices can be found here:
+# http://www.i18nguy.com/unicode/language-identifiers.html
+LANGUAGE_CODE = "en-us"
+
+# Languages we provide translations for, out of the box.
+LANGUAGES = [
+ ("af", gettext_noop("Afrikaans")),
+ ("ar", gettext_noop("Arabic")),
+ ("ar-dz", gettext_noop("Algerian Arabic")),
+ ("ast", gettext_noop("Asturian")),
+ ("az", gettext_noop("Azerbaijani")),
+ ("bg", gettext_noop("Bulgarian")),
+ ("be", gettext_noop("Belarusian")),
+ ("bn", gettext_noop("Bengali")),
+ ("br", gettext_noop("Breton")),
+ ("bs", gettext_noop("Bosnian")),
+ ("ca", gettext_noop("Catalan")),
+ ("ckb", gettext_noop("Central Kurdish (Sorani)")),
+ ("cs", gettext_noop("Czech")),
+ ("cy", gettext_noop("Welsh")),
+ ("da", gettext_noop("Danish")),
+ ("de", gettext_noop("German")),
+ ("dsb", gettext_noop("Lower Sorbian")),
+ ("el", gettext_noop("Greek")),
+ ("en", gettext_noop("English")),
+ ("en-au", gettext_noop("Australian English")),
+ ("en-gb", gettext_noop("British English")),
+ ("eo", gettext_noop("Esperanto")),
+ ("es", gettext_noop("Spanish")),
+ ("es-ar", gettext_noop("Argentinian Spanish")),
+ ("es-co", gettext_noop("Colombian Spanish")),
+ ("es-mx", gettext_noop("Mexican Spanish")),
+ ("es-ni", gettext_noop("Nicaraguan Spanish")),
+ ("es-ve", gettext_noop("Venezuelan Spanish")),
+ ("et", gettext_noop("Estonian")),
+ ("eu", gettext_noop("Basque")),
+ ("fa", gettext_noop("Persian")),
+ ("fi", gettext_noop("Finnish")),
+ ("fr", gettext_noop("French")),
+ ("fy", gettext_noop("Frisian")),
+ ("ga", gettext_noop("Irish")),
+ ("gd", gettext_noop("Scottish Gaelic")),
+ ("gl", gettext_noop("Galician")),
+ ("he", gettext_noop("Hebrew")),
+ ("hi", gettext_noop("Hindi")),
+ ("hr", gettext_noop("Croatian")),
+ ("hsb", gettext_noop("Upper Sorbian")),
+ ("hu", gettext_noop("Hungarian")),
+ ("hy", gettext_noop("Armenian")),
+ ("ia", gettext_noop("Interlingua")),
+ ("id", gettext_noop("Indonesian")),
+ ("ig", gettext_noop("Igbo")),
+ ("io", gettext_noop("Ido")),
+ ("is", gettext_noop("Icelandic")),
+ ("it", gettext_noop("Italian")),
+ ("ja", gettext_noop("Japanese")),
+ ("ka", gettext_noop("Georgian")),
+ ("kab", gettext_noop("Kabyle")),
+ ("kk", gettext_noop("Kazakh")),
+ ("km", gettext_noop("Khmer")),
+ ("kn", gettext_noop("Kannada")),
+ ("ko", gettext_noop("Korean")),
+ ("ky", gettext_noop("Kyrgyz")),
+ ("lb", gettext_noop("Luxembourgish")),
+ ("lt", gettext_noop("Lithuanian")),
+ ("lv", gettext_noop("Latvian")),
+ ("mk", gettext_noop("Macedonian")),
+ ("ml", gettext_noop("Malayalam")),
+ ("mn", gettext_noop("Mongolian")),
+ ("mr", gettext_noop("Marathi")),
+ ("ms", gettext_noop("Malay")),
+ ("my", gettext_noop("Burmese")),
+ ("nb", gettext_noop("Norwegian Bokmål")),
+ ("ne", gettext_noop("Nepali")),
+ ("nl", gettext_noop("Dutch")),
+ ("nn", gettext_noop("Norwegian Nynorsk")),
+ ("os", gettext_noop("Ossetic")),
+ ("pa", gettext_noop("Punjabi")),
+ ("pl", gettext_noop("Polish")),
+ ("pt", gettext_noop("Portuguese")),
+ ("pt-br", gettext_noop("Brazilian Portuguese")),
+ ("ro", gettext_noop("Romanian")),
+ ("ru", gettext_noop("Russian")),
+ ("sk", gettext_noop("Slovak")),
+ ("sl", gettext_noop("Slovenian")),
+ ("sq", gettext_noop("Albanian")),
+ ("sr", gettext_noop("Serbian")),
+ ("sr-latn", gettext_noop("Serbian Latin")),
+ ("sv", gettext_noop("Swedish")),
+ ("sw", gettext_noop("Swahili")),
+ ("ta", gettext_noop("Tamil")),
+ ("te", gettext_noop("Telugu")),
+ ("tg", gettext_noop("Tajik")),
+ ("th", gettext_noop("Thai")),
+ ("tk", gettext_noop("Turkmen")),
+ ("tr", gettext_noop("Turkish")),
+ ("tt", gettext_noop("Tatar")),
+ ("udm", gettext_noop("Udmurt")),
+ ("uk", gettext_noop("Ukrainian")),
+ ("ur", gettext_noop("Urdu")),
+ ("uz", gettext_noop("Uzbek")),
+ ("vi", gettext_noop("Vietnamese")),
+ ("zh-hans", gettext_noop("Simplified Chinese")),
+ ("zh-hant", gettext_noop("Traditional Chinese")),
+]
+
+# Languages using BiDi (right-to-left) layout
+LANGUAGES_BIDI = ["he", "ar", "ar-dz", "ckb", "fa", "ur"]
+
+# If you set this to False, Django will make some optimizations so as not
+# to load the internationalization machinery.
+USE_I18N = True
+LOCALE_PATHS = []
+
+# Settings for language cookie
+LANGUAGE_COOKIE_NAME = "django_language"
+LANGUAGE_COOKIE_AGE = None
+LANGUAGE_COOKIE_DOMAIN = None
+LANGUAGE_COOKIE_PATH = "/"
+LANGUAGE_COOKIE_SECURE = False
+LANGUAGE_COOKIE_HTTPONLY = False
+LANGUAGE_COOKIE_SAMESITE = None
+
+
+# If you set this to True, Django will format dates, numbers and calendars
+# according to the user's current locale.
+USE_L10N = True
+
+# Not-necessarily-technical managers of the site. They get broken link
+# notifications and other various emails.
+MANAGERS = ADMINS
+
+# Default charset to use for all HttpResponse objects, if a MIME type isn't
+# manually specified. It's used to construct the Content-Type header.
+DEFAULT_CHARSET = "utf-8"
+
+# Email address that error messages come from.
+SERVER_EMAIL = "root@localhost"
+
+# Database connection info. If left empty, will default to the dummy backend.
+DATABASES = {}
+
+# Classes used to implement DB routing behavior.
+DATABASE_ROUTERS = []
+
+# The email backend to use. For possible shortcuts see django.core.mail.
+# The default is to use the SMTP backend.
+# Third-party backends can be specified by providing a Python path
+# to a module that defines an EmailBackend class.
+EMAIL_BACKEND = "django.core.mail.backends.smtp.EmailBackend"
+
+# Host for sending email.
+EMAIL_HOST = "localhost"
+
+# Port for sending email.
+EMAIL_PORT = 25
+
+# Whether to send SMTP 'Date' header in the local time zone or in UTC.
+EMAIL_USE_LOCALTIME = False
+
+# Optional SMTP authentication information for EMAIL_HOST.
+EMAIL_HOST_USER = ""
+EMAIL_HOST_PASSWORD = ""
+EMAIL_USE_TLS = False
+EMAIL_USE_SSL = False
+EMAIL_SSL_CERTFILE = None
+EMAIL_SSL_KEYFILE = None
+EMAIL_TIMEOUT = None
+
+# List of strings representing installed apps.
+INSTALLED_APPS = []
+
+TEMPLATES = []
+
+# Default form rendering class.
+FORM_RENDERER = "django.forms.renderers.DjangoTemplates"
+
+# Default email address to use for various automated correspondence from
+# the site managers.
+DEFAULT_FROM_EMAIL = "webmaster@localhost"
+
+# Subject-line prefix for email messages sent with django.core.mail.mail_admins
+# or ...mail_managers. Make sure to include the trailing space.
+EMAIL_SUBJECT_PREFIX = "[Django] "
+
+# Whether to append trailing slashes to URLs.
+APPEND_SLASH = True
+
+# Whether to prepend the "www." subdomain to URLs that don't have it.
+PREPEND_WWW = False
+
+# Override the server-derived value of SCRIPT_NAME
+FORCE_SCRIPT_NAME = None
+
+# List of compiled regular expression objects representing User-Agent strings
+# that are not allowed to visit any page, systemwide. Use this for bad
+# robots/crawlers. Here are a few examples:
+# import re
+# DISALLOWED_USER_AGENTS = [
+# re.compile(r'^NaverBot.*'),
+# re.compile(r'^EmailSiphon.*'),
+# re.compile(r'^SiteSucker.*'),
+# re.compile(r'^sohu-search'),
+# ]
+DISALLOWED_USER_AGENTS = []
+
+ABSOLUTE_URL_OVERRIDES = {}
+
+# List of compiled regular expression objects representing URLs that need not
+# be reported by BrokenLinkEmailsMiddleware. Here are a few examples:
+# import re
+# IGNORABLE_404_URLS = [
+# re.compile(r'^/apple-touch-icon.*\.png$'),
+# re.compile(r'^/favicon.ico$'),
+# re.compile(r'^/robots.txt$'),
+# re.compile(r'^/phpmyadmin/'),
+# re.compile(r'\.(cgi|php|pl)$'),
+# ]
+IGNORABLE_404_URLS = []
+
+# A secret key for this particular Django installation. Used in secret-key
+# hashing algorithms. Set this in your settings, or Django will complain
+# loudly.
+SECRET_KEY = ""
+
+# List of secret keys used to verify the validity of signatures. This allows
+# secret key rotation.
+SECRET_KEY_FALLBACKS = []
+
# Default file storage mechanism that holds media.
DEFAULT_FILE_STORAGE = "django.core.files.storage.FileSystemStorage"

# Storage backends keyed by alias: "default" serves user-uploaded media,
# "staticfiles" serves collected static assets. Supersedes the legacy
# DEFAULT_FILE_STORAGE/STATICFILES_STORAGE settings above.
STORAGES = {
    "default": {
        "BACKEND": "django.core.files.storage.FileSystemStorage",
    },
    "staticfiles": {
        "BACKEND": "django.contrib.staticfiles.storage.StaticFilesStorage",
    },
}
+
+# Absolute filesystem path to the directory that will hold user-uploaded files.
+# Example: "/var/www/example.com/media/"
+MEDIA_ROOT = ""
+
+# URL that handles the media served from MEDIA_ROOT.
+# Examples: "http://example.com/media/", "http://media.example.com/"
+MEDIA_URL = ""
+
+# Absolute path to the directory static files should be collected to.
+# Example: "/var/www/example.com/static/"
+STATIC_ROOT = None
+
+# URL that handles the static files served from STATIC_ROOT.
+# Example: "http://example.com/static/", "http://static.example.com/"
+STATIC_URL = None
+
+# List of upload handler classes to be applied in order.
+FILE_UPLOAD_HANDLERS = [
+ "django.core.files.uploadhandler.MemoryFileUploadHandler",
+ "django.core.files.uploadhandler.TemporaryFileUploadHandler",
+]
+
+# Maximum size, in bytes, of a request before it will be streamed to the
+# file system instead of into memory.
+FILE_UPLOAD_MAX_MEMORY_SIZE = 2621440 # i.e. 2.5 MB
+
+# Maximum size in bytes of request data (excluding file uploads) that will be
+# read before a SuspiciousOperation (RequestDataTooBig) is raised.
+DATA_UPLOAD_MAX_MEMORY_SIZE = 2621440 # i.e. 2.5 MB
+
+# Maximum number of GET/POST parameters that will be read before a
+# SuspiciousOperation (TooManyFieldsSent) is raised.
+DATA_UPLOAD_MAX_NUMBER_FIELDS = 1000
+
+# Maximum number of files encoded in a multipart upload that will be read
+# before a SuspiciousOperation (TooManyFilesSent) is raised.
+DATA_UPLOAD_MAX_NUMBER_FILES = 100
+
+# Directory in which upload streamed files will be temporarily saved. A value of
+# `None` will make Django use the operating system's default temporary directory
+# (i.e. "/tmp" on *nix systems).
+FILE_UPLOAD_TEMP_DIR = None
+
+# The numeric mode to set newly-uploaded files to. The value should be a mode
+# you'd pass directly to os.chmod; see
+# https://docs.python.org/library/os.html#files-and-directories.
+FILE_UPLOAD_PERMISSIONS = 0o644
+
+# The numeric mode to assign to newly-created directories, when uploading files.
+# The value should be a mode as you'd pass to os.chmod;
+# see https://docs.python.org/library/os.html#files-and-directories.
+FILE_UPLOAD_DIRECTORY_PERMISSIONS = None
+
+# Python module path where user will place custom format definition.
+# The directory where this setting is pointing should contain subdirectories
+# named as the locales, containing a formats.py file
+# (i.e. "myproject.locale" for myproject/locale/en/formats.py etc. use)
+FORMAT_MODULE_PATH = None
+
+# Default formatting for date objects. See all available format strings here:
+# https://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
+DATE_FORMAT = "N j, Y"
+
+# Default formatting for datetime objects. See all available format strings here:
+# https://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
+DATETIME_FORMAT = "N j, Y, P"
+
+# Default formatting for time objects. See all available format strings here:
+# https://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
+TIME_FORMAT = "P"
+
+# Default formatting for date objects when only the year and month are relevant.
+# See all available format strings here:
+# https://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
+YEAR_MONTH_FORMAT = "F Y"
+
+# Default formatting for date objects when only the month and day are relevant.
+# See all available format strings here:
+# https://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
+MONTH_DAY_FORMAT = "F j"
+
+# Default short formatting for date objects. See all available format strings here:
+# https://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
+SHORT_DATE_FORMAT = "m/d/Y"
+
+# Default short formatting for datetime objects.
+# See all available format strings here:
+# https://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
+SHORT_DATETIME_FORMAT = "m/d/Y P"
+
+# Default formats to be used when parsing dates from input boxes, in order
+# See all available format string here:
+# https://docs.python.org/library/datetime.html#strftime-behavior
+# * Note that these format strings are different from the ones to display dates
+DATE_INPUT_FORMATS = [
+ "%Y-%m-%d", # '2006-10-25'
+ "%m/%d/%Y", # '10/25/2006'
+ "%m/%d/%y", # '10/25/06'
+ "%b %d %Y", # 'Oct 25 2006'
+ "%b %d, %Y", # 'Oct 25, 2006'
+ "%d %b %Y", # '25 Oct 2006'
+ "%d %b, %Y", # '25 Oct, 2006'
+ "%B %d %Y", # 'October 25 2006'
+ "%B %d, %Y", # 'October 25, 2006'
+ "%d %B %Y", # '25 October 2006'
+ "%d %B, %Y", # '25 October, 2006'
+]
+
+# Default formats to be used when parsing times from input boxes, in order
+# See all available format string here:
+# https://docs.python.org/library/datetime.html#strftime-behavior
+# * Note that these format strings are different from the ones to display dates
+TIME_INPUT_FORMATS = [
+ "%H:%M:%S", # '14:30:59'
+ "%H:%M:%S.%f", # '14:30:59.000200'
+ "%H:%M", # '14:30'
+]
+
+# Default formats to be used when parsing dates and times from input boxes,
+# in order
+# See all available format string here:
+# https://docs.python.org/library/datetime.html#strftime-behavior
+# * Note that these format strings are different from the ones to display dates
+DATETIME_INPUT_FORMATS = [
+ "%Y-%m-%d %H:%M:%S", # '2006-10-25 14:30:59'
+ "%Y-%m-%d %H:%M:%S.%f", # '2006-10-25 14:30:59.000200'
+ "%Y-%m-%d %H:%M", # '2006-10-25 14:30'
+ "%m/%d/%Y %H:%M:%S", # '10/25/2006 14:30:59'
+ "%m/%d/%Y %H:%M:%S.%f", # '10/25/2006 14:30:59.000200'
+ "%m/%d/%Y %H:%M", # '10/25/2006 14:30'
+ "%m/%d/%y %H:%M:%S", # '10/25/06 14:30:59'
+ "%m/%d/%y %H:%M:%S.%f", # '10/25/06 14:30:59.000200'
+ "%m/%d/%y %H:%M", # '10/25/06 14:30'
+]
+
+# First day of week, to be used on calendars
+# 0 means Sunday, 1 means Monday...
+FIRST_DAY_OF_WEEK = 0
+
+# Decimal separator symbol
+DECIMAL_SEPARATOR = "."
+
+# Boolean that sets whether to add thousand separator when formatting numbers
+USE_THOUSAND_SEPARATOR = False
+
+# Number of digits that will be together, when splitting them by
+# THOUSAND_SEPARATOR. 0 means no grouping, 3 means splitting by thousands...
+NUMBER_GROUPING = 0
+
+# Thousand separator symbol
+THOUSAND_SEPARATOR = ","
+
+# The tablespaces to use for each model when not specified otherwise.
+DEFAULT_TABLESPACE = ""
+DEFAULT_INDEX_TABLESPACE = ""
+
+# Default primary key field type.
+DEFAULT_AUTO_FIELD = "django.db.models.AutoField"
+
+# Default X-Frame-Options header value
+X_FRAME_OPTIONS = "DENY"
+
+USE_X_FORWARDED_HOST = False
+USE_X_FORWARDED_PORT = False
+
+# The Python dotted path to the WSGI application that Django's internal server
+# (runserver) will use. If `None`, the return value of
+# 'django.core.wsgi.get_wsgi_application' is used, thus preserving the same
+# behavior as previous versions of Django. Otherwise this should point to an
+# actual WSGI application object.
+WSGI_APPLICATION = None
+
+# If your Django app is behind a proxy that sets a header to specify secure
+# connections, AND that proxy ensures that user-submitted headers with the
+# same name are ignored (so that people can't spoof it), set this value to
+# a tuple of (header_name, header_value). For any requests that come in with
+# that header/value, request.is_secure() will return True.
+# WARNING! Only set this if you fully understand what you're doing. Otherwise,
+# you may be opening yourself up to a security risk.
+SECURE_PROXY_SSL_HEADER = None
+
+##############
+# MIDDLEWARE #
+##############
+
+# List of middleware to use. Order is important; in the request phase, these
+# middleware will be applied in the order given, and in the response
+# phase the middleware will be applied in reverse order.
+MIDDLEWARE = []
+
+############
+# SESSIONS #
+############
+
+# Cache to store session data if using the cache session backend.
+SESSION_CACHE_ALIAS = "default"
+# Cookie name. This can be whatever you want.
+SESSION_COOKIE_NAME = "sessionid"
+# Age of cookie, in seconds (default: 2 weeks).
+SESSION_COOKIE_AGE = 60 * 60 * 24 * 7 * 2
+# A string like "example.com", or None for standard domain cookie.
+SESSION_COOKIE_DOMAIN = None
+# Whether the session cookie should be secure (https:// only).
+SESSION_COOKIE_SECURE = False
+# The path of the session cookie.
+SESSION_COOKIE_PATH = "/"
+# Whether to use the HttpOnly flag.
+SESSION_COOKIE_HTTPONLY = True
+# Whether to set the flag restricting cookie leaks on cross-site requests.
+# This can be 'Lax', 'Strict', 'None', or False to disable the flag.
+SESSION_COOKIE_SAMESITE = "Lax"
+# Whether to save the session data on every request.
+SESSION_SAVE_EVERY_REQUEST = False
+# Whether a user's session cookie expires when the web browser is closed.
+SESSION_EXPIRE_AT_BROWSER_CLOSE = False
+# The module to store session data
+SESSION_ENGINE = "django.contrib.sessions.backends.db"
+# Directory to store session files if using the file session module. If None,
+# the backend will use a sensible default.
+SESSION_FILE_PATH = None
+# class to serialize session data
+SESSION_SERIALIZER = "django.contrib.sessions.serializers.JSONSerializer"
+
+#########
+# CACHE #
+#########
+
+# The cache backends to use.
+CACHES = {
+ "default": {
+ "BACKEND": "django.core.cache.backends.locmem.LocMemCache",
+ }
+}
+CACHE_MIDDLEWARE_KEY_PREFIX = ""
+CACHE_MIDDLEWARE_SECONDS = 600
+CACHE_MIDDLEWARE_ALIAS = "default"
+
+##################
+# AUTHENTICATION #
+##################
+
+AUTH_USER_MODEL = "auth.User"
+
+AUTHENTICATION_BACKENDS = ["django.contrib.auth.backends.ModelBackend"]
+
+LOGIN_URL = "/accounts/login/"
+
+LOGIN_REDIRECT_URL = "/accounts/profile/"
+
+LOGOUT_REDIRECT_URL = None
+
+# The number of seconds a password reset link is valid for (default: 3 days).
+PASSWORD_RESET_TIMEOUT = 60 * 60 * 24 * 3
+
+# the first hasher in this list is the preferred algorithm. any
+# password using different algorithms will be converted automatically
+# upon login
+PASSWORD_HASHERS = [
+ "django.contrib.auth.hashers.PBKDF2PasswordHasher",
+ "django.contrib.auth.hashers.PBKDF2SHA1PasswordHasher",
+ "django.contrib.auth.hashers.Argon2PasswordHasher",
+ "django.contrib.auth.hashers.BCryptSHA256PasswordHasher",
+ "django.contrib.auth.hashers.ScryptPasswordHasher",
+]
+
+AUTH_PASSWORD_VALIDATORS = []
+
+###########
+# SIGNING #
+###########
+
+SIGNING_BACKEND = "django.core.signing.TimestampSigner"
+
+########
+# CSRF #
+########
+
+# Dotted path to callable to be used as view when a request is
+# rejected by the CSRF middleware.
+CSRF_FAILURE_VIEW = "django.views.csrf.csrf_failure"
+
+# Settings for CSRF cookie.
+CSRF_COOKIE_NAME = "csrftoken"
+CSRF_COOKIE_AGE = 60 * 60 * 24 * 7 * 52
+CSRF_COOKIE_DOMAIN = None
+CSRF_COOKIE_PATH = "/"
+CSRF_COOKIE_SECURE = False
+CSRF_COOKIE_HTTPONLY = False
+CSRF_COOKIE_SAMESITE = "Lax"
+CSRF_HEADER_NAME = "HTTP_X_CSRFTOKEN"
+CSRF_TRUSTED_ORIGINS = []
+CSRF_USE_SESSIONS = False
+
+# Whether to mask CSRF cookie value. It's a transitional setting helpful in
+# migrating multiple instance of the same project to Django 4.1+.
+CSRF_COOKIE_MASKED = False
+
+############
+# MESSAGES #
+############
+
+# Class to use as messages backend
+MESSAGE_STORAGE = "django.contrib.messages.storage.fallback.FallbackStorage"
+
+# Default values of MESSAGE_LEVEL and MESSAGE_TAGS are defined within
+# django.contrib.messages to avoid imports in this settings file.
+
+###########
+# LOGGING #
+###########
+
+# The callable to use to configure logging
+LOGGING_CONFIG = "logging.config.dictConfig"
+
+# Custom logging configuration.
+LOGGING = {}
+
+# Default exception reporter class used in case none has been
+# specifically assigned to the HttpRequest instance.
+DEFAULT_EXCEPTION_REPORTER = "django.views.debug.ExceptionReporter"
+
+# Default exception reporter filter class used in case none has been
+# specifically assigned to the HttpRequest instance.
+DEFAULT_EXCEPTION_REPORTER_FILTER = "django.views.debug.SafeExceptionReporterFilter"
+
+###########
+# TESTING #
+###########
+
+# The name of the class to use to run the test suite
+TEST_RUNNER = "django.test.runner.DiscoverRunner"
+
+# Apps that don't need to be serialized at test database creation time
+# (only apps with migrations are to start with)
+TEST_NON_SERIALIZED_APPS = []
+
+############
+# FIXTURES #
+############
+
+# The list of directories to search for fixtures
+FIXTURE_DIRS = []
+
+###############
+# STATICFILES #
+###############
+
+# A list of locations of additional static files
+STATICFILES_DIRS = []
+
+# The default file storage backend used during the build process
+STATICFILES_STORAGE = "django.contrib.staticfiles.storage.StaticFilesStorage"
+
+# List of finder classes that know how to find static files in
+# various locations.
+STATICFILES_FINDERS = [
+ "django.contrib.staticfiles.finders.FileSystemFinder",
+ "django.contrib.staticfiles.finders.AppDirectoriesFinder",
+ # 'django.contrib.staticfiles.finders.DefaultStorageFinder',
+]
+
+##############
+# MIGRATIONS #
+##############
+
+# Migration module overrides for apps, by app label.
+MIGRATION_MODULES = {}
+
+#################
+# SYSTEM CHECKS #
+#################
+
+# List of all issues generated by system checks that should be silenced. Light
+# issues like warnings, infos or debugs will not generate a message. Silencing
+# serious issues like errors and criticals does not result in hiding the
+# message, but Django will not stop you from e.g. running server.
+SILENCED_SYSTEM_CHECKS = []
+
+#######################
+# SECURITY MIDDLEWARE #
+#######################
+SECURE_CONTENT_TYPE_NOSNIFF = True
+SECURE_CROSS_ORIGIN_OPENER_POLICY = "same-origin"
+SECURE_HSTS_INCLUDE_SUBDOMAINS = False
+SECURE_HSTS_PRELOAD = False
+SECURE_HSTS_SECONDS = 0
+SECURE_REDIRECT_EXEMPT = []
+SECURE_REFERRER_POLICY = "same-origin"
+SECURE_SSL_HOST = None
+SECURE_SSL_REDIRECT = False
diff --git a/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/__init__.py b/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/__init__.py
new file mode 100644
index 00000000..c679ec3a
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/__init__.py
@@ -0,0 +1,623 @@
+"""
+LANG_INFO is a dictionary structure to provide meta information about languages.
+
+About name_local: capitalize it as if your language name was appearing
+inside a sentence in your language.
+The 'fallback' key can be used to specify a special fallback logic which doesn't
+follow the traditional 'fr-ca' -> 'fr' fallback logic.
+"""
+
+LANG_INFO = {
+ "af": {
+ "bidi": False,
+ "code": "af",
+ "name": "Afrikaans",
+ "name_local": "Afrikaans",
+ },
+ "ar": {
+ "bidi": True,
+ "code": "ar",
+ "name": "Arabic",
+ "name_local": "العربيّة",
+ },
+ "ar-dz": {
+ "bidi": True,
+ "code": "ar-dz",
+ "name": "Algerian Arabic",
+ "name_local": "العربية الجزائرية",
+ },
+ "ast": {
+ "bidi": False,
+ "code": "ast",
+ "name": "Asturian",
+ "name_local": "asturianu",
+ },
+ "az": {
+ "bidi": True,
+ "code": "az",
+ "name": "Azerbaijani",
+ "name_local": "Azərbaycanca",
+ },
+ "be": {
+ "bidi": False,
+ "code": "be",
+ "name": "Belarusian",
+ "name_local": "беларуская",
+ },
+ "bg": {
+ "bidi": False,
+ "code": "bg",
+ "name": "Bulgarian",
+ "name_local": "български",
+ },
+ "bn": {
+ "bidi": False,
+ "code": "bn",
+ "name": "Bengali",
+ "name_local": "বাংলা",
+ },
+ "br": {
+ "bidi": False,
+ "code": "br",
+ "name": "Breton",
+ "name_local": "brezhoneg",
+ },
+ "bs": {
+ "bidi": False,
+ "code": "bs",
+ "name": "Bosnian",
+ "name_local": "bosanski",
+ },
+ "ca": {
+ "bidi": False,
+ "code": "ca",
+ "name": "Catalan",
+ "name_local": "català",
+ },
+ "ckb": {
+ "bidi": True,
+ "code": "ckb",
+ "name": "Central Kurdish (Sorani)",
+ "name_local": "کوردی",
+ },
+ "cs": {
+ "bidi": False,
+ "code": "cs",
+ "name": "Czech",
+ "name_local": "česky",
+ },
+ "cy": {
+ "bidi": False,
+ "code": "cy",
+ "name": "Welsh",
+ "name_local": "Cymraeg",
+ },
+ "da": {
+ "bidi": False,
+ "code": "da",
+ "name": "Danish",
+ "name_local": "dansk",
+ },
+ "de": {
+ "bidi": False,
+ "code": "de",
+ "name": "German",
+ "name_local": "Deutsch",
+ },
+ "dsb": {
+ "bidi": False,
+ "code": "dsb",
+ "name": "Lower Sorbian",
+ "name_local": "dolnoserbski",
+ },
+ "el": {
+ "bidi": False,
+ "code": "el",
+ "name": "Greek",
+ "name_local": "Ελληνικά",
+ },
+ "en": {
+ "bidi": False,
+ "code": "en",
+ "name": "English",
+ "name_local": "English",
+ },
+ "en-au": {
+ "bidi": False,
+ "code": "en-au",
+ "name": "Australian English",
+ "name_local": "Australian English",
+ },
+ "en-gb": {
+ "bidi": False,
+ "code": "en-gb",
+ "name": "British English",
+ "name_local": "British English",
+ },
+ "eo": {
+ "bidi": False,
+ "code": "eo",
+ "name": "Esperanto",
+ "name_local": "Esperanto",
+ },
+ "es": {
+ "bidi": False,
+ "code": "es",
+ "name": "Spanish",
+ "name_local": "español",
+ },
+ "es-ar": {
+ "bidi": False,
+ "code": "es-ar",
+ "name": "Argentinian Spanish",
+ "name_local": "español de Argentina",
+ },
+ "es-co": {
+ "bidi": False,
+ "code": "es-co",
+ "name": "Colombian Spanish",
+ "name_local": "español de Colombia",
+ },
+ "es-mx": {
+ "bidi": False,
+ "code": "es-mx",
+ "name": "Mexican Spanish",
+ "name_local": "español de Mexico",
+ },
+ "es-ni": {
+ "bidi": False,
+ "code": "es-ni",
+ "name": "Nicaraguan Spanish",
+ "name_local": "español de Nicaragua",
+ },
+ "es-ve": {
+ "bidi": False,
+ "code": "es-ve",
+ "name": "Venezuelan Spanish",
+ "name_local": "español de Venezuela",
+ },
+ "et": {
+ "bidi": False,
+ "code": "et",
+ "name": "Estonian",
+ "name_local": "eesti",
+ },
+ "eu": {
+ "bidi": False,
+ "code": "eu",
+ "name": "Basque",
+ "name_local": "Basque",
+ },
+ "fa": {
+ "bidi": True,
+ "code": "fa",
+ "name": "Persian",
+ "name_local": "فارسی",
+ },
+ "fi": {
+ "bidi": False,
+ "code": "fi",
+ "name": "Finnish",
+ "name_local": "suomi",
+ },
+ "fr": {
+ "bidi": False,
+ "code": "fr",
+ "name": "French",
+ "name_local": "français",
+ },
+ "fy": {
+ "bidi": False,
+ "code": "fy",
+ "name": "Frisian",
+ "name_local": "frysk",
+ },
+ "ga": {
+ "bidi": False,
+ "code": "ga",
+ "name": "Irish",
+ "name_local": "Gaeilge",
+ },
+ "gd": {
+ "bidi": False,
+ "code": "gd",
+ "name": "Scottish Gaelic",
+ "name_local": "Gàidhlig",
+ },
+ "gl": {
+ "bidi": False,
+ "code": "gl",
+ "name": "Galician",
+ "name_local": "galego",
+ },
+ "he": {
+ "bidi": True,
+ "code": "he",
+ "name": "Hebrew",
+ "name_local": "עברית",
+ },
+ "hi": {
+ "bidi": False,
+ "code": "hi",
+ "name": "Hindi",
+ "name_local": "हिंदी",
+ },
+ "hr": {
+ "bidi": False,
+ "code": "hr",
+ "name": "Croatian",
+ "name_local": "Hrvatski",
+ },
+ "hsb": {
+ "bidi": False,
+ "code": "hsb",
+ "name": "Upper Sorbian",
+ "name_local": "hornjoserbsce",
+ },
+ "hu": {
+ "bidi": False,
+ "code": "hu",
+ "name": "Hungarian",
+ "name_local": "Magyar",
+ },
+ "hy": {
+ "bidi": False,
+ "code": "hy",
+ "name": "Armenian",
+ "name_local": "հայերեն",
+ },
+ "ia": {
+ "bidi": False,
+ "code": "ia",
+ "name": "Interlingua",
+ "name_local": "Interlingua",
+ },
+ "io": {
+ "bidi": False,
+ "code": "io",
+ "name": "Ido",
+ "name_local": "ido",
+ },
+ "id": {
+ "bidi": False,
+ "code": "id",
+ "name": "Indonesian",
+ "name_local": "Bahasa Indonesia",
+ },
+ "ig": {
+ "bidi": False,
+ "code": "ig",
+ "name": "Igbo",
+ "name_local": "Asụsụ Ìgbò",
+ },
+ "is": {
+ "bidi": False,
+ "code": "is",
+ "name": "Icelandic",
+ "name_local": "Íslenska",
+ },
+ "it": {
+ "bidi": False,
+ "code": "it",
+ "name": "Italian",
+ "name_local": "italiano",
+ },
+ "ja": {
+ "bidi": False,
+ "code": "ja",
+ "name": "Japanese",
+ "name_local": "日本語",
+ },
+ "ka": {
+ "bidi": False,
+ "code": "ka",
+ "name": "Georgian",
+ "name_local": "ქართული",
+ },
+ "kab": {
+ "bidi": False,
+ "code": "kab",
+ "name": "Kabyle",
+ "name_local": "taqbaylit",
+ },
+ "kk": {
+ "bidi": False,
+ "code": "kk",
+ "name": "Kazakh",
+ "name_local": "Қазақ",
+ },
+ "km": {
+ "bidi": False,
+ "code": "km",
+ "name": "Khmer",
+ "name_local": "Khmer",
+ },
+ "kn": {
+ "bidi": False,
+ "code": "kn",
+ "name": "Kannada",
+ "name_local": "Kannada",
+ },
+ "ko": {
+ "bidi": False,
+ "code": "ko",
+ "name": "Korean",
+ "name_local": "한국어",
+ },
+ "ky": {
+ "bidi": False,
+ "code": "ky",
+ "name": "Kyrgyz",
+ "name_local": "Кыргызча",
+ },
+ "lb": {
+ "bidi": False,
+ "code": "lb",
+ "name": "Luxembourgish",
+ "name_local": "Lëtzebuergesch",
+ },
+ "lt": {
+ "bidi": False,
+ "code": "lt",
+ "name": "Lithuanian",
+ "name_local": "Lietuviškai",
+ },
+ "lv": {
+ "bidi": False,
+ "code": "lv",
+ "name": "Latvian",
+ "name_local": "latviešu",
+ },
+ "mk": {
+ "bidi": False,
+ "code": "mk",
+ "name": "Macedonian",
+ "name_local": "Македонски",
+ },
+ "ml": {
+ "bidi": False,
+ "code": "ml",
+ "name": "Malayalam",
+ "name_local": "മലയാളം",
+ },
+ "mn": {
+ "bidi": False,
+ "code": "mn",
+ "name": "Mongolian",
+ "name_local": "Mongolian",
+ },
+ "mr": {
+ "bidi": False,
+ "code": "mr",
+ "name": "Marathi",
+ "name_local": "मराठी",
+ },
+ "ms": {
+ "bidi": False,
+ "code": "ms",
+ "name": "Malay",
+ "name_local": "Bahasa Melayu",
+ },
+ "my": {
+ "bidi": False,
+ "code": "my",
+ "name": "Burmese",
+ "name_local": "မြန်မာဘာသာ",
+ },
+ "nb": {
+ "bidi": False,
+ "code": "nb",
+ "name": "Norwegian Bokmal",
+ "name_local": "norsk (bokmål)",
+ },
+ "ne": {
+ "bidi": False,
+ "code": "ne",
+ "name": "Nepali",
+ "name_local": "नेपाली",
+ },
+ "nl": {
+ "bidi": False,
+ "code": "nl",
+ "name": "Dutch",
+ "name_local": "Nederlands",
+ },
+ "nn": {
+ "bidi": False,
+ "code": "nn",
+ "name": "Norwegian Nynorsk",
+ "name_local": "norsk (nynorsk)",
+ },
+ "no": {
+ "bidi": False,
+ "code": "no",
+ "name": "Norwegian",
+ "name_local": "norsk",
+ },
+ "os": {
+ "bidi": False,
+ "code": "os",
+ "name": "Ossetic",
+ "name_local": "Ирон",
+ },
+ "pa": {
+ "bidi": False,
+ "code": "pa",
+ "name": "Punjabi",
+ "name_local": "Punjabi",
+ },
+ "pl": {
+ "bidi": False,
+ "code": "pl",
+ "name": "Polish",
+ "name_local": "polski",
+ },
+ "pt": {
+ "bidi": False,
+ "code": "pt",
+ "name": "Portuguese",
+ "name_local": "Português",
+ },
+ "pt-br": {
+ "bidi": False,
+ "code": "pt-br",
+ "name": "Brazilian Portuguese",
+ "name_local": "Português Brasileiro",
+ },
+ "ro": {
+ "bidi": False,
+ "code": "ro",
+ "name": "Romanian",
+ "name_local": "Română",
+ },
+ "ru": {
+ "bidi": False,
+ "code": "ru",
+ "name": "Russian",
+ "name_local": "Русский",
+ },
+ "sk": {
+ "bidi": False,
+ "code": "sk",
+ "name": "Slovak",
+ "name_local": "Slovensky",
+ },
+ "sl": {
+ "bidi": False,
+ "code": "sl",
+ "name": "Slovenian",
+ "name_local": "Slovenščina",
+ },
+ "sq": {
+ "bidi": False,
+ "code": "sq",
+ "name": "Albanian",
+ "name_local": "shqip",
+ },
+ "sr": {
+ "bidi": False,
+ "code": "sr",
+ "name": "Serbian",
+ "name_local": "српски",
+ },
+ "sr-latn": {
+ "bidi": False,
+ "code": "sr-latn",
+ "name": "Serbian Latin",
+ "name_local": "srpski (latinica)",
+ },
+ "sv": {
+ "bidi": False,
+ "code": "sv",
+ "name": "Swedish",
+ "name_local": "svenska",
+ },
+ "sw": {
+ "bidi": False,
+ "code": "sw",
+ "name": "Swahili",
+ "name_local": "Kiswahili",
+ },
+ "ta": {
+ "bidi": False,
+ "code": "ta",
+ "name": "Tamil",
+ "name_local": "தமிழ்",
+ },
+ "te": {
+ "bidi": False,
+ "code": "te",
+ "name": "Telugu",
+ "name_local": "తెలుగు",
+ },
+ "tg": {
+ "bidi": False,
+ "code": "tg",
+ "name": "Tajik",
+ "name_local": "тоҷикӣ",
+ },
+ "th": {
+ "bidi": False,
+ "code": "th",
+ "name": "Thai",
+ "name_local": "ภาษาไทย",
+ },
+ "tk": {
+ "bidi": False,
+ "code": "tk",
+ "name": "Turkmen",
+ "name_local": "Türkmençe",
+ },
+ "tr": {
+ "bidi": False,
+ "code": "tr",
+ "name": "Turkish",
+ "name_local": "Türkçe",
+ },
+ "tt": {
+ "bidi": False,
+ "code": "tt",
+ "name": "Tatar",
+ "name_local": "Татарча",
+ },
+ "udm": {
+ "bidi": False,
+ "code": "udm",
+ "name": "Udmurt",
+ "name_local": "Удмурт",
+ },
+ "uk": {
+ "bidi": False,
+ "code": "uk",
+ "name": "Ukrainian",
+ "name_local": "Українська",
+ },
+ "ur": {
+ "bidi": True,
+ "code": "ur",
+ "name": "Urdu",
+ "name_local": "اردو",
+ },
+ "uz": {
+ "bidi": False,
+ "code": "uz",
+ "name": "Uzbek",
+ "name_local": "oʻzbek tili",
+ },
+ "vi": {
+ "bidi": False,
+ "code": "vi",
+ "name": "Vietnamese",
+ "name_local": "Tiếng Việt",
+ },
+ "zh-cn": {
+ "fallback": ["zh-hans"],
+ },
+ "zh-hans": {
+ "bidi": False,
+ "code": "zh-hans",
+ "name": "Simplified Chinese",
+ "name_local": "简体中文",
+ },
+ "zh-hant": {
+ "bidi": False,
+ "code": "zh-hant",
+ "name": "Traditional Chinese",
+ "name_local": "繁體中文",
+ },
+ "zh-hk": {
+ "fallback": ["zh-hant"],
+ },
+ "zh-mo": {
+ "fallback": ["zh-hant"],
+ },
+ "zh-my": {
+ "fallback": ["zh-hans"],
+ },
+ "zh-sg": {
+ "fallback": ["zh-hans"],
+ },
+ "zh-tw": {
+ "fallback": ["zh-hant"],
+ },
+}
diff --git a/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/af/LC_MESSAGES/django.mo b/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/af/LC_MESSAGES/django.mo
new file mode 100644
index 00000000..60608b46
Binary files /dev/null and b/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/af/LC_MESSAGES/django.mo differ
diff --git a/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/af/LC_MESSAGES/django.po b/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/af/LC_MESSAGES/django.po
new file mode 100644
index 00000000..48636938
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/af/LC_MESSAGES/django.po
@@ -0,0 +1,1298 @@
+# This file is distributed under the same license as the Django package.
+#
+# Translators:
+# F Wolff , 2019-2020,2022
+# Stephen Cox , 2011-2012
+# unklphil , 2014,2019
+msgid ""
+msgstr ""
+"Project-Id-Version: django\n"
+"Report-Msgid-Bugs-To: \n"
+"POT-Creation-Date: 2022-05-17 05:23-0500\n"
+"PO-Revision-Date: 2022-07-25 06:49+0000\n"
+"Last-Translator: F Wolff \n"
+"Language-Team: Afrikaans (http://www.transifex.com/django/django/language/"
+"af/)\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Language: af\n"
+"Plural-Forms: nplurals=2; plural=(n != 1);\n"
+
+msgid "Afrikaans"
+msgstr "Afrikaans"
+
+msgid "Arabic"
+msgstr "Arabies"
+
+msgid "Algerian Arabic"
+msgstr ""
+
+msgid "Asturian"
+msgstr "Asturies"
+
+msgid "Azerbaijani"
+msgstr "Aserbeidjans"
+
+msgid "Bulgarian"
+msgstr "Bulgaars"
+
+msgid "Belarusian"
+msgstr "Wit-Russies"
+
+msgid "Bengali"
+msgstr "Bengali"
+
+msgid "Breton"
+msgstr "Bretons"
+
+msgid "Bosnian"
+msgstr "Bosnies"
+
+msgid "Catalan"
+msgstr "Katalaans"
+
+msgid "Czech"
+msgstr "Tsjeggies"
+
+msgid "Welsh"
+msgstr "Wallies"
+
+msgid "Danish"
+msgstr "Deens"
+
+msgid "German"
+msgstr "Duits"
+
+msgid "Lower Sorbian"
+msgstr "Neder-Sorbies"
+
+msgid "Greek"
+msgstr "Grieks"
+
+msgid "English"
+msgstr "Engels"
+
+msgid "Australian English"
+msgstr "Australiese Engels"
+
+msgid "British English"
+msgstr "Britse Engels"
+
+msgid "Esperanto"
+msgstr "Esperanto"
+
+msgid "Spanish"
+msgstr "Spaans"
+
+msgid "Argentinian Spanish"
+msgstr "Argentynse Spaans"
+
+msgid "Colombian Spanish"
+msgstr "Kolombiaanse Spaans"
+
+msgid "Mexican Spanish"
+msgstr "Meksikaanse Spaans"
+
+msgid "Nicaraguan Spanish"
+msgstr "Nicaraguaanse Spaans"
+
+msgid "Venezuelan Spanish"
+msgstr "Venezolaanse Spaans"
+
+msgid "Estonian"
+msgstr "Estnies"
+
+msgid "Basque"
+msgstr "Baskies"
+
+msgid "Persian"
+msgstr "Persies"
+
+msgid "Finnish"
+msgstr "Fins"
+
+msgid "French"
+msgstr "Fraans"
+
+msgid "Frisian"
+msgstr "Fries"
+
+msgid "Irish"
+msgstr "Iers"
+
+msgid "Scottish Gaelic"
+msgstr "Skots-Gaelies"
+
+msgid "Galician"
+msgstr "Galicies"
+
+msgid "Hebrew"
+msgstr "Hebreeus"
+
+msgid "Hindi"
+msgstr "Hindoe"
+
+msgid "Croatian"
+msgstr "Kroaties"
+
+msgid "Upper Sorbian"
+msgstr "Opper-Sorbies"
+
+msgid "Hungarian"
+msgstr "Hongaars"
+
+msgid "Armenian"
+msgstr "Armeens"
+
+msgid "Interlingua"
+msgstr "Interlingua"
+
+msgid "Indonesian"
+msgstr "Indonesies"
+
+msgid "Igbo"
+msgstr ""
+
+msgid "Ido"
+msgstr "Ido"
+
+msgid "Icelandic"
+msgstr "Yslands"
+
+msgid "Italian"
+msgstr "Italiaans"
+
+msgid "Japanese"
+msgstr "Japannees"
+
+msgid "Georgian"
+msgstr "Georgian"
+
+msgid "Kabyle"
+msgstr "Kabilies"
+
+msgid "Kazakh"
+msgstr "Kazakh"
+
+msgid "Khmer"
+msgstr "Khmer"
+
+msgid "Kannada"
+msgstr "Kannada"
+
+msgid "Korean"
+msgstr "Koreaans"
+
+msgid "Kyrgyz"
+msgstr ""
+
+msgid "Luxembourgish"
+msgstr "Luxemburgs"
+
+msgid "Lithuanian"
+msgstr "Litaus"
+
+msgid "Latvian"
+msgstr "Lets"
+
+msgid "Macedonian"
+msgstr "Macedonies"
+
+msgid "Malayalam"
+msgstr "Malabaars"
+
+msgid "Mongolian"
+msgstr "Mongools"
+
+msgid "Marathi"
+msgstr "Marathi"
+
+msgid "Malay"
+msgstr "Maleisies"
+
+msgid "Burmese"
+msgstr "Birmaans"
+
+msgid "Norwegian Bokmål"
+msgstr "Noorweegse Bokmål"
+
+msgid "Nepali"
+msgstr "Nepalees"
+
+msgid "Dutch"
+msgstr "Nederlands"
+
+msgid "Norwegian Nynorsk"
+msgstr "Noorweegse Nynorsk"
+
+msgid "Ossetic"
+msgstr "Osseties"
+
+msgid "Punjabi"
+msgstr "Punjabi"
+
+msgid "Polish"
+msgstr "Pools"
+
+msgid "Portuguese"
+msgstr "Portugees"
+
+msgid "Brazilian Portuguese"
+msgstr "Brasiliaanse Portugees"
+
+msgid "Romanian"
+msgstr "Roemeens"
+
+msgid "Russian"
+msgstr "Russiese"
+
+msgid "Slovak"
+msgstr "Slowaaks"
+
+msgid "Slovenian"
+msgstr "Sloweens"
+
+msgid "Albanian"
+msgstr "Albanees"
+
+msgid "Serbian"
+msgstr "Serwies"
+
+msgid "Serbian Latin"
+msgstr "Serwies Latyns"
+
+msgid "Swedish"
+msgstr "Sweeds"
+
+msgid "Swahili"
+msgstr "Swahili"
+
+msgid "Tamil"
+msgstr "Tamil"
+
+msgid "Telugu"
+msgstr "Teloegoe"
+
+msgid "Tajik"
+msgstr ""
+
+msgid "Thai"
+msgstr "Thai"
+
+msgid "Turkmen"
+msgstr ""
+
+msgid "Turkish"
+msgstr "Turks"
+
+msgid "Tatar"
+msgstr "Tataars"
+
+msgid "Udmurt"
+msgstr "Oedmoerts"
+
+msgid "Ukrainian"
+msgstr "Oekraïens"
+
+msgid "Urdu"
+msgstr "Oerdoe"
+
+msgid "Uzbek"
+msgstr "Oesbekies "
+
+msgid "Vietnamese"
+msgstr "Viëtnamees"
+
+msgid "Simplified Chinese"
+msgstr "Vereenvoudigde Sjinees"
+
+msgid "Traditional Chinese"
+msgstr "Tradisionele Sjinees"
+
+msgid "Messages"
+msgstr "Boodskappe"
+
+msgid "Site Maps"
+msgstr "Werfkaarte"
+
+msgid "Static Files"
+msgstr "Statiese lêers"
+
+msgid "Syndication"
+msgstr "Sindikasie"
+
+#. Translators: String used to replace omitted page numbers in elided page
+#. range generated by paginators, e.g. [1, 2, '…', 5, 6, 7, '…', 9, 10].
+msgid "…"
+msgstr "…"
+
+msgid "That page number is not an integer"
+msgstr "Daai bladsynommer is nie ’n heelgetal nie"
+
+msgid "That page number is less than 1"
+msgstr "Daai bladsynommer is minder as 1"
+
+msgid "That page contains no results"
+msgstr "Daai bladsy bevat geen resultate nie"
+
+msgid "Enter a valid value."
+msgstr "Gee ’n geldige waarde."
+
+msgid "Enter a valid URL."
+msgstr "Gee ’n geldige URL."
+
+msgid "Enter a valid integer."
+msgstr "Gee ’n geldige heelgetal."
+
+msgid "Enter a valid email address."
+msgstr "Gee ’n geldige e-posadres."
+
+#. Translators: "letters" means latin letters: a-z and A-Z.
+msgid ""
+"Enter a valid “slug” consisting of letters, numbers, underscores or hyphens."
+msgstr ""
+
+msgid ""
+"Enter a valid “slug” consisting of Unicode letters, numbers, underscores, or "
+"hyphens."
+msgstr ""
+
+msgid "Enter a valid IPv4 address."
+msgstr "Gee ’n geldige IPv4-adres."
+
+msgid "Enter a valid IPv6 address."
+msgstr "Gee ’n geldige IPv6-adres."
+
+msgid "Enter a valid IPv4 or IPv6 address."
+msgstr "Gee ’n geldige IPv4- of IPv6-adres."
+
+msgid "Enter only digits separated by commas."
+msgstr "Gee slegs syfers wat deur kommas geskei is."
+
+#, python-format
+msgid "Ensure this value is %(limit_value)s (it is %(show_value)s)."
+msgstr ""
+"Maak seker dat hierdie waarde %(limit_value)s is (dit is %(show_value)s)."
+
+#, python-format
+msgid "Ensure this value is less than or equal to %(limit_value)s."
+msgstr "Maak seker dat hierdie waarde kleiner of gelyk is aan %(limit_value)s."
+
+#, python-format
+msgid "Ensure this value is greater than or equal to %(limit_value)s."
+msgstr "Maak seker dat hierdie waarde groter of gelyk is aan %(limit_value)s."
+
+#, python-format
+msgid "Ensure this value is a multiple of step size %(limit_value)s."
+msgstr ""
+"Maak seker dat hierdie waarde ’n veelvoud is van stapgrootte %(limit_value)s."
+
+#, python-format
+msgid ""
+"Ensure this value has at least %(limit_value)d character (it has "
+"%(show_value)d)."
+msgid_plural ""
+"Ensure this value has at least %(limit_value)d characters (it has "
+"%(show_value)d)."
+msgstr[0] ""
+"Maak seker hierdie waarde het ten minste %(limit_value)d karakter (dit het "
+"%(show_value)d)."
+msgstr[1] ""
+"Maak seker hierdie waarde het ten minste %(limit_value)d karakters (dit het "
+"%(show_value)d)."
+
+#, python-format
+msgid ""
+"Ensure this value has at most %(limit_value)d character (it has "
+"%(show_value)d)."
+msgid_plural ""
+"Ensure this value has at most %(limit_value)d characters (it has "
+"%(show_value)d)."
+msgstr[0] ""
+"Maak seker hierdie waarde het op die meeste %(limit_value)d karakter (dit "
+"het %(show_value)d)."
+msgstr[1] ""
+"Maak seker hierdie waarde het op die meeste %(limit_value)d karakters (dit "
+"het %(show_value)d)."
+
+msgid "Enter a number."
+msgstr "Gee ’n getal."
+
+#, python-format
+msgid "Ensure that there are no more than %(max)s digit in total."
+msgid_plural "Ensure that there are no more than %(max)s digits in total."
+msgstr[0] "Maak seker dat daar nie meer as %(max)s syfer in totaal is nie."
+msgstr[1] "Maak seker dat daar nie meer as %(max)s syfers in totaal is nie."
+
+#, python-format
+msgid "Ensure that there are no more than %(max)s decimal place."
+msgid_plural "Ensure that there are no more than %(max)s decimal places."
+msgstr[0] "Maak seker dat daar nie meer as %(max)s desimale plek is nie."
+msgstr[1] "Maak seker dat daar nie meer as %(max)s desimale plekke is nie."
+
+#, python-format
+msgid ""
+"Ensure that there are no more than %(max)s digit before the decimal point."
+msgid_plural ""
+"Ensure that there are no more than %(max)s digits before the decimal point."
+msgstr[0] ""
+"Maak seker dat daar nie meer as %(max)s syfer voor die desimale punt is nie."
+msgstr[1] ""
+"Maak seker dat daar nie meer as %(max)s syfers voor die desimale punt is nie."
+
+#, python-format
+msgid ""
+"File extension “%(extension)s” is not allowed. Allowed extensions are: "
+"%(allowed_extensions)s."
+msgstr ""
+"Lêeruitbreiding “%(extension)s” word nie toegelaat nie. Toegelate "
+"uitbreidings is: %(allowed_extensions)s."
+
+msgid "Null characters are not allowed."
+msgstr "Nul-karakters word nie toegelaat nie."
+
+msgid "and"
+msgstr "en"
+
+#, python-format
+msgid "%(model_name)s with this %(field_labels)s already exists."
+msgstr "%(model_name)s met hierdie %(field_labels)s bestaan alreeds."
+
+#, python-format
+msgid "Constraint “%(name)s” is violated."
+msgstr "Beperking “%(name)s” word verbreek."
+
+#, python-format
+msgid "Value %(value)r is not a valid choice."
+msgstr "Waarde %(value)r is nie ’n geldige keuse nie."
+
+msgid "This field cannot be null."
+msgstr "Hierdie veld kan nie nil wees nie."
+
+msgid "This field cannot be blank."
+msgstr "Hierdie veld kan nie leeg wees nie."
+
+#, python-format
+msgid "%(model_name)s with this %(field_label)s already exists."
+msgstr "%(model_name)s met hierdie %(field_label)s bestaan alreeds."
+
+#. Translators: The 'lookup_type' is one of 'date', 'year' or
+#. 'month'. Eg: "Title must be unique for pub_date year"
+#, python-format
+msgid ""
+"%(field_label)s must be unique for %(date_field_label)s %(lookup_type)s."
+msgstr ""
+"%(field_label)s moet uniek wees per %(date_field_label)s %(lookup_type)s."
+
+#, python-format
+msgid "Field of type: %(field_type)s"
+msgstr "Veld van tipe: %(field_type)s "
+
+#, python-format
+msgid "“%(value)s” value must be either True or False."
+msgstr "“%(value)s” waarde moet óf True óf False wees."
+
+#, python-format
+msgid "“%(value)s” value must be either True, False, or None."
+msgstr "Die waarde “%(value)s” moet True, False of None wees."
+
+msgid "Boolean (Either True or False)"
+msgstr "Boole (True of False)"
+
+#, python-format
+msgid "String (up to %(max_length)s)"
+msgstr "String (hoogstens %(max_length)s karakters)"
+
+msgid "Comma-separated integers"
+msgstr "Heelgetalle geskei met kommas"
+
+#, python-format
+msgid ""
+"“%(value)s” value has an invalid date format. It must be in YYYY-MM-DD "
+"format."
+msgstr ""
+"Die waarde “%(value)s” het ’n ongeldige datumformaat. Dit moet in die "
+"formaat JJJJ-MM-DD wees."
+
+#, python-format
+msgid ""
+"“%(value)s” value has the correct format (YYYY-MM-DD) but it is an invalid "
+"date."
+msgstr ""
+"Die waarde “%(value)s” het die korrekte formaat (JJJJ-MM-DD), maar dit is ’n "
+"ongeldige datum."
+
+msgid "Date (without time)"
+msgstr "Datum (sonder die tyd)"
+
+#, python-format
+msgid ""
+"“%(value)s” value has an invalid format. It must be in YYYY-MM-DD HH:MM[:ss[."
+"uuuuuu]][TZ] format."
+msgstr ""
+"Die waarde “%(value)s” se formaat is ongeldig. Dit moet in die formaat JJJJ-"
+"MM-DD HH:MM[:ss[.uuuuuu]][TZ] wees."
+
+#, python-format
+msgid ""
+"“%(value)s” value has the correct format (YYYY-MM-DD HH:MM[:ss[.uuuuuu]]"
+"[TZ]) but it is an invalid date/time."
+msgstr ""
+"Die waarde “%(value)s” het die korrekte formaat (JJJJ-MM-DD HH:MM[:ss[."
+"uuuuuu]][TZ]) maar dit is ’n ongeldige datum/tyd."
+
+msgid "Date (with time)"
+msgstr "Datum (met die tyd)"
+
+#, python-format
+msgid "“%(value)s” value must be a decimal number."
+msgstr "“%(value)s”-waarde moet ’n desimale getal wees."
+
+msgid "Decimal number"
+msgstr "Desimale getal"
+
+#, python-format
+msgid ""
+"“%(value)s” value has an invalid format. It must be in [DD] [[HH:]MM:]ss[."
+"uuuuuu] format."
+msgstr ""
+"Die waarde “%(value)s” het ’n ongeldige formaat. Dit moet in die formaat "
+"[DD] [HH:[MM:]]ss[.uuuuuu] wees."
+
+msgid "Duration"
+msgstr "Duur"
+
+msgid "Email address"
+msgstr "E-posadres"
+
+msgid "File path"
+msgstr "Lêerpad"
+
+#, python-format
+msgid "“%(value)s” value must be a float."
+msgstr "Die waarde “%(value)s” moet ’n dryfpuntgetal wees."
+
+msgid "Floating point number"
+msgstr "Dryfpuntgetal"
+
+#, python-format
+msgid "“%(value)s” value must be an integer."
+msgstr "Die waarde “%(value)s” moet ’n heelgetal wees."
+
+msgid "Integer"
+msgstr "Heelgetal"
+
+msgid "Big (8 byte) integer"
+msgstr "Groot (8 greep) heelgetal"
+
+msgid "Small integer"
+msgstr "Klein heelgetal"
+
+msgid "IPv4 address"
+msgstr "IPv4-adres"
+
+msgid "IP address"
+msgstr "IP-adres"
+
+#, python-format
+msgid "“%(value)s” value must be either None, True or False."
+msgstr "“%(value)s”-waarde moet een wees uit None, True of False."
+
+msgid "Boolean (Either True, False or None)"
+msgstr "Boole (True, False, of None)"
+
+msgid "Positive big integer"
+msgstr "Positiewe groot heelgetal"
+
+msgid "Positive integer"
+msgstr "Positiewe heelgetal"
+
+msgid "Positive small integer"
+msgstr "Klein positiewe heelgetal"
+
+#, python-format
+msgid "Slug (up to %(max_length)s)"
+msgstr "Slug (tot en met %(max_length)s karakters)"
+
+msgid "Text"
+msgstr "Teks"
+
+#, python-format
+msgid ""
+"“%(value)s” value has an invalid format. It must be in HH:MM[:ss[.uuuuuu]] "
+"format."
+msgstr ""
+"“%(value)s”-waarde het ’n ongeldige formaat. Dit moet geformateer word as HH:"
+"MM[:ss[.uuuuuu]]."
+
+#, python-format
+msgid ""
+"“%(value)s” value has the correct format (HH:MM[:ss[.uuuuuu]]) but it is an "
+"invalid time."
+msgstr ""
+"Die waarde “%(value)s” het die regte formaat (HH:MM[:ss[.uuuuuu]]) maar is "
+"nie ’n geldige tyd nie."
+
+msgid "Time"
+msgstr "Tyd"
+
+msgid "URL"
+msgstr "URL"
+
+msgid "Raw binary data"
+msgstr "Rou binêre data"
+
+#, python-format
+msgid "“%(value)s” is not a valid UUID."
+msgstr "“%(value)s” is nie ’n geldige UUID nie."
+
+msgid "Universally unique identifier"
+msgstr "Universeel unieke identifiseerder"
+
+msgid "File"
+msgstr "Lêer"
+
+msgid "Image"
+msgstr "Prent"
+
+msgid "A JSON object"
+msgstr "’n JSON-objek"
+
+msgid "Value must be valid JSON."
+msgstr "Waarde moet geldige JSON wees."
+
+#, python-format
+msgid "%(model)s instance with %(field)s %(value)r does not exist."
+msgstr "%(model)s-objek met %(field)s %(value)r bestaan nie."
+
+msgid "Foreign Key (type determined by related field)"
+msgstr "Vreemde sleutel (tipe bepaal deur verwante veld)"
+
+msgid "One-to-one relationship"
+msgstr "Een-tot-een-verhouding"
+
+#, python-format
+msgid "%(from)s-%(to)s relationship"
+msgstr "%(from)s-%(to)s-verwantskap"
+
+#, python-format
+msgid "%(from)s-%(to)s relationships"
+msgstr "%(from)s-%(to)s-verwantskappe"
+
+msgid "Many-to-many relationship"
+msgstr "Baie-tot-baie-verwantskap"
+
+#. Translators: If found as last label character, these punctuation
+#. characters will prevent the default label_suffix to be appended to the
+#. label
+msgid ":?.!"
+msgstr ":?.!"
+
+msgid "This field is required."
+msgstr "Dié veld is verpligtend."
+
+msgid "Enter a whole number."
+msgstr "Tik ’n heelgetal in."
+
+msgid "Enter a valid date."
+msgstr "Tik ’n geldige datum in."
+
+msgid "Enter a valid time."
+msgstr "Tik ’n geldige tyd in."
+
+msgid "Enter a valid date/time."
+msgstr "Tik ’n geldige datum/tyd in."
+
+msgid "Enter a valid duration."
+msgstr "Tik ’n geldige tydsduur in."
+
+#, python-brace-format
+msgid "The number of days must be between {min_days} and {max_days}."
+msgstr "Die aantal dae moet tussen {min_days} en {max_days} wees."
+
+msgid "No file was submitted. Check the encoding type on the form."
+msgstr ""
+"Geen lêer is ingedien nie. Maak seker die koderingtipe op die vorm is reg."
+
+msgid "No file was submitted."
+msgstr "Geen lêer is ingedien nie."
+
+msgid "The submitted file is empty."
+msgstr "Die ingediende lêer is leeg."
+
+#, python-format
+msgid "Ensure this filename has at most %(max)d character (it has %(length)d)."
+msgid_plural ""
+"Ensure this filename has at most %(max)d characters (it has %(length)d)."
+msgstr[0] ""
+"Maak seker hierdie lêernaam het hoogstens %(max)d karakter (dit het "
+"%(length)d)."
+msgstr[1] ""
+"Maak seker hierdie lêernaam het hoogstens %(max)d karakters (dit het "
+"%(length)d)."
+
+msgid "Please either submit a file or check the clear checkbox, not both."
+msgstr "Dien die lêer in óf merk die Maak skoon-boksie, nie altwee nie."
+
+msgid ""
+"Upload a valid image. The file you uploaded was either not an image or a "
+"corrupted image."
+msgstr ""
+"Laai ’n geldige prent. Die lêer wat jy opgelaai het, is nie ’n prent nie of "
+"dit is ’n korrupte prent."
+
+#, python-format
+msgid "Select a valid choice. %(value)s is not one of the available choices."
+msgstr ""
+"Kies ’n geldige keuse. %(value)s is nie een van die beskikbare keuses nie."
+
+msgid "Enter a list of values."
+msgstr "Tik ’n lys waardes in."
+
+msgid "Enter a complete value."
+msgstr "Tik ’n volledige waarde in."
+
+msgid "Enter a valid UUID."
+msgstr "Tik ’n geldige UUID in."
+
+msgid "Enter a valid JSON."
+msgstr "Gee geldige JSON."
+
+#. Translators: This is the default suffix added to form field labels
+msgid ":"
+msgstr ":"
+
+#, python-format
+msgid "(Hidden field %(name)s) %(error)s"
+msgstr "(Versteekte veld %(name)s) %(error)s"
+
+#, python-format
+msgid ""
+"ManagementForm data is missing or has been tampered with. Missing fields: "
+"%(field_names)s. You may need to file a bug report if the issue persists."
+msgstr ""
+
+#, python-format
+msgid "Please submit at most %(num)d form."
+msgid_plural "Please submit at most %(num)d forms."
+msgstr[0] "Dien asseblief hoogstens %(num)d vorm in."
+msgstr[1] "Dien asseblief hoogstens %(num)d vorms in."
+
+#, python-format
+msgid "Please submit at least %(num)d form."
+msgid_plural "Please submit at least %(num)d forms."
+msgstr[0] "Dien asseblief ten minste %(num)d vorm in."
+msgstr[1] "Dien asseblief ten minste %(num)d vorms in."
+
+msgid "Order"
+msgstr "Orde"
+
+msgid "Delete"
+msgstr "Verwyder"
+
+#, python-format
+msgid "Please correct the duplicate data for %(field)s."
+msgstr "Korrigeer die dubbele data vir %(field)s."
+
+#, python-format
+msgid "Please correct the duplicate data for %(field)s, which must be unique."
+msgstr "Korrigeer die dubbele data vir %(field)s, dit moet uniek wees."
+
+#, python-format
+msgid ""
+"Please correct the duplicate data for %(field_name)s which must be unique "
+"for the %(lookup)s in %(date_field)s."
+msgstr ""
+"Korrigeer die dubbele data vir %(field_name)s, dit moet uniek wees vir die "
+"%(lookup)s in %(date_field)s."
+
+msgid "Please correct the duplicate values below."
+msgstr "Korrigeer die dubbele waardes hieronder."
+
+msgid "The inline value did not match the parent instance."
+msgstr "Die waarde inlyn pas nie by die ouerobjek nie."
+
+msgid "Select a valid choice. That choice is not one of the available choices."
+msgstr ""
+"Kies ’n geldige keuse. Daardie keuse is nie een van die beskikbare keuses "
+"nie."
+
+#, python-format
+msgid "“%(pk)s” is not a valid value."
+msgstr "“%(pk)s” is nie ’n geldige waarde nie."
+
+#, python-format
+msgid ""
+"%(datetime)s couldn’t be interpreted in time zone %(current_timezone)s; it "
+"may be ambiguous or it may not exist."
+msgstr ""
+
+msgid "Clear"
+msgstr "Maak skoon"
+
+msgid "Currently"
+msgstr "Tans"
+
+msgid "Change"
+msgstr "Verander"
+
+msgid "Unknown"
+msgstr "Onbekend"
+
+msgid "Yes"
+msgstr "Ja"
+
+msgid "No"
+msgstr "Nee"
+
+#. Translators: Please do not add spaces around commas.
+msgid "yes,no,maybe"
+msgstr "ja,nee,miskien"
+
+#, python-format
+msgid "%(size)d byte"
+msgid_plural "%(size)d bytes"
+msgstr[0] "%(size)d greep"
+msgstr[1] "%(size)d grepe"
+
+#, python-format
+msgid "%s KB"
+msgstr "%s KB"
+
+#, python-format
+msgid "%s MB"
+msgstr "%s MB"
+
+#, python-format
+msgid "%s GB"
+msgstr "%s GB"
+
+#, python-format
+msgid "%s TB"
+msgstr "%s TB"
+
+#, python-format
+msgid "%s PB"
+msgstr "%s PB"
+
+msgid "p.m."
+msgstr "nm."
+
+msgid "a.m."
+msgstr "vm."
+
+msgid "PM"
+msgstr "NM"
+
+msgid "AM"
+msgstr "VM"
+
+msgid "midnight"
+msgstr "middernag"
+
+msgid "noon"
+msgstr "middag"
+
+msgid "Monday"
+msgstr "Maandag"
+
+msgid "Tuesday"
+msgstr "Dinsdag"
+
+msgid "Wednesday"
+msgstr "Woensdag"
+
+msgid "Thursday"
+msgstr "Donderdag"
+
+msgid "Friday"
+msgstr "Vrydag"
+
+msgid "Saturday"
+msgstr "Saterdag"
+
+msgid "Sunday"
+msgstr "Sondag"
+
+msgid "Mon"
+msgstr "Ma"
+
+msgid "Tue"
+msgstr "Di"
+
+msgid "Wed"
+msgstr "Wo"
+
+msgid "Thu"
+msgstr "Do"
+
+msgid "Fri"
+msgstr "Vr"
+
+msgid "Sat"
+msgstr "Sa"
+
+msgid "Sun"
+msgstr "So"
+
+msgid "January"
+msgstr "Januarie"
+
+msgid "February"
+msgstr "Februarie"
+
+msgid "March"
+msgstr "Maart"
+
+msgid "April"
+msgstr "April"
+
+msgid "May"
+msgstr "Mei"
+
+msgid "June"
+msgstr "Junie"
+
+msgid "July"
+msgstr "Julie"
+
+msgid "August"
+msgstr "Augustus"
+
+msgid "September"
+msgstr "September"
+
+msgid "October"
+msgstr "Oktober"
+
+msgid "November"
+msgstr "November"
+
+msgid "December"
+msgstr "Desember"
+
+msgid "jan"
+msgstr "jan"
+
+msgid "feb"
+msgstr "feb"
+
+msgid "mar"
+msgstr "mrt"
+
+msgid "apr"
+msgstr "apr"
+
+msgid "may"
+msgstr "mei"
+
+msgid "jun"
+msgstr "jun"
+
+msgid "jul"
+msgstr "jul"
+
+msgid "aug"
+msgstr "aug"
+
+msgid "sep"
+msgstr "sept"
+
+msgid "oct"
+msgstr "okt"
+
+msgid "nov"
+msgstr "nov"
+
+msgid "dec"
+msgstr "des"
+
+msgctxt "abbrev. month"
+msgid "Jan."
+msgstr "Jan."
+
+msgctxt "abbrev. month"
+msgid "Feb."
+msgstr "Feb."
+
+msgctxt "abbrev. month"
+msgid "March"
+msgstr "Maart"
+
+msgctxt "abbrev. month"
+msgid "April"
+msgstr "April"
+
+msgctxt "abbrev. month"
+msgid "May"
+msgstr "Mei"
+
+msgctxt "abbrev. month"
+msgid "June"
+msgstr "Junie"
+
+msgctxt "abbrev. month"
+msgid "July"
+msgstr "Julie"
+
+msgctxt "abbrev. month"
+msgid "Aug."
+msgstr "Aug."
+
+msgctxt "abbrev. month"
+msgid "Sept."
+msgstr "Sept."
+
+msgctxt "abbrev. month"
+msgid "Oct."
+msgstr "Okt."
+
+msgctxt "abbrev. month"
+msgid "Nov."
+msgstr "Nov."
+
+msgctxt "abbrev. month"
+msgid "Dec."
+msgstr "Des."
+
+msgctxt "alt. month"
+msgid "January"
+msgstr "Januarie"
+
+msgctxt "alt. month"
+msgid "February"
+msgstr "Februarie"
+
+msgctxt "alt. month"
+msgid "March"
+msgstr "Maart"
+
+msgctxt "alt. month"
+msgid "April"
+msgstr "April"
+
+msgctxt "alt. month"
+msgid "May"
+msgstr "Mei"
+
+msgctxt "alt. month"
+msgid "June"
+msgstr "Junie"
+
+msgctxt "alt. month"
+msgid "July"
+msgstr "Julie"
+
+msgctxt "alt. month"
+msgid "August"
+msgstr "Augustus"
+
+msgctxt "alt. month"
+msgid "September"
+msgstr "September"
+
+msgctxt "alt. month"
+msgid "October"
+msgstr "Oktober"
+
+msgctxt "alt. month"
+msgid "November"
+msgstr "November"
+
+msgctxt "alt. month"
+msgid "December"
+msgstr "Desember"
+
+msgid "This is not a valid IPv6 address."
+msgstr "Hierdie is nie ’n geldige IPv6-adres nie."
+
+#, python-format
+msgctxt "String to return when truncating text"
+msgid "%(truncated_text)s…"
+msgstr "%(truncated_text)s…"
+
+msgid "or"
+msgstr "of"
+
+#. Translators: This string is used as a separator between list elements
+msgid ", "
+msgstr ", "
+
+#, python-format
+msgid "%(num)d year"
+msgid_plural "%(num)d years"
+msgstr[0] "%(num)d jaar"
+msgstr[1] "%(num)d jaar"
+
+#, python-format
+msgid "%(num)d month"
+msgid_plural "%(num)d months"
+msgstr[0] "%(num)d maand"
+msgstr[1] "%(num)d maande"
+
+#, python-format
+msgid "%(num)d week"
+msgid_plural "%(num)d weeks"
+msgstr[0] "%(num)d week"
+msgstr[1] "%(num)d weke"
+
+#, python-format
+msgid "%(num)d day"
+msgid_plural "%(num)d days"
+msgstr[0] "%(num)d dag"
+msgstr[1] "%(num)d dae"
+
+#, python-format
+msgid "%(num)d hour"
+msgid_plural "%(num)d hours"
+msgstr[0] "%(num)d uur"
+msgstr[1] "%(num)d uur"
+
+#, python-format
+msgid "%(num)d minute"
+msgid_plural "%(num)d minutes"
+msgstr[0] "%(num)d minuut"
+msgstr[1] "%(num)d minute"
+
+msgid "Forbidden"
+msgstr "Verbode"
+
+msgid "CSRF verification failed. Request aborted."
+msgstr "CSRF-verifikasie het misluk. Versoek is laat val."
+
+msgid ""
+"You are seeing this message because this HTTPS site requires a “Referer "
+"header” to be sent by your web browser, but none was sent. This header is "
+"required for security reasons, to ensure that your browser is not being "
+"hijacked by third parties."
+msgstr ""
+
+msgid ""
+"If you have configured your browser to disable “Referer” headers, please re-"
+"enable them, at least for this site, or for HTTPS connections, or for “same-"
+"origin” requests."
+msgstr ""
+
+msgid ""
+"If you are using the <meta name=\"referrer\" content=\"no-referrer\"> tag or "
+"including the “Referrer-Policy: no-referrer” header, please remove them. The "
+"CSRF protection requires the “Referer” header to do strict referer checking. "
+"If you’re concerned about privacy, use alternatives like <a rel=\"noreferrer"
+"\" …> for links to third-party sites."
+msgstr ""
+
+msgid ""
+"You are seeing this message because this site requires a CSRF cookie when "
+"submitting forms. This cookie is required for security reasons, to ensure "
+"that your browser is not being hijacked by third parties."
+msgstr ""
+"U sien hierdie boodskap omdat dié werf ’n CSRF-koekie benodig wanneer vorms "
+"ingedien word. Dié koekie word vir sekuriteitsredes benodig om te verseker "
+"dat u blaaier nie deur derde partye gekaap word nie."
+
+msgid ""
+"If you have configured your browser to disable cookies, please re-enable "
+"them, at least for this site, or for “same-origin” requests."
+msgstr ""
+
+msgid "More information is available with DEBUG=True."
+msgstr "Meer inligting is beskikbaar met DEBUG=True."
+
+msgid "No year specified"
+msgstr "Geen jaar gespesifiseer nie"
+
+msgid "Date out of range"
+msgstr "Datum buite omvang"
+
+msgid "No month specified"
+msgstr "Geen maand gespesifiseer nie"
+
+msgid "No day specified"
+msgstr "Geen dag gespesifiseer nie"
+
+msgid "No week specified"
+msgstr "Geen week gespesifiseer nie"
+
+#, python-format
+msgid "No %(verbose_name_plural)s available"
+msgstr "Geen %(verbose_name_plural)s beskikbaar nie"
+
+#, python-format
+msgid ""
+"Future %(verbose_name_plural)s not available because %(class_name)s."
+"allow_future is False."
+msgstr ""
+"Toekomstige %(verbose_name_plural)s is nie beskikbaar nie, omdat "
+"%(class_name)s.allow_future vals is."
+
+#, python-format
+msgid "Invalid date string “%(datestr)s” given format “%(format)s”"
+msgstr "Ongeldige datumstring “%(datestr)s” gegewe die formaat “%(format)s”"
+
+#, python-format
+msgid "No %(verbose_name)s found matching the query"
+msgstr "Geen %(verbose_name)s gevind vir die soektog nie"
+
+msgid "Page is not “last”, nor can it be converted to an int."
+msgstr ""
+
+#, python-format
+msgid "Invalid page (%(page_number)s): %(message)s"
+msgstr "Ongeldige bladsy (%(page_number)s): %(message)s"
+
+#, python-format
+msgid "Empty list and “%(class_name)s.allow_empty” is False."
+msgstr ""
+
+msgid "Directory indexes are not allowed here."
+msgstr "Gidsindekse word nie hier toegelaat nie."
+
+#, python-format
+msgid "“%(path)s” does not exist"
+msgstr "“%(path)s” bestaan nie."
+
+#, python-format
+msgid "Index of %(directory)s"
+msgstr "Indeks van %(directory)s"
+
+msgid "The install worked successfully! Congratulations!"
+msgstr "Die installasie was suksesvol! Geluk!"
+
+#, python-format
+msgid ""
+"View <a href=\"https://docs.djangoproject.com/en/%(version)s/releases/\" "
+"target=\"_blank\" rel=\"noopener\">release notes</a> for Django %(version)s"
+msgstr ""
+"Sien die <a href=\"https://docs.djangoproject.com/en/%(version)s/releases/\" "
+"target=\"_blank\" rel=\"noopener\">vrystellingsnotas</a> vir Django "
+"%(version)s"
+
+#, python-format
+msgid ""
+"You are seeing this page because <a href=\"https://docs.djangoproject.com/en/"
+"%(version)s/ref/settings/#debug\" target=\"_blank\" rel=\"noopener"
+"\">DEBUG=True</a> is in your settings file and you have not configured any "
+"URLs."
+msgstr ""
+"U sien dié bladsy omdat <a href=\"https://docs.djangoproject.com/en/"
+"%(version)s/ref/settings/#debug\" target=\"_blank\" rel=\"noopener"
+"\">DEBUG=True</a> in die settings-lêer is en geen URL’e opgestel is nie."
+
+msgid "Django Documentation"
+msgstr "Django-dokumentasie"
+
+msgid "Topics, references, & how-to’s"
+msgstr ""
+
+msgid "Tutorial: A Polling App"
+msgstr ""
+
+msgid "Get started with Django"
+msgstr "Kom aan die gang met Django"
+
+msgid "Django Community"
+msgstr "Django-gemeenskap"
+
+msgid "Connect, get help, or contribute"
+msgstr "Kontak, kry hulp of dra by"
diff --git a/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/ar/LC_MESSAGES/django.mo b/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/ar/LC_MESSAGES/django.mo
new file mode 100644
index 00000000..f0a04129
Binary files /dev/null and b/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/ar/LC_MESSAGES/django.mo differ
diff --git a/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/ar/LC_MESSAGES/django.po b/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/ar/LC_MESSAGES/django.po
new file mode 100644
index 00000000..25a491b5
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/ar/LC_MESSAGES/django.po
@@ -0,0 +1,1389 @@
+# This file is distributed under the same license as the Django package.
+#
+# Translators:
+# Bashar Al-Abdulhadi, 2015-2016,2020-2021
+# Bashar Al-Abdulhadi, 2014
+# Eyad Toma , 2013-2014
+# Jannis Leidel , 2011
+# Mariusz Felisiak , 2021
+# Muaaz Alsaied, 2020
+# Omar Al-Ithawi , 2020
+# Ossama Khayat , 2011
+# Tony xD , 2020
+# صفا الفليج , 2020
+msgid ""
+msgstr ""
+"Project-Id-Version: django\n"
+"Report-Msgid-Bugs-To: \n"
+"POT-Creation-Date: 2021-09-21 10:22+0200\n"
+"PO-Revision-Date: 2021-11-24 16:27+0000\n"
+"Last-Translator: Mariusz Felisiak \n"
+"Language-Team: Arabic (http://www.transifex.com/django/django/language/ar/)\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Language: ar\n"
+"Plural-Forms: nplurals=6; plural=n==0 ? 0 : n==1 ? 1 : n==2 ? 2 : n%100>=3 "
+"&& n%100<=10 ? 3 : n%100>=11 && n%100<=99 ? 4 : 5;\n"
+
+msgid "Afrikaans"
+msgstr "الإفريقية"
+
+msgid "Arabic"
+msgstr "العربيّة"
+
+msgid "Algerian Arabic"
+msgstr "عربي جزائري"
+
+msgid "Asturian"
+msgstr "الأسترية"
+
+msgid "Azerbaijani"
+msgstr "الأذربيجانية"
+
+msgid "Bulgarian"
+msgstr "البلغاريّة"
+
+msgid "Belarusian"
+msgstr "البيلاروسية"
+
+msgid "Bengali"
+msgstr "البنغاليّة"
+
+msgid "Breton"
+msgstr "البريتونية"
+
+msgid "Bosnian"
+msgstr "البوسنيّة"
+
+msgid "Catalan"
+msgstr "الكتلانيّة"
+
+msgid "Czech"
+msgstr "التشيكيّة"
+
+msgid "Welsh"
+msgstr "الويلز"
+
+msgid "Danish"
+msgstr "الدنماركيّة"
+
+msgid "German"
+msgstr "الألمانيّة"
+
+msgid "Lower Sorbian"
+msgstr "الصربية السفلى"
+
+msgid "Greek"
+msgstr "اليونانيّة"
+
+msgid "English"
+msgstr "الإنجليزيّة"
+
+msgid "Australian English"
+msgstr "الإنجليزية الإسترالية"
+
+msgid "British English"
+msgstr "الإنجليزيّة البريطانيّة"
+
+msgid "Esperanto"
+msgstr "الاسبرانتو"
+
+msgid "Spanish"
+msgstr "الإسبانيّة"
+
+msgid "Argentinian Spanish"
+msgstr "الأسبانية الأرجنتينية"
+
+msgid "Colombian Spanish"
+msgstr "الكولومبية الإسبانية"
+
+msgid "Mexican Spanish"
+msgstr "الأسبانية المكسيكية"
+
+msgid "Nicaraguan Spanish"
+msgstr "الإسبانية النيكاراغوية"
+
+msgid "Venezuelan Spanish"
+msgstr "الإسبانية الفنزويلية"
+
+msgid "Estonian"
+msgstr "الإستونيّة"
+
+msgid "Basque"
+msgstr "الباسك"
+
+msgid "Persian"
+msgstr "الفارسيّة"
+
+msgid "Finnish"
+msgstr "الفنلنديّة"
+
+msgid "French"
+msgstr "الفرنسيّة"
+
+msgid "Frisian"
+msgstr "الفريزيّة"
+
+msgid "Irish"
+msgstr "الإيرلنديّة"
+
+msgid "Scottish Gaelic"
+msgstr "الغيلية الأسكتلندية"
+
+msgid "Galician"
+msgstr "الجليقيّة"
+
+msgid "Hebrew"
+msgstr "العبريّة"
+
+msgid "Hindi"
+msgstr "الهندية"
+
+msgid "Croatian"
+msgstr "الكرواتيّة"
+
+msgid "Upper Sorbian"
+msgstr "الصربية العليا"
+
+msgid "Hungarian"
+msgstr "الهنغاريّة"
+
+msgid "Armenian"
+msgstr "الأرمنية"
+
+msgid "Interlingua"
+msgstr "اللغة الوسيطة"
+
+msgid "Indonesian"
+msgstr "الإندونيسيّة"
+
+msgid "Igbo"
+msgstr "الإيبو"
+
+msgid "Ido"
+msgstr "ايدو"
+
+msgid "Icelandic"
+msgstr "الآيسلنديّة"
+
+msgid "Italian"
+msgstr "الإيطاليّة"
+
+msgid "Japanese"
+msgstr "اليابانيّة"
+
+msgid "Georgian"
+msgstr "الجورجيّة"
+
+msgid "Kabyle"
+msgstr "القبائل"
+
+msgid "Kazakh"
+msgstr "الكازاخستانية"
+
+msgid "Khmer"
+msgstr "الخمر"
+
+msgid "Kannada"
+msgstr "الهنديّة (كنّادا)"
+
+msgid "Korean"
+msgstr "الكوريّة"
+
+msgid "Kyrgyz"
+msgstr "قيرغيز"
+
+msgid "Luxembourgish"
+msgstr "اللوكسمبرجية"
+
+msgid "Lithuanian"
+msgstr "اللتوانيّة"
+
+msgid "Latvian"
+msgstr "اللاتفيّة"
+
+msgid "Macedonian"
+msgstr "المقدونيّة"
+
+msgid "Malayalam"
+msgstr "المايالام"
+
+msgid "Mongolian"
+msgstr "المنغوليّة"
+
+msgid "Marathi"
+msgstr "المهاراتية"
+
+msgid "Malay"
+msgstr ""
+
+msgid "Burmese"
+msgstr "البورمية"
+
+msgid "Norwegian Bokmål"
+msgstr "النرويجية"
+
+msgid "Nepali"
+msgstr "النيبالية"
+
+msgid "Dutch"
+msgstr "الهولنديّة"
+
+msgid "Norwegian Nynorsk"
+msgstr "النينورسك نرويجيّة"
+
+msgid "Ossetic"
+msgstr "الأوسيتيكية"
+
+msgid "Punjabi"
+msgstr "البنجابيّة"
+
+msgid "Polish"
+msgstr "البولنديّة"
+
+msgid "Portuguese"
+msgstr "البرتغاليّة"
+
+msgid "Brazilian Portuguese"
+msgstr "البرتغاليّة البرازيليّة"
+
+msgid "Romanian"
+msgstr "الرومانيّة"
+
+msgid "Russian"
+msgstr "الروسيّة"
+
+msgid "Slovak"
+msgstr "السلوفاكيّة"
+
+msgid "Slovenian"
+msgstr "السلوفانيّة"
+
+msgid "Albanian"
+msgstr "الألبانيّة"
+
+msgid "Serbian"
+msgstr "الصربيّة"
+
+msgid "Serbian Latin"
+msgstr "اللاتينيّة الصربيّة"
+
+msgid "Swedish"
+msgstr "السويديّة"
+
+msgid "Swahili"
+msgstr "السواحلية"
+
+msgid "Tamil"
+msgstr "التاميل"
+
+msgid "Telugu"
+msgstr "التيلوغو"
+
+msgid "Tajik"
+msgstr "طاجيك"
+
+msgid "Thai"
+msgstr "التايلنديّة"
+
+msgid "Turkmen"
+msgstr "تركمان"
+
+msgid "Turkish"
+msgstr "التركيّة"
+
+msgid "Tatar"
+msgstr "التتاريية"
+
+msgid "Udmurt"
+msgstr "الأدمرتية"
+
+msgid "Ukrainian"
+msgstr "الأكرانيّة"
+
+msgid "Urdu"
+msgstr "الأوردو"
+
+msgid "Uzbek"
+msgstr "الأوزبكي"
+
+msgid "Vietnamese"
+msgstr "الفيتناميّة"
+
+msgid "Simplified Chinese"
+msgstr "الصينيّة المبسطة"
+
+msgid "Traditional Chinese"
+msgstr "الصينيّة التقليدية"
+
+msgid "Messages"
+msgstr "الرسائل"
+
+msgid "Site Maps"
+msgstr "خرائط الموقع"
+
+msgid "Static Files"
+msgstr "الملفات الثابتة"
+
+msgid "Syndication"
+msgstr "توظيف النشر"
+
+#. Translators: String used to replace omitted page numbers in elided page
+#. range generated by paginators, e.g. [1, 2, '…', 5, 6, 7, '…', 9, 10].
+msgid "…"
+msgstr "..."
+
+msgid "That page number is not an integer"
+msgstr "رقم الصفحة هذا ليس عدداً طبيعياً"
+
+msgid "That page number is less than 1"
+msgstr "رقم الصفحة أقل من 1"
+
+msgid "That page contains no results"
+msgstr "هذه الصفحة لا تحتوي على نتائج"
+
+msgid "Enter a valid value."
+msgstr "أدخِل قيمة صحيحة."
+
+msgid "Enter a valid URL."
+msgstr "أدخِل رابطًا صحيحًا."
+
+msgid "Enter a valid integer."
+msgstr "أدخِل عدداً طبيعياً."
+
+msgid "Enter a valid email address."
+msgstr "أدخِل عنوان بريد إلكتروني صحيح."
+
+#. Translators: "letters" means latin letters: a-z and A-Z.
+msgid ""
+"Enter a valid “slug” consisting of letters, numbers, underscores or hyphens."
+msgstr "أدخل اختصار 'slug' صحيح يتكوّن من أحرف، أرقام، شرطات سفلية وعاديّة."
+
+msgid ""
+"Enter a valid “slug” consisting of Unicode letters, numbers, underscores, or "
+"hyphens."
+msgstr ""
+"أدخل اختصار 'slug' صحيح يتكون من أحرف Unicode أو أرقام أو شرطات سفلية أو "
+"واصلات."
+
+msgid "Enter a valid IPv4 address."
+msgstr "أدخِل عنوان IPv4 صحيح."
+
+msgid "Enter a valid IPv6 address."
+msgstr "أدخِل عنوان IPv6 صحيح."
+
+msgid "Enter a valid IPv4 or IPv6 address."
+msgstr "أدخِل عنوان IPv4 أو عنوان IPv6 صحيح."
+
+msgid "Enter only digits separated by commas."
+msgstr "أدخِل فقط أرقامًا تفصلها الفواصل."
+
+#, python-format
+msgid "Ensure this value is %(limit_value)s (it is %(show_value)s)."
+msgstr "تحقق من أن هذه القيمة هي %(limit_value)s (إنها %(show_value)s)."
+
+#, python-format
+msgid "Ensure this value is less than or equal to %(limit_value)s."
+msgstr "تحقق من أن تكون هذه القيمة أقل من %(limit_value)s أو مساوية لها."
+
+#, python-format
+msgid "Ensure this value is greater than or equal to %(limit_value)s."
+msgstr "تحقق من أن تكون هذه القيمة أكثر من %(limit_value)s أو مساوية لها."
+
+#, python-format
+msgid ""
+"Ensure this value has at least %(limit_value)d character (it has "
+"%(show_value)d)."
+msgid_plural ""
+"Ensure this value has at least %(limit_value)d characters (it has "
+"%(show_value)d)."
+msgstr[0] ""
+"تأكد أن هذه القيمة تحتوي على %(limit_value)d حرف أو رمز على الأقل (هي تحتوي "
+"حالياً على %(show_value)d)."
+msgstr[1] ""
+"تأكد أن هذه القيمة تحتوي على حرف أو رمز %(limit_value)d على الأقل (هي تحتوي "
+"حالياً على %(show_value)d)."
+msgstr[2] ""
+"تأكد أن هذه القيمة تحتوي على %(limit_value)d حرف و رمز على الأقل (هي تحتوي "
+"حالياً على %(show_value)d)."
+msgstr[3] ""
+"تأكد أن هذه القيمة تحتوي على %(limit_value)d حرف أو رمز على الأقل (هي تحتوي "
+"حالياً على %(show_value)d)."
+msgstr[4] ""
+"تأكد أن هذه القيمة تحتوي على %(limit_value)d حرف أو رمز على الأقل (هي تحتوي "
+"حالياً على %(show_value)d)."
+msgstr[5] ""
+"تأكد أن هذه القيمة تحتوي على %(limit_value)d حرف أو رمز على الأقل (هي تحتوي "
+"حالياً على %(show_value)d)."
+
+#, python-format
+msgid ""
+"Ensure this value has at most %(limit_value)d character (it has "
+"%(show_value)d)."
+msgid_plural ""
+"Ensure this value has at most %(limit_value)d characters (it has "
+"%(show_value)d)."
+msgstr[0] ""
+"تأكد أن هذه القيمة تحتوي على %(limit_value)d حرف أو رمز على الأكثر (هي تحتوي "
+"حالياً على %(show_value)d)."
+msgstr[1] ""
+"تأكد أن هذه القيمة تحتوي على حرف أو رمز %(limit_value)d على الأكثر (هي تحتوي "
+"حالياً على %(show_value)d)."
+msgstr[2] ""
+"تأكد أن هذه القيمة تحتوي على %(limit_value)d حرف و رمز على الأكثر (هي تحتوي "
+"حالياً على %(show_value)d)."
+msgstr[3] ""
+"تأكد أن هذه القيمة تحتوي على %(limit_value)d حرف أو رمز على الأكثر (هي تحتوي "
+"حالياً على %(show_value)d)."
+msgstr[4] ""
+"تأكد أن هذه القيمة تحتوي على %(limit_value)d حرف أو رمز على الأكثر (هي تحتوي "
+"حالياً على %(show_value)d)."
+msgstr[5] ""
+"تأكد أن هذه القيمة تحتوي على %(limit_value)d حرف أو رمز على الأكثر (هي تحتوي "
+"حالياً على %(show_value)d)."
+
+msgid "Enter a number."
+msgstr "أدخل رقماً."
+
+#, python-format
+msgid "Ensure that there are no more than %(max)s digit in total."
+msgid_plural "Ensure that there are no more than %(max)s digits in total."
+msgstr[0] "تحقق من أن تدخل %(max)s أرقام لا أكثر."
+msgstr[1] "تحقق من أن تدخل رقم %(max)s لا أكثر."
+msgstr[2] "تحقق من أن تدخل %(max)s رقمين لا أكثر."
+msgstr[3] "تحقق من أن تدخل %(max)s أرقام لا أكثر."
+msgstr[4] "تحقق من أن تدخل %(max)s أرقام لا أكثر."
+msgstr[5] "تحقق من أن تدخل %(max)s أرقام لا أكثر."
+
+#, python-format
+msgid "Ensure that there are no more than %(max)s decimal place."
+msgid_plural "Ensure that there are no more than %(max)s decimal places."
+msgstr[0] "تحقق من أن تدخل %(max)s خانات عشرية لا أكثر."
+msgstr[1] "تحقق من أن تدخل خانة %(max)s عشرية لا أكثر."
+msgstr[2] "تحقق من أن تدخل %(max)s خانتين عشريتين لا أكثر."
+msgstr[3] "تحقق من أن تدخل %(max)s خانات عشرية لا أكثر."
+msgstr[4] "تحقق من أن تدخل %(max)s خانات عشرية لا أكثر."
+msgstr[5] "تحقق من أن تدخل %(max)s خانات عشرية لا أكثر."
+
+#, python-format
+msgid ""
+"Ensure that there are no more than %(max)s digit before the decimal point."
+msgid_plural ""
+"Ensure that there are no more than %(max)s digits before the decimal point."
+msgstr[0] "تحقق من أن تدخل %(max)s أرقام قبل الفاصل العشري لا أكثر."
+msgstr[1] "تحقق من أن تدخل رقم %(max)s قبل الفاصل العشري لا أكثر."
+msgstr[2] "تحقق من أن تدخل %(max)s رقمين قبل الفاصل العشري لا أكثر."
+msgstr[3] "تحقق من أن تدخل %(max)s أرقام قبل الفاصل العشري لا أكثر."
+msgstr[4] "تحقق من أن تدخل %(max)s أرقام قبل الفاصل العشري لا أكثر."
+msgstr[5] "تحقق من أن تدخل %(max)s أرقام قبل الفاصل العشري لا أكثر."
+
+#, python-format
+msgid ""
+"File extension “%(extension)s” is not allowed. Allowed extensions are: "
+"%(allowed_extensions)s."
+msgstr ""
+"امتداد الملف “%(extension)s” غير مسموح به. الامتدادات المسموح بها هي: "
+"%(allowed_extensions)s."
+
+msgid "Null characters are not allowed."
+msgstr "الأحرف الخالية غير مسموح بها."
+
+msgid "and"
+msgstr "و"
+
+#, python-format
+msgid "%(model_name)s with this %(field_labels)s already exists."
+msgstr "%(model_name)s بهذا %(field_labels)s موجود سلفاً."
+
+#, python-format
+msgid "Value %(value)r is not a valid choice."
+msgstr "القيمة %(value)r ليست خيارا صحيحاً."
+
+msgid "This field cannot be null."
+msgstr "لا يمكن تعيين null كقيمة لهذا الحقل."
+
+msgid "This field cannot be blank."
+msgstr "لا يمكن ترك هذا الحقل فارغاً."
+
+#, python-format
+msgid "%(model_name)s with this %(field_label)s already exists."
+msgstr "النموذج %(model_name)s والحقل %(field_label)s موجود مسبقاً."
+
+#. Translators: The 'lookup_type' is one of 'date', 'year' or 'month'.
+#. Eg: "Title must be unique for pub_date year"
+#, python-format
+msgid ""
+"%(field_label)s must be unique for %(date_field_label)s %(lookup_type)s."
+msgstr ""
+"%(field_label)s يجب أن يكون فريد لـ %(date_field_label)s %(lookup_type)s."
+
+#, python-format
+msgid "Field of type: %(field_type)s"
+msgstr "حقل نوع: %(field_type)s"
+
+#, python-format
+msgid "“%(value)s” value must be either True or False."
+msgstr "قيمة '%(value)s' يجب أن تكون True أو False."
+
+#, python-format
+msgid "“%(value)s” value must be either True, False, or None."
+msgstr "قيمة “%(value)s” يجب أن تكون True , False أو None."
+
+msgid "Boolean (Either True or False)"
+msgstr "ثنائي (إما True أو False)"
+
+#, python-format
+msgid "String (up to %(max_length)s)"
+msgstr "سلسلة نص (%(max_length)s كحد أقصى)"
+
+msgid "Comma-separated integers"
+msgstr "أرقام صحيحة مفصولة بفواصل"
+
+#, python-format
+msgid ""
+"“%(value)s” value has an invalid date format. It must be in YYYY-MM-DD "
+"format."
+msgstr ""
+"قيمة '%(value)s' ليست من بُنية تاريخ صحيحة. القيمة يجب ان تكون من البُنية YYYY-"
+"MM-DD."
+
+#, python-format
+msgid ""
+"“%(value)s” value has the correct format (YYYY-MM-DD) but it is an invalid "
+"date."
+msgstr "قيمة '%(value)s' من بُنية صحيحة (YYYY-MM-DD) لكنها تحوي تاريخ غير صحيح."
+
+msgid "Date (without time)"
+msgstr "التاريخ (دون الوقت)"
+
+#, python-format
+msgid ""
+"“%(value)s” value has an invalid format. It must be in YYYY-MM-DD HH:MM[:ss[."
+"uuuuuu]][TZ] format."
+msgstr ""
+"قيمة '%(value)s' ليست من بُنية صحيحة. القيمة يجب ان تكون من البُنية YYYY-MM-DD "
+"HH:MM[:ss[.uuuuuu]][TZ] ."
+
+#, python-format
+msgid ""
+"“%(value)s” value has the correct format (YYYY-MM-DD HH:MM[:ss[.uuuuuu]]"
+"[TZ]) but it is an invalid date/time."
+msgstr ""
+"قيمة '%(value)s' من بُنية صحيحة (YYYY-MM-DD HH:MM[:ss[.uuuuuu]][TZ]) لكنها "
+"تحوي وقت و تاريخ غير صحيحين."
+
+msgid "Date (with time)"
+msgstr "التاريخ (مع الوقت)"
+
+#, python-format
+msgid "“%(value)s” value must be a decimal number."
+msgstr "قيمة '%(value)s' يجب ان تكون عدد عشري."
+
+msgid "Decimal number"
+msgstr "رقم عشري"
+
+#, python-format
+msgid ""
+"“%(value)s” value has an invalid format. It must be in [DD] [[HH:]MM:]ss[."
+"uuuuuu] format."
+msgstr ""
+"قيمة '%(value)s' ليست بنسق صحيح. القيمة يجب ان تكون من التنسيق ([DD] "
+"[[HH:]MM:]ss[.uuuuuu])"
+
+msgid "Duration"
+msgstr "المدّة"
+
+msgid "Email address"
+msgstr "عنوان بريد إلكتروني"
+
+msgid "File path"
+msgstr "مسار الملف"
+
+#, python-format
+msgid "“%(value)s” value must be a float."
+msgstr "قيمة '%(value)s' يجب ان تكون عدد تعويم."
+
+msgid "Floating point number"
+msgstr "رقم فاصلة عائمة"
+
+#, python-format
+msgid "“%(value)s” value must be an integer."
+msgstr "قيمة '%(value)s' يجب ان تكون عدد طبيعي."
+
+msgid "Integer"
+msgstr "عدد صحيح"
+
+msgid "Big (8 byte) integer"
+msgstr "عدد صحيح كبير (8 بايت)"
+
+msgid "Small integer"
+msgstr "عدد صحيح صغير"
+
+msgid "IPv4 address"
+msgstr "عنوان IPv4"
+
+msgid "IP address"
+msgstr "عنوان IP"
+
+#, python-format
+msgid "“%(value)s” value must be either None, True or False."
+msgstr "قيمة '%(value)s' يجب ان تكون None أو True أو False."
+
+msgid "Boolean (Either True, False or None)"
+msgstr "ثنائي (إما True أو False أو None)"
+
+msgid "Positive big integer"
+msgstr "عدد صحيح موجب كبير"
+
+msgid "Positive integer"
+msgstr "عدد صحيح موجب"
+
+msgid "Positive small integer"
+msgstr "عدد صحيح صغير موجب"
+
+#, python-format
+msgid "Slug (up to %(max_length)s)"
+msgstr "Slug (حتى %(max_length)s)"
+
+msgid "Text"
+msgstr "نص"
+
+#, python-format
+msgid ""
+"“%(value)s” value has an invalid format. It must be in HH:MM[:ss[.uuuuuu]] "
+"format."
+msgstr ""
+"قيمة '%(value)s' ليست بنسق صحيح. القيمة يجب ان تكون من التنسيق\n"
+"HH:MM[:ss[.uuuuuu]]"
+
+#, python-format
+msgid ""
+"“%(value)s” value has the correct format (HH:MM[:ss[.uuuuuu]]) but it is an "
+"invalid time."
+msgstr ""
+"قيمة '%(value)s' من بُنية صحيحة (HH:MM[:ss[.uuuuuu]]) لكنها تحوي وقت غير صحيح."
+
+msgid "Time"
+msgstr "وقت"
+
+msgid "URL"
+msgstr "رابط"
+
+msgid "Raw binary data"
+msgstr "البيانات الثنائية الخام"
+
+#, python-format
+msgid "“%(value)s” is not a valid UUID."
+msgstr "القيمة \"%(value)s\" ليست UUID صالح."
+
+msgid "Universally unique identifier"
+msgstr "معرّف فريد عالمياً"
+
+msgid "File"
+msgstr "ملف"
+
+msgid "Image"
+msgstr "صورة"
+
+msgid "A JSON object"
+msgstr "كائن JSON"
+
+msgid "Value must be valid JSON."
+msgstr "يجب أن تكون قيمة JSON صالحة."
+
+#, python-format
+msgid "%(model)s instance with %(field)s %(value)r does not exist."
+msgstr "النموذج %(model)s ذو الحقل و القيمة %(field)s %(value)r غير موجود."
+
+msgid "Foreign Key (type determined by related field)"
+msgstr "الحقل المرتبط (تم تحديد النوع وفقاً للحقل المرتبط)"
+
+msgid "One-to-one relationship"
+msgstr "علاقة واحد إلى واحد"
+
+#, python-format
+msgid "%(from)s-%(to)s relationship"
+msgstr "%(from)s-%(to)s علاقة"
+
+#, python-format
+msgid "%(from)s-%(to)s relationships"
+msgstr "%(from)s-%(to)s علاقات"
+
+msgid "Many-to-many relationship"
+msgstr "علاقة متعدد إلى متعدد"
+
+#. Translators: If found as last label character, these punctuation
+#. characters will prevent the default label_suffix to be appended to the
+#. label
+msgid ":?.!"
+msgstr ":?.!"
+
+msgid "This field is required."
+msgstr "هذا الحقل مطلوب."
+
+msgid "Enter a whole number."
+msgstr "أدخل رقما صحيحا."
+
+msgid "Enter a valid date."
+msgstr "أدخل تاريخاً صحيحاً."
+
+msgid "Enter a valid time."
+msgstr "أدخل وقتاً صحيحاً."
+
+msgid "Enter a valid date/time."
+msgstr "أدخل تاريخاً/وقتاً صحيحاً."
+
+msgid "Enter a valid duration."
+msgstr "أدخل مدّة صحيحة."
+
+#, python-brace-format
+msgid "The number of days must be between {min_days} and {max_days}."
+msgstr "يجب أن يكون عدد الأيام بين {min_days} و {max_days}."
+
+msgid "No file was submitted. Check the encoding type on the form."
+msgstr "لم يتم ارسال ملف، الرجاء التأكد من نوع ترميز الاستمارة."
+
+msgid "No file was submitted."
+msgstr "لم يتم إرسال اي ملف."
+
+msgid "The submitted file is empty."
+msgstr "الملف الذي قمت بإرساله فارغ."
+
+#, python-format
+msgid "Ensure this filename has at most %(max)d character (it has %(length)d)."
+msgid_plural ""
+"Ensure this filename has at most %(max)d characters (it has %(length)d)."
+msgstr[0] ""
+"تأكد أن إسم هذا الملف يحتوي على %(max)d حرف على الأكثر (هو يحتوي الآن على "
+"%(length)d حرف)."
+msgstr[1] ""
+"تأكد أن إسم هذا الملف يحتوي على حرف %(max)d على الأكثر (هو يحتوي الآن على "
+"%(length)d حرف)."
+msgstr[2] ""
+"تأكد أن إسم هذا الملف يحتوي على %(max)d حرفين على الأكثر (هو يحتوي الآن على "
+"%(length)d حرف)."
+msgstr[3] ""
+"تأكد أن إسم هذا الملف يحتوي على %(max)d حرف على الأكثر (هو يحتوي الآن على "
+"%(length)d حرف)."
+msgstr[4] ""
+"تأكد أن إسم هذا الملف يحتوي على %(max)d حرف على الأكثر (هو يحتوي الآن على "
+"%(length)d حرف)."
+msgstr[5] ""
+"تأكد أن إسم هذا الملف يحتوي على %(max)d حرف على الأكثر (هو يحتوي الآن على "
+"%(length)d حرف)."
+
+msgid "Please either submit a file or check the clear checkbox, not both."
+msgstr "رجاءً أرسل ملفاً أو ضع علامة صح عند مربع اختيار \"فارغ\"، وليس كلاهما."
+
+msgid ""
+"Upload a valid image. The file you uploaded was either not an image or a "
+"corrupted image."
+msgstr ""
+"قم برفع صورة صحيحة، الملف الذي قمت برفعه إما أنه ليس ملفا لصورة أو أنه ملف "
+"معطوب."
+
+#, python-format
+msgid "Select a valid choice. %(value)s is not one of the available choices."
+msgstr "انتق خياراً صحيحاً. %(value)s ليس أحد الخيارات المتاحة."
+
+msgid "Enter a list of values."
+msgstr "أدخل قائمة من القيم."
+
+msgid "Enter a complete value."
+msgstr "إدخال قيمة كاملة."
+
+msgid "Enter a valid UUID."
+msgstr "أدخل قيمة UUID صحيحة."
+
+msgid "Enter a valid JSON."
+msgstr "أدخل مدخل JSON صالح."
+
+#. Translators: This is the default suffix added to form field labels
+msgid ":"
+msgstr ":"
+
+#, python-format
+msgid "(Hidden field %(name)s) %(error)s"
+msgstr "(الحقل الخفي %(name)s) %(error)s"
+
+#, python-format
+msgid ""
+"ManagementForm data is missing or has been tampered with. Missing fields: "
+"%(field_names)s. You may need to file a bug report if the issue persists."
+msgstr ""
+"بيانات نموذج الإدارة مفقودة أو تم العبث بها. الحقول المفقودة: "
+"%(field_names)s. قد تحتاج إلى تقديم تقرير خطأ إذا استمرت المشكلة."
+
+#, python-format
+msgid "Please submit at most %d form."
+msgid_plural "Please submit at most %d forms."
+msgstr[0] "الرجاء إرسال %d إستمارة على الأكثر."
+msgstr[1] "الرجاء إرسال %d إستمارة على الأكثر."
+msgstr[2] "الرجاء إرسال %d إستمارة على الأكثر."
+msgstr[3] "الرجاء إرسال %d إستمارة على الأكثر."
+msgstr[4] "الرجاء إرسال %d إستمارة على الأكثر."
+msgstr[5] "الرجاء إرسال %d إستمارة على الأكثر."
+
+#, python-format
+msgid "Please submit at least %d form."
+msgid_plural "Please submit at least %d forms."
+msgstr[0] "الرجاء إرسال %d إستمارة على الأقل."
+msgstr[1] "الرجاء إرسال %d إستمارة على الأقل."
+msgstr[2] "الرجاء إرسال %d إستمارة على الأقل."
+msgstr[3] "الرجاء إرسال %d إستمارة على الأقل."
+msgstr[4] "الرجاء إرسال %d إستمارة على الأقل."
+msgstr[5] "الرجاء إرسال %d إستمارة على الأقل."
+
+msgid "Order"
+msgstr "الترتيب"
+
+msgid "Delete"
+msgstr "احذف"
+
+#, python-format
+msgid "Please correct the duplicate data for %(field)s."
+msgstr "رجاء صحّح بيانات %(field)s المتكررة."
+
+#, python-format
+msgid "Please correct the duplicate data for %(field)s, which must be unique."
+msgstr "رجاء صحّح بيانات %(field)s المتكررة والتي يجب أن تكون مُميّزة."
+
+#, python-format
+msgid ""
+"Please correct the duplicate data for %(field_name)s which must be unique "
+"for the %(lookup)s in %(date_field)s."
+msgstr ""
+"رجاء صحّح بيانات %(field_name)s المتكررة والتي يجب أن تكون مُميّزة لـ%(lookup)s "
+"في %(date_field)s."
+
+msgid "Please correct the duplicate values below."
+msgstr "رجاءً صحّح القيم المُكرّرة أدناه."
+
+msgid "The inline value did not match the parent instance."
+msgstr "لا تتطابق القيمة المضمنة مع المثيل الأصلي."
+
+msgid "Select a valid choice. That choice is not one of the available choices."
+msgstr "انتق خياراً صحيحاً. اختيارك ليس أحد الخيارات المتاحة."
+
+#, python-format
+msgid "“%(pk)s” is not a valid value."
+msgstr "\"%(pk)s\" ليست قيمة صالحة."
+
+#, python-format
+msgid ""
+"%(datetime)s couldn’t be interpreted in time zone %(current_timezone)s; it "
+"may be ambiguous or it may not exist."
+msgstr ""
+"%(datetime)s لا يمكن تفسيرها في المنطقة الزمنية %(current_timezone)s; قد "
+"تكون غامضة أو أنها غير موجودة."
+
+msgid "Clear"
+msgstr "تفريغ"
+
+msgid "Currently"
+msgstr "حالياً"
+
+msgid "Change"
+msgstr "عدّل"
+
+msgid "Unknown"
+msgstr "مجهول"
+
+msgid "Yes"
+msgstr "نعم"
+
+msgid "No"
+msgstr "لا"
+
+#. Translators: Please do not add spaces around commas.
+msgid "yes,no,maybe"
+msgstr "نعم,لا,ربما"
+
+#, python-format
+msgid "%(size)d byte"
+msgid_plural "%(size)d bytes"
+msgstr[0] "%(size)d بايت"
+msgstr[1] "بايت واحد"
+msgstr[2] "بايتان"
+msgstr[3] "%(size)d بايتات"
+msgstr[4] "%(size)d بايت"
+msgstr[5] "%(size)d بايت"
+
+#, python-format
+msgid "%s KB"
+msgstr "%s ك.ب"
+
+#, python-format
+msgid "%s MB"
+msgstr "%s م.ب"
+
+#, python-format
+msgid "%s GB"
+msgstr "%s ج.ب"
+
+#, python-format
+msgid "%s TB"
+msgstr "%s ت.ب"
+
+#, python-format
+msgid "%s PB"
+msgstr "%s ب.ب"
+
+msgid "p.m."
+msgstr "م"
+
+msgid "a.m."
+msgstr "ص"
+
+msgid "PM"
+msgstr "م"
+
+msgid "AM"
+msgstr "ص"
+
+msgid "midnight"
+msgstr "منتصف الليل"
+
+msgid "noon"
+msgstr "ظهراً"
+
+msgid "Monday"
+msgstr "الاثنين"
+
+msgid "Tuesday"
+msgstr "الثلاثاء"
+
+msgid "Wednesday"
+msgstr "الأربعاء"
+
+msgid "Thursday"
+msgstr "الخميس"
+
+msgid "Friday"
+msgstr "الجمعة"
+
+msgid "Saturday"
+msgstr "السبت"
+
+msgid "Sunday"
+msgstr "الأحد"
+
+msgid "Mon"
+msgstr "إثنين"
+
+msgid "Tue"
+msgstr "ثلاثاء"
+
+msgid "Wed"
+msgstr "أربعاء"
+
+msgid "Thu"
+msgstr "خميس"
+
+msgid "Fri"
+msgstr "جمعة"
+
+msgid "Sat"
+msgstr "سبت"
+
+msgid "Sun"
+msgstr "أحد"
+
+msgid "January"
+msgstr "يناير"
+
+msgid "February"
+msgstr "فبراير"
+
+msgid "March"
+msgstr "مارس"
+
+msgid "April"
+msgstr "إبريل"
+
+msgid "May"
+msgstr "مايو"
+
+msgid "June"
+msgstr "يونيو"
+
+msgid "July"
+msgstr "يوليو"
+
+msgid "August"
+msgstr "أغسطس"
+
+msgid "September"
+msgstr "سبتمبر"
+
+msgid "October"
+msgstr "أكتوبر"
+
+msgid "November"
+msgstr "نوفمبر"
+
+msgid "December"
+msgstr "ديسمبر"
+
+msgid "jan"
+msgstr "يناير"
+
+msgid "feb"
+msgstr "فبراير"
+
+msgid "mar"
+msgstr "مارس"
+
+msgid "apr"
+msgstr "إبريل"
+
+msgid "may"
+msgstr "مايو"
+
+msgid "jun"
+msgstr "يونيو"
+
+msgid "jul"
+msgstr "يوليو"
+
+msgid "aug"
+msgstr "أغسطس"
+
+msgid "sep"
+msgstr "سبتمبر"
+
+msgid "oct"
+msgstr "أكتوبر"
+
+msgid "nov"
+msgstr "نوفمبر"
+
+msgid "dec"
+msgstr "ديسمبر"
+
+msgctxt "abbrev. month"
+msgid "Jan."
+msgstr "يناير"
+
+msgctxt "abbrev. month"
+msgid "Feb."
+msgstr "فبراير"
+
+msgctxt "abbrev. month"
+msgid "March"
+msgstr "مارس"
+
+msgctxt "abbrev. month"
+msgid "April"
+msgstr "إبريل"
+
+msgctxt "abbrev. month"
+msgid "May"
+msgstr "مايو"
+
+msgctxt "abbrev. month"
+msgid "June"
+msgstr "يونيو"
+
+msgctxt "abbrev. month"
+msgid "July"
+msgstr "يوليو"
+
+msgctxt "abbrev. month"
+msgid "Aug."
+msgstr "أغسطس"
+
+msgctxt "abbrev. month"
+msgid "Sept."
+msgstr "سبتمبر"
+
+msgctxt "abbrev. month"
+msgid "Oct."
+msgstr "أكتوبر"
+
+msgctxt "abbrev. month"
+msgid "Nov."
+msgstr "نوفمبر"
+
+msgctxt "abbrev. month"
+msgid "Dec."
+msgstr "ديسمبر"
+
+msgctxt "alt. month"
+msgid "January"
+msgstr "يناير"
+
+msgctxt "alt. month"
+msgid "February"
+msgstr "فبراير"
+
+msgctxt "alt. month"
+msgid "March"
+msgstr "مارس"
+
+msgctxt "alt. month"
+msgid "April"
+msgstr "أبريل"
+
+msgctxt "alt. month"
+msgid "May"
+msgstr "مايو"
+
+msgctxt "alt. month"
+msgid "June"
+msgstr "يونيو"
+
+msgctxt "alt. month"
+msgid "July"
+msgstr "يوليو"
+
+msgctxt "alt. month"
+msgid "August"
+msgstr "أغسطس"
+
+msgctxt "alt. month"
+msgid "September"
+msgstr "سبتمبر"
+
+msgctxt "alt. month"
+msgid "October"
+msgstr "أكتوبر"
+
+msgctxt "alt. month"
+msgid "November"
+msgstr "نوفمبر"
+
+msgctxt "alt. month"
+msgid "December"
+msgstr "ديسمبر"
+
+msgid "This is not a valid IPv6 address."
+msgstr "هذا ليس عنوان IPv6 صحيح."
+
+#, python-format
+msgctxt "String to return when truncating text"
+msgid "%(truncated_text)s…"
+msgstr "%(truncated_text)s…"
+
+msgid "or"
+msgstr "أو"
+
+#. Translators: This string is used as a separator between list elements
+msgid ", "
+msgstr "، "
+
+#, python-format
+msgid "%(num)d year"
+msgid_plural "%(num)d years"
+msgstr[0] "%(num)d سنة"
+msgstr[1] "%(num)d سنة"
+msgstr[2] "%(num)d سنتين"
+msgstr[3] "%(num)d سنوات"
+msgstr[4] "%(num)d سنوات"
+msgstr[5] "%(num)d سنوات"
+
+#, python-format
+msgid "%(num)d month"
+msgid_plural "%(num)d months"
+msgstr[0] "%(num)d شهر"
+msgstr[1] "%(num)d شهر"
+msgstr[2] "%(num)d شهرين"
+msgstr[3] "%(num)d أشهر"
+msgstr[4] "%(num)d أشهر"
+msgstr[5] "%(num)d أشهر"
+
+#, python-format
+msgid "%(num)d week"
+msgid_plural "%(num)d weeks"
+msgstr[0] "%(num)d أسبوع"
+msgstr[1] "%(num)d أسبوع"
+msgstr[2] "%(num)d أسبوعين"
+msgstr[3] "%(num)d أسابيع"
+msgstr[4] "%(num)d أسابيع"
+msgstr[5] "%(num)d أسابيع"
+
+#, python-format
+msgid "%(num)d day"
+msgid_plural "%(num)d days"
+msgstr[0] "%(num)d يوم"
+msgstr[1] "%(num)d يوم"
+msgstr[2] "%(num)d يومين"
+msgstr[3] "%(num)d أيام"
+msgstr[4] "%(num)d يوم"
+msgstr[5] "%(num)d أيام"
+
+#, python-format
+msgid "%(num)d hour"
+msgid_plural "%(num)d hours"
+msgstr[0] "%(num)d ساعة"
+msgstr[1] "%(num)d ساعة"
+msgstr[2] "%(num)d ساعتين"
+msgstr[3] "%(num)d ساعات"
+msgstr[4] "%(num)d ساعة"
+msgstr[5] "%(num)d ساعات"
+
+#, python-format
+msgid "%(num)d minute"
+msgid_plural "%(num)d minutes"
+msgstr[0] "%(num)d دقيقة"
+msgstr[1] "%(num)d دقيقة"
+msgstr[2] "%(num)d دقيقتين"
+msgstr[3] "%(num)d دقائق"
+msgstr[4] "%(num)d دقيقة"
+msgstr[5] "%(num)d دقيقة"
+
+msgid "Forbidden"
+msgstr "ممنوع"
+
+msgid "CSRF verification failed. Request aborted."
+msgstr "تم الفشل للتحقق من CSRF. تم إنهاء الطلب."
+
+msgid ""
+"You are seeing this message because this HTTPS site requires a “Referer "
+"header” to be sent by your web browser, but none was sent. This header is "
+"required for security reasons, to ensure that your browser is not being "
+"hijacked by third parties."
+msgstr ""
+"أنت ترى هذه الرسالة لأن موقع HTTPS هذا يتطلب إرسال “Referer header” بواسطة "
+"متصفح الويب الخاص بك، ولكن لم يتم إرسال أي منها. هذا مطلوب لأسباب أمنية، "
+"لضمان عدم اختطاف متصفحك من قبل أطراف ثالثة."
+
+msgid ""
+"If you have configured your browser to disable “Referer” headers, please re-"
+"enable them, at least for this site, or for HTTPS connections, or for “same-"
+"origin” requests."
+msgstr ""
+"إذا قمت بتكوين المستعرض لتعطيل رؤوس “Referer” ، فيرجى إعادة تمكينها ، على "
+"الأقل لهذا الموقع ، أو لاتصالات HTTPS ، أو لطلبات “same-origin”."
+
+msgid ""
+"If you are using the tag or "
+"including the “Referrer-Policy: no-referrer” header, please remove them. The "
+"CSRF protection requires the “Referer” header to do strict referer checking. "
+"If you’re concerned about privacy, use alternatives like for links to third-party sites."
+msgstr ""
+"إذا كنت تستخدم العلامة أو "
+"تضمين رأس “Referrer-Policy: no-referrer”، يرجى إزالتها. تتطلب حماية CSRF أن "
+"يقوم رأس “Referer” بإجراء فحص صارم للمراجع. إذا كنت قلقًا بشأن الخصوصية ، "
+"فاستخدم بدائل مثل للروابط إلى مواقع الجهات الخارجية."
+
+msgid ""
+"You are seeing this message because this site requires a CSRF cookie when "
+"submitting forms. This cookie is required for security reasons, to ensure "
+"that your browser is not being hijacked by third parties."
+msgstr ""
+"أنت ترى هذه الرسالة لأن هذا الموقع يتطلب كعكة CSRF عند تقديم النماذج. ملف "
+"الكعكة هذا مطلوب لأسباب أمنية في تعريف الإرتباط، لضمان أنه لم يتم اختطاف "
+"المتصفح من قبل أطراف أخرى."
+
+msgid ""
+"If you have configured your browser to disable cookies, please re-enable "
+"them, at least for this site, or for “same-origin” requests."
+msgstr ""
+"إذا قمت بضبط المتصفح لتعطيل الكوكيز الرجاء إعادة تفعيلها، على الأقل بالنسبة "
+"لهذا الموقع، أو للطلبات من “same-origin”."
+
+msgid "More information is available with DEBUG=True."
+msgstr "يتوفر مزيد من المعلومات عند ضبط الخيار DEBUG=True."
+
+msgid "No year specified"
+msgstr "لم تحدد السنة"
+
+msgid "Date out of range"
+msgstr "التاريخ خارج النطاق"
+
+msgid "No month specified"
+msgstr "لم تحدد الشهر"
+
+msgid "No day specified"
+msgstr "لم تحدد اليوم"
+
+msgid "No week specified"
+msgstr "لم تحدد الأسبوع"
+
+#, python-format
+msgid "No %(verbose_name_plural)s available"
+msgstr "لا يوجد %(verbose_name_plural)s"
+
+#, python-format
+msgid ""
+"Future %(verbose_name_plural)s not available because %(class_name)s."
+"allow_future is False."
+msgstr ""
+"التاريخ بالمستقبل %(verbose_name_plural)s غير متوفر لأن قيمة %(class_name)s."
+"allow_future هي False."
+
+#, python-format
+msgid "Invalid date string “%(datestr)s” given format “%(format)s”"
+msgstr "نسق تاريخ غير صحيح \"%(datestr)s\" محدد بالشكل \"%(format)s\""
+
+#, python-format
+msgid "No %(verbose_name)s found matching the query"
+msgstr "لم يعثر على أي %(verbose_name)s مطابقة لهذا الإستعلام"
+
+msgid "Page is not “last”, nor can it be converted to an int."
+msgstr "الصفحة ليست \"الأخيرة\"، كما لا يمكن تحويل القيمة إلى رقم طبيعي."
+
+#, python-format
+msgid "Invalid page (%(page_number)s): %(message)s"
+msgstr "صفحة خاطئة (%(page_number)s): %(message)s"
+
+#, python-format
+msgid "Empty list and “%(class_name)s.allow_empty” is False."
+msgstr ""
+"قائمة فارغة و\n"
+"\"%(class_name)s.allow_empty\"\n"
+"قيمته False."
+
+msgid "Directory indexes are not allowed here."
+msgstr "لا يسمح لفهارس الدليل هنا."
+
+#, python-format
+msgid "“%(path)s” does not exist"
+msgstr "”%(path)s“ غير موجود"
+
+#, python-format
+msgid "Index of %(directory)s"
+msgstr "فهرس لـ %(directory)s"
+
+msgid "The install worked successfully! Congratulations!"
+msgstr "تمت عملية التنصيب بنجاح! تهانينا!"
+
+#, python-format
+msgid ""
+"View release notes for Django %(version)s"
+msgstr ""
+"استعراض ملاحظات الإصدار لجانغو %(version)s"
+
+#, python-format
+msgid ""
+"You are seeing this page because DEBUG=True is in your settings file and you have not configured any "
+"URLs."
+msgstr ""
+"تظهر لك هذه الصفحة لأن DEBUG=True في ملف settings خاصتك كما أنك لم تقم بإعداد الروابط URLs."
+
+msgid "Django Documentation"
+msgstr "وثائق تعليمات جانغو"
+
+msgid "Topics, references, & how-to’s"
+msgstr "المواضيع و المراجع و التعليمات"
+
+msgid "Tutorial: A Polling App"
+msgstr "برنامج تعليمي: تطبيق تصويت"
+
+msgid "Get started with Django"
+msgstr "إبدأ مع جانغو"
+
+msgid "Django Community"
+msgstr "مجتمع جانغو"
+
+msgid "Connect, get help, or contribute"
+msgstr "اتصل بنا أو احصل على مساعدة أو ساهم"
diff --git a/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/ar/__init__.py b/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/ar/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/ar/formats.py b/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/ar/formats.py
new file mode 100644
index 00000000..8008ce6e
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/ar/formats.py
@@ -0,0 +1,21 @@
+# This file is distributed under the same license as the Django package.
+#
+# The *_FORMAT strings use the Django date format syntax,
+# see https://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
+DATE_FORMAT = "j F، Y"
+TIME_FORMAT = "g:i A"
+# DATETIME_FORMAT =
+YEAR_MONTH_FORMAT = "F Y"
+MONTH_DAY_FORMAT = "j F"
+SHORT_DATE_FORMAT = "d/m/Y"
+# SHORT_DATETIME_FORMAT =
+# FIRST_DAY_OF_WEEK =
+
+# The *_INPUT_FORMATS strings use the Python strftime format syntax,
+# see https://docs.python.org/library/datetime.html#strftime-strptime-behavior
+# DATE_INPUT_FORMATS =
+# TIME_INPUT_FORMATS =
+# DATETIME_INPUT_FORMATS =
+DECIMAL_SEPARATOR = ","
+THOUSAND_SEPARATOR = "."
+# NUMBER_GROUPING =
diff --git a/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/ar_DZ/LC_MESSAGES/django.mo b/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/ar_DZ/LC_MESSAGES/django.mo
new file mode 100644
index 00000000..3c0e3240
Binary files /dev/null and b/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/ar_DZ/LC_MESSAGES/django.mo differ
diff --git a/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/ar_DZ/LC_MESSAGES/django.po b/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/ar_DZ/LC_MESSAGES/django.po
new file mode 100644
index 00000000..b32da348
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/ar_DZ/LC_MESSAGES/django.po
@@ -0,0 +1,1397 @@
+# This file is distributed under the same license as the Django package.
+#
+# Translators:
+# Jihad Bahmaid Al-Halki, 2022
+# Riterix , 2019-2020
+# Riterix , 2019
+msgid ""
+msgstr ""
+"Project-Id-Version: django\n"
+"Report-Msgid-Bugs-To: \n"
+"POT-Creation-Date: 2022-05-17 05:23-0500\n"
+"PO-Revision-Date: 2022-07-25 06:49+0000\n"
+"Last-Translator: Jihad Bahmaid Al-Halki\n"
+"Language-Team: Arabic (Algeria) (http://www.transifex.com/django/django/"
+"language/ar_DZ/)\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Language: ar_DZ\n"
+"Plural-Forms: nplurals=6; plural=n==0 ? 0 : n==1 ? 1 : n==2 ? 2 : n%100>=3 "
+"&& n%100<=10 ? 3 : n%100>=11 && n%100<=99 ? 4 : 5;\n"
+
+msgid "Afrikaans"
+msgstr "الإفريقية"
+
+msgid "Arabic"
+msgstr "العربية"
+
+msgid "Algerian Arabic"
+msgstr "العربية الجزائرية"
+
+msgid "Asturian"
+msgstr "الأسترية"
+
+msgid "Azerbaijani"
+msgstr "الأذربيجانية"
+
+msgid "Bulgarian"
+msgstr "البلغارية"
+
+msgid "Belarusian"
+msgstr "البيلاروسية"
+
+msgid "Bengali"
+msgstr "البنغالية"
+
+msgid "Breton"
+msgstr "البريتونية"
+
+msgid "Bosnian"
+msgstr "البوسنية"
+
+msgid "Catalan"
+msgstr "الكتلانية"
+
+msgid "Czech"
+msgstr "التشيكية"
+
+msgid "Welsh"
+msgstr "الويلز"
+
+msgid "Danish"
+msgstr "الدنماركية"
+
+msgid "German"
+msgstr "الألمانية"
+
+msgid "Lower Sorbian"
+msgstr "الصربية السفلى"
+
+msgid "Greek"
+msgstr "اليونانية"
+
+msgid "English"
+msgstr "الإنجليزية"
+
+msgid "Australian English"
+msgstr "الإنجليزية الإسترالية"
+
+msgid "British English"
+msgstr "الإنجليزية البريطانية"
+
+msgid "Esperanto"
+msgstr "الاسبرانتو"
+
+msgid "Spanish"
+msgstr "الإسبانية"
+
+msgid "Argentinian Spanish"
+msgstr "الأسبانية الأرجنتينية"
+
+msgid "Colombian Spanish"
+msgstr "الكولومبية الإسبانية"
+
+msgid "Mexican Spanish"
+msgstr "الأسبانية المكسيكية"
+
+msgid "Nicaraguan Spanish"
+msgstr "الإسبانية النيكاراغوية"
+
+msgid "Venezuelan Spanish"
+msgstr "الإسبانية الفنزويلية"
+
+msgid "Estonian"
+msgstr "الإستونية"
+
+msgid "Basque"
+msgstr "الباسك"
+
+msgid "Persian"
+msgstr "الفارسية"
+
+msgid "Finnish"
+msgstr "الفنلندية"
+
+msgid "French"
+msgstr "الفرنسية"
+
+msgid "Frisian"
+msgstr "الفريزية"
+
+msgid "Irish"
+msgstr "الإيرلندية"
+
+msgid "Scottish Gaelic"
+msgstr "الغيلية الأسكتلندية"
+
+msgid "Galician"
+msgstr "الجليقية"
+
+msgid "Hebrew"
+msgstr "العبرية"
+
+msgid "Hindi"
+msgstr "الهندية"
+
+msgid "Croatian"
+msgstr "الكرواتية"
+
+msgid "Upper Sorbian"
+msgstr "الصربية العليا"
+
+msgid "Hungarian"
+msgstr "الهنغارية"
+
+msgid "Armenian"
+msgstr "الأرمنية"
+
+msgid "Interlingua"
+msgstr "اللغة الوسيطة"
+
+msgid "Indonesian"
+msgstr "الإندونيسية"
+
+msgid "Igbo"
+msgstr "إيبو"
+
+msgid "Ido"
+msgstr "ايدو"
+
+msgid "Icelandic"
+msgstr "الآيسلندية"
+
+msgid "Italian"
+msgstr "الإيطالية"
+
+msgid "Japanese"
+msgstr "اليابانية"
+
+msgid "Georgian"
+msgstr "الجورجية"
+
+msgid "Kabyle"
+msgstr "القبائلية"
+
+msgid "Kazakh"
+msgstr "الكازاخستانية"
+
+msgid "Khmer"
+msgstr "الخمر"
+
+msgid "Kannada"
+msgstr "الهندية (كنّادا)"
+
+msgid "Korean"
+msgstr "الكورية"
+
+msgid "Kyrgyz"
+msgstr "القيرغيزية"
+
+msgid "Luxembourgish"
+msgstr "اللوكسمبرجية"
+
+msgid "Lithuanian"
+msgstr "اللتوانية"
+
+msgid "Latvian"
+msgstr "اللاتفية"
+
+msgid "Macedonian"
+msgstr "المقدونية"
+
+msgid "Malayalam"
+msgstr "المايالام"
+
+msgid "Mongolian"
+msgstr "المنغولية"
+
+msgid "Marathi"
+msgstr "المهاراتية"
+
+msgid "Malay"
+msgstr "ملاي"
+
+msgid "Burmese"
+msgstr "البورمية"
+
+msgid "Norwegian Bokmål"
+msgstr "النرويجية"
+
+msgid "Nepali"
+msgstr "النيبالية"
+
+msgid "Dutch"
+msgstr "الهولندية"
+
+msgid "Norwegian Nynorsk"
+msgstr "النينورسك نرويجية"
+
+msgid "Ossetic"
+msgstr "الأوسيتيكية"
+
+msgid "Punjabi"
+msgstr "البنجابية"
+
+msgid "Polish"
+msgstr "البولندية"
+
+msgid "Portuguese"
+msgstr "البرتغالية"
+
+msgid "Brazilian Portuguese"
+msgstr "البرتغالية البرازيلية"
+
+msgid "Romanian"
+msgstr "الرومانية"
+
+msgid "Russian"
+msgstr "الروسية"
+
+msgid "Slovak"
+msgstr "السلوفاكية"
+
+msgid "Slovenian"
+msgstr "السلوفانية"
+
+msgid "Albanian"
+msgstr "الألبانية"
+
+msgid "Serbian"
+msgstr "الصربية"
+
+msgid "Serbian Latin"
+msgstr "اللاتينية الصربية"
+
+msgid "Swedish"
+msgstr "السويدية"
+
+msgid "Swahili"
+msgstr "السواحلية"
+
+msgid "Tamil"
+msgstr "التاميل"
+
+msgid "Telugu"
+msgstr "التيلوغو"
+
+msgid "Tajik"
+msgstr "الطاجيكية"
+
+msgid "Thai"
+msgstr "التايلندية"
+
+msgid "Turkmen"
+msgstr ""
+
+msgid "Turkish"
+msgstr "التركية"
+
+msgid "Tatar"
+msgstr "التتارية"
+
+msgid "Udmurt"
+msgstr "الأدمرتية"
+
+msgid "Ukrainian"
+msgstr "الأكرانية"
+
+msgid "Urdu"
+msgstr "الأوردو"
+
+msgid "Uzbek"
+msgstr "الأوزبكية"
+
+msgid "Vietnamese"
+msgstr "الفيتنامية"
+
+msgid "Simplified Chinese"
+msgstr "الصينية المبسطة"
+
+msgid "Traditional Chinese"
+msgstr "الصينية التقليدية"
+
+msgid "Messages"
+msgstr "الرسائل"
+
+msgid "Site Maps"
+msgstr "خرائط الموقع"
+
+msgid "Static Files"
+msgstr "الملفات الثابتة"
+
+msgid "Syndication"
+msgstr "توظيف النشر"
+
+#. Translators: String used to replace omitted page numbers in elided page
+#. range generated by paginators, e.g. [1, 2, '…', 5, 6, 7, '…', 9, 10].
+msgid "…"
+msgstr ""
+
+msgid "That page number is not an integer"
+msgstr "رقم الصفحة ليس عددًا صحيحًا"
+
+msgid "That page number is less than 1"
+msgstr "رقم الصفحة أقل من 1"
+
+msgid "That page contains no results"
+msgstr "هذه الصفحة لا تحتوي على نتائج"
+
+msgid "Enter a valid value."
+msgstr "أدخل قيمة صحيحة."
+
+msgid "Enter a valid URL."
+msgstr "أدخل رابطاً صحيحاً."
+
+msgid "Enter a valid integer."
+msgstr "أدخل رقم صالح."
+
+msgid "Enter a valid email address."
+msgstr "أدخل عنوان بريد إلكتروني صحيح."
+
+#. Translators: "letters" means latin letters: a-z and A-Z.
+msgid ""
+"Enter a valid “slug” consisting of letters, numbers, underscores or hyphens."
+msgstr ""
+"أدخل “slug” صالحة تتكون من أحرف أو أرقام أو الشرطة السفلية أو الواصلات."
+
+msgid ""
+"Enter a valid “slug” consisting of Unicode letters, numbers, underscores, or "
+"hyphens."
+msgstr ""
+"أدخل “slug” صالحة تتكون من أحرف Unicode أو الأرقام أو الشرطة السفلية أو "
+"الواصلات."
+
+msgid "Enter a valid IPv4 address."
+msgstr "أدخل عنوان IPv4 صحيح."
+
+msgid "Enter a valid IPv6 address."
+msgstr "أدخل عنوان IPv6 صحيح."
+
+msgid "Enter a valid IPv4 or IPv6 address."
+msgstr "أدخل عنوان IPv4 أو عنوان IPv6 صحيح."
+
+msgid "Enter only digits separated by commas."
+msgstr "أدخل أرقاما فقط مفصول بينها بفواصل."
+
+#, python-format
+msgid "Ensure this value is %(limit_value)s (it is %(show_value)s)."
+msgstr "تحقق من أن هذه القيمة هي %(limit_value)s (إنها %(show_value)s)."
+
+#, python-format
+msgid "Ensure this value is less than or equal to %(limit_value)s."
+msgstr "تحقق من أن تكون هذه القيمة أقل من %(limit_value)s أو مساوية لها."
+
+#, python-format
+msgid "Ensure this value is greater than or equal to %(limit_value)s."
+msgstr "تحقق من أن تكون هذه القيمة أكثر من %(limit_value)s أو مساوية لها."
+
+#, python-format
+msgid "Ensure this value is a multiple of step size %(limit_value)s."
+msgstr ""
+
+#, python-format
+msgid ""
+"Ensure this value has at least %(limit_value)d character (it has "
+"%(show_value)d)."
+msgid_plural ""
+"Ensure this value has at least %(limit_value)d characters (it has "
+"%(show_value)d)."
+msgstr[0] ""
+"تأكد أن هذه القيمة تحتوي على %(limit_value)d حرف أو رمز على الأقل (هي تحتوي "
+"حالياً على %(show_value)d)."
+msgstr[1] ""
+"تأكد أن هذه القيمة تحتوي على %(limit_value)d حرف أو رمز على الأقل (هي تحتوي "
+"حالياً على %(show_value)d)."
+msgstr[2] ""
+"تأكد أن هذه القيمة تحتوي على %(limit_value)d حرف أو رمز على الأقل (هي تحتوي "
+"حالياً على %(show_value)d)."
+msgstr[3] ""
+"تأكد أن هذه القيمة تحتوي على %(limit_value)d حرف أو رمز على الأقل (هي تحتوي "
+"حالياً على %(show_value)d)."
+msgstr[4] ""
+"تأكد أن هذه القيمة تحتوي على %(limit_value)d حرف أو رمز على الأقل (هي تحتوي "
+"حالياً على %(show_value)d)."
+msgstr[5] ""
+"تأكد أن هذه القيمة تحتوي على %(limit_value)d حرف أو رمز على الأقل (هي تحتوي "
+"حالياً على %(show_value)d)."
+
+#, python-format
+msgid ""
+"Ensure this value has at most %(limit_value)d character (it has "
+"%(show_value)d)."
+msgid_plural ""
+"Ensure this value has at most %(limit_value)d characters (it has "
+"%(show_value)d)."
+msgstr[0] ""
+"تأكد أن هذه القيمة تحتوي على %(limit_value)d حرف أو رمز على الأكثر (هي تحتوي "
+"حالياً على %(show_value)d)."
+msgstr[1] ""
+"تأكد أن هذه القيمة تحتوي على %(limit_value)d حرف أو رمز على الأكثر (هي تحتوي "
+"حالياً على %(show_value)d)."
+msgstr[2] ""
+"تأكد أن هذه القيمة تحتوي على %(limit_value)d حرف أو رمز على الأكثر (هي تحتوي "
+"حالياً على %(show_value)d)."
+msgstr[3] ""
+"تأكد أن هذه القيمة تحتوي على %(limit_value)d حرف أو رمز على الأكثر (هي تحتوي "
+"حالياً على %(show_value)d)."
+msgstr[4] ""
+"تأكد أن هذه القيمة تحتوي على %(limit_value)d حرف أو رمز على الأكثر (هي تحتوي "
+"حالياً على %(show_value)d)."
+msgstr[5] ""
+"تأكد أن هذه القيمة تحتوي على %(limit_value)d حرف أو رمز على الأكثر (هي تحتوي "
+"حالياً على %(show_value)d)."
+
+msgid "Enter a number."
+msgstr "أدخل رقماً."
+
+#, python-format
+msgid "Ensure that there are no more than %(max)s digit in total."
+msgid_plural "Ensure that there are no more than %(max)s digits in total."
+msgstr[0] "تحقق من أن تدخل %(max)s أرقام لا أكثر."
+msgstr[1] "تحقق من أن تدخل رقم %(max)s لا أكثر."
+msgstr[2] "تحقق من أن تدخل %(max)s رقمين لا أكثر."
+msgstr[3] "تحقق من أن تدخل %(max)s أرقام لا أكثر."
+msgstr[4] "تحقق من أن تدخل %(max)s أرقام لا أكثر."
+msgstr[5] "تحقق من أن تدخل %(max)s أرقام لا أكثر."
+
+#, python-format
+msgid "Ensure that there are no more than %(max)s decimal place."
+msgid_plural "Ensure that there are no more than %(max)s decimal places."
+msgstr[0] "تحقق من أن تدخل %(max)s خانات عشرية لا أكثر."
+msgstr[1] "تحقق من أن تدخل %(max)s خانات عشرية لا أكثر."
+msgstr[2] "تحقق من أن تدخل %(max)s خانات عشرية لا أكثر."
+msgstr[3] "تحقق من أن تدخل %(max)s خانات عشرية لا أكثر."
+msgstr[4] "تحقق من أن تدخل %(max)s خانات عشرية لا أكثر."
+msgstr[5] "تحقق من أن تدخل %(max)s خانات عشرية لا أكثر."
+
+#, python-format
+msgid ""
+"Ensure that there are no more than %(max)s digit before the decimal point."
+msgid_plural ""
+"Ensure that there are no more than %(max)s digits before the decimal point."
+msgstr[0] "تحقق من أن تدخل %(max)s أرقام قبل الفاصل العشري لا أكثر."
+msgstr[1] "تحقق من أن تدخل %(max)s أرقام قبل الفاصل العشري لا أكثر."
+msgstr[2] "تحقق من أن تدخل %(max)s أرقام قبل الفاصل العشري لا أكثر."
+msgstr[3] "تحقق من أن تدخل %(max)s أرقام قبل الفاصل العشري لا أكثر."
+msgstr[4] "تحقق من أن تدخل %(max)s أرقام قبل الفاصل العشري لا أكثر."
+msgstr[5] "تحقق من أن تدخل %(max)s أرقام قبل الفاصل العشري لا أكثر."
+
+#, python-format
+msgid ""
+"File extension “%(extension)s” is not allowed. Allowed extensions are: "
+"%(allowed_extensions)s."
+msgstr ""
+"امتداد الملف “%(extension)s” غير مسموح به. الامتدادات المسموح بها هي:"
+"%(allowed_extensions)s."
+
+msgid "Null characters are not allowed."
+msgstr "لا يُسمح بالأحرف الخالية."
+
+msgid "and"
+msgstr "و"
+
+#, python-format
+msgid "%(model_name)s with this %(field_labels)s already exists."
+msgstr "%(model_name)s بهذا %(field_labels)s موجود سلفاً."
+
+#, python-format
+msgid "Constraint “%(name)s” is violated."
+msgstr ""
+
+#, python-format
+msgid "Value %(value)r is not a valid choice."
+msgstr "القيمة %(value)r ليست خيارا صحيحاً."
+
+msgid "This field cannot be null."
+msgstr "لا يمكن ترك هذا الحقل خالي."
+
+msgid "This field cannot be blank."
+msgstr "لا يمكن ترك هذا الحقل فارغاً."
+
+#, python-format
+msgid "%(model_name)s with this %(field_label)s already exists."
+msgstr "النموذج %(model_name)s والحقل %(field_label)s موجود مسبقاً."
+
+#. Translators: The 'lookup_type' is one of 'date', 'year' or
+#. 'month'. Eg: "Title must be unique for pub_date year"
+#, python-format
+msgid ""
+"%(field_label)s must be unique for %(date_field_label)s %(lookup_type)s."
+msgstr ""
+"%(field_label)s يجب أن يكون فريد لـ %(date_field_label)s %(lookup_type)s."
+
+#, python-format
+msgid "Field of type: %(field_type)s"
+msgstr "حقل نوع: %(field_type)s"
+
+#, python-format
+msgid "“%(value)s” value must be either True or False."
+msgstr "يجب أن تكون القيمة “%(value)s” إما True أو False."
+
+#, python-format
+msgid "“%(value)s” value must be either True, False, or None."
+msgstr "يجب أن تكون القيمة “%(value)s” إما True أو False أو None."
+
+msgid "Boolean (Either True or False)"
+msgstr "ثنائي (إما True أو False)"
+
+#, python-format
+msgid "String (up to %(max_length)s)"
+msgstr "سلسلة نص (%(max_length)s كحد أقصى)"
+
+msgid "Comma-separated integers"
+msgstr "أرقام صحيحة مفصولة بفواصل"
+
+#, python-format
+msgid ""
+"“%(value)s” value has an invalid date format. It must be in YYYY-MM-DD "
+"format."
+msgstr ""
+"تحتوي القيمة “%(value)s” على تنسيق تاريخ غير صالح. يجب أن يكون بتنسيق YYYY-"
+"MM-DD."
+
+#, python-format
+msgid ""
+"“%(value)s” value has the correct format (YYYY-MM-DD) but it is an invalid "
+"date."
+msgstr ""
+"تحتوي القيمة “%(value)s” على التنسيق الصحيح (YYYY-MM-DD) ولكنه تاريخ غير "
+"صالح."
+
+msgid "Date (without time)"
+msgstr "التاريخ (دون الوقت)"
+
+#, python-format
+msgid ""
+"“%(value)s” value has an invalid format. It must be in YYYY-MM-DD HH:MM[:ss[."
+"uuuuuu]][TZ] format."
+msgstr ""
+"تحتوي القيمة “%(value)s” على تنسيق غير صالح. يجب أن يكون بتنسيق YYYY-MM-DD "
+"HH: MM [: ss [.uuuuuu]] [TZ]."
+
+#, python-format
+msgid ""
+"“%(value)s” value has the correct format (YYYY-MM-DD HH:MM[:ss[.uuuuuu]]"
+"[TZ]) but it is an invalid date/time."
+msgstr ""
+"تحتوي القيمة “%(value)s” على التنسيق الصحيح (YYYY-MM-DD HH: MM [: ss [."
+"uuuuuu]] [TZ]) ولكنها تعد تاريخًا / وقتًا غير صالحين."
+
+msgid "Date (with time)"
+msgstr "التاريخ (مع الوقت)"
+
+#, python-format
+msgid "“%(value)s” value must be a decimal number."
+msgstr "يجب أن تكون القيمة “%(value)s” رقمًا عشريًا."
+
+msgid "Decimal number"
+msgstr "رقم عشري"
+
+#, python-format
+msgid ""
+"“%(value)s” value has an invalid format. It must be in [DD] [[HH:]MM:]ss[."
+"uuuuuu] format."
+msgstr ""
+"تحتوي القيمة “%(value)s” على تنسيق غير صالح. يجب أن يكون بتنسيق [DD] [[HH:] "
+"MM:] ss [.uuuuuu]."
+
+msgid "Duration"
+msgstr "المدّة"
+
+msgid "Email address"
+msgstr "عنوان بريد إلكتروني"
+
+msgid "File path"
+msgstr "مسار الملف"
+
+#, python-format
+msgid "“%(value)s” value must be a float."
+msgstr "يجب أن تكون القيمة “%(value)s” قيمة عائمة."
+
+msgid "Floating point number"
+msgstr "رقم فاصلة عائمة"
+
+#, python-format
+msgid "“%(value)s” value must be an integer."
+msgstr "يجب أن تكون القيمة “%(value)s” عددًا صحيحًا."
+
+msgid "Integer"
+msgstr "عدد صحيح"
+
+msgid "Big (8 byte) integer"
+msgstr "عدد صحيح كبير (8 بايت)"
+
+msgid "Small integer"
+msgstr "عدد صحيح صغير"
+
+msgid "IPv4 address"
+msgstr "عنوان IPv4"
+
+msgid "IP address"
+msgstr "عنوان IP"
+
+#, python-format
+msgid "“%(value)s” value must be either None, True or False."
+msgstr "يجب أن تكون القيمة “%(value)s” إما None أو True أو False."
+
+msgid "Boolean (Either True, False or None)"
+msgstr "ثنائي (إما True أو False أو None)"
+
+msgid "Positive big integer"
+msgstr "عدد صحيح كبير موجب"
+
+msgid "Positive integer"
+msgstr "عدد صحيح موجب"
+
+msgid "Positive small integer"
+msgstr "عدد صحيح صغير موجب"
+
+#, python-format
+msgid "Slug (up to %(max_length)s)"
+msgstr "Slug (حتى %(max_length)s)"
+
+msgid "Text"
+msgstr "نص"
+
+#, python-format
+msgid ""
+"“%(value)s” value has an invalid format. It must be in HH:MM[:ss[.uuuuuu]] "
+"format."
+msgstr ""
+"تحتوي القيمة “%(value)s” على تنسيق غير صالح. يجب أن يكون بتنسيق "
+"HH:MM[:ss[.uuuuuu]]."
+
+#, python-format
+msgid ""
+"“%(value)s” value has the correct format (HH:MM[:ss[.uuuuuu]]) but it is an "
+"invalid time."
+msgstr ""
+"تحتوي القيمة “%(value)s” على التنسيق الصحيح (HH:MM[:ss[.uuuuuu]]) ولكنه "
+"وقت غير صالح."
+
+msgid "Time"
+msgstr "وقت"
+
+msgid "URL"
+msgstr "رابط"
+
+msgid "Raw binary data"
+msgstr "البيانات الثنائية الخام"
+
+#, python-format
+msgid "“%(value)s” is not a valid UUID."
+msgstr "“%(value)s” ليس UUID صالحًا."
+
+msgid "Universally unique identifier"
+msgstr "المعرف الفريد العالمي (UUID)"
+
+msgid "File"
+msgstr "ملف"
+
+msgid "Image"
+msgstr "صورة"
+
+msgid "A JSON object"
+msgstr "كائن JSON"
+
+msgid "Value must be valid JSON."
+msgstr "يجب أن تكون قيمة JSON صالحة."
+
+#, python-format
+msgid "%(model)s instance with %(field)s %(value)r does not exist."
+msgstr "النموذج %(model)s ذو الحقل و القيمة %(field)s %(value)r غير موجود."
+
+msgid "Foreign Key (type determined by related field)"
+msgstr "الحقل المرتبط (تم تحديد النوع وفقاً للحقل المرتبط)"
+
+msgid "One-to-one relationship"
+msgstr "علاقة واحد إلى واحد"
+
+#, python-format
+msgid "%(from)s-%(to)s relationship"
+msgstr "%(from)s-%(to)s علاقة"
+
+#, python-format
+msgid "%(from)s-%(to)s relationships"
+msgstr "%(from)s-%(to)s علاقات"
+
+msgid "Many-to-many relationship"
+msgstr "علاقة متعدد إلى متعدد"
+
+#. Translators: If found as last label character, these punctuation
+#. characters will prevent the default label_suffix to be appended to the
+#. label
+msgid ":?.!"
+msgstr ":?.!"
+
+msgid "This field is required."
+msgstr "هذا الحقل مطلوب."
+
+msgid "Enter a whole number."
+msgstr "أدخل رقما صحيحا."
+
+msgid "Enter a valid date."
+msgstr "أدخل تاريخاً صحيحاً."
+
+msgid "Enter a valid time."
+msgstr "أدخل وقتاً صحيحاً."
+
+msgid "Enter a valid date/time."
+msgstr "أدخل تاريخاً/وقتاً صحيحاً."
+
+msgid "Enter a valid duration."
+msgstr "أدخل مدّة صحيحة."
+
+#, python-brace-format
+msgid "The number of days must be between {min_days} and {max_days}."
+msgstr "يجب أن يتراوح عدد الأيام بين {min_days} و {max_days}."
+
+msgid "No file was submitted. Check the encoding type on the form."
+msgstr "لم يتم ارسال ملف، الرجاء التأكد من نوع ترميز الاستمارة."
+
+msgid "No file was submitted."
+msgstr "لم يتم إرسال اي ملف."
+
+msgid "The submitted file is empty."
+msgstr "الملف الذي قمت بإرساله فارغ."
+
+#, python-format
+msgid "Ensure this filename has at most %(max)d character (it has %(length)d)."
+msgid_plural ""
+"Ensure this filename has at most %(max)d characters (it has %(length)d)."
+msgstr[0] ""
+"تأكد أن إسم هذا الملف يحتوي على %(max)d حرف على الأكثر (هو يحتوي الآن على "
+"%(length)d حرف)."
+msgstr[1] ""
+"تأكد أن إسم هذا الملف يحتوي على %(max)d حرف على الأكثر (هو يحتوي الآن على "
+"%(length)d حرف)."
+msgstr[2] ""
+"تأكد أن إسم هذا الملف يحتوي على %(max)d حرف على الأكثر (هو يحتوي الآن على "
+"%(length)d حرف)."
+msgstr[3] ""
+"تأكد أن إسم هذا الملف يحتوي على %(max)d حرف على الأكثر (هو يحتوي الآن على "
+"%(length)d حرف)."
+msgstr[4] ""
+"تأكد أن إسم هذا الملف يحتوي على %(max)d حرف على الأكثر (هو يحتوي الآن على "
+"%(length)d حرف)."
+msgstr[5] ""
+"تأكد أن إسم هذا الملف يحتوي على %(max)d حرف على الأكثر (هو يحتوي الآن على "
+"%(length)d حرف)."
+
+msgid "Please either submit a file or check the clear checkbox, not both."
+msgstr ""
+"رجاءً أرسل ملفًا أو ضع علامة في مربع اختيار \"فارغ\"، وليس كلاهما."
+
+msgid ""
+"Upload a valid image. The file you uploaded was either not an image or a "
+"corrupted image."
+msgstr ""
+"قم برفع صورة صحيحة، الملف الذي قمت برفعه إما أنه ليس ملفا لصورة أو أنه ملف "
+"معطوب."
+
+#, python-format
+msgid "Select a valid choice. %(value)s is not one of the available choices."
+msgstr "انتق خياراً صحيحاً. %(value)s ليس أحد الخيارات المتاحة."
+
+msgid "Enter a list of values."
+msgstr "أدخل قائمة من القيم."
+
+msgid "Enter a complete value."
+msgstr "إدخال قيمة كاملة."
+
+msgid "Enter a valid UUID."
+msgstr "أدخل قيمة UUID صحيحة."
+
+msgid "Enter a valid JSON."
+msgstr "ادخل كائن JSON صالح."
+
+#. Translators: This is the default suffix added to form field labels
+msgid ":"
+msgstr ":"
+
+#, python-format
+msgid "(Hidden field %(name)s) %(error)s"
+msgstr "(الحقل الخفي %(name)s) %(error)s"
+
+#, python-format
+msgid ""
+"ManagementForm data is missing or has been tampered with. Missing fields: "
+"%(field_names)s. You may need to file a bug report if the issue persists."
+msgstr ""
+"نموذج بيانات الإدارة مفقود أو تم العبث به. الحقول المفقودة: %(field_names)s. "
+"قد تحتاج إلى رفع تقرير بالمشكلة إن استمرت الحالة."
+
+#, python-format
+msgid "Please submit at most %(num)d form."
+msgid_plural "Please submit at most %(num)d forms."
+msgstr[0] ""
+msgstr[1] ""
+msgstr[2] ""
+msgstr[3] ""
+msgstr[4] ""
+msgstr[5] ""
+
+#, python-format
+msgid "Please submit at least %(num)d form."
+msgid_plural "Please submit at least %(num)d forms."
+msgstr[0] ""
+msgstr[1] ""
+msgstr[2] ""
+msgstr[3] ""
+msgstr[4] ""
+msgstr[5] ""
+
+msgid "Order"
+msgstr "الترتيب"
+
+msgid "Delete"
+msgstr "احذف"
+
+#, python-format
+msgid "Please correct the duplicate data for %(field)s."
+msgstr "رجاء صحّح بيانات %(field)s المتكررة."
+
+#, python-format
+msgid "Please correct the duplicate data for %(field)s, which must be unique."
+msgstr "رجاء صحّح بيانات %(field)s المتكررة والتي يجب أن تكون مُميّزة."
+
+#, python-format
+msgid ""
+"Please correct the duplicate data for %(field_name)s which must be unique "
+"for the %(lookup)s in %(date_field)s."
+msgstr ""
+"رجاء صحّح بيانات %(field_name)s المتكررة والتي يجب أن تكون مُميّزة لـ%(lookup)s "
+"في %(date_field)s."
+
+msgid "Please correct the duplicate values below."
+msgstr "رجاءً صحّح القيم المُكرّرة أدناه."
+
+msgid "The inline value did not match the parent instance."
+msgstr "القيمة المضمنة لا تتطابق مع المثيل الأصلي."
+
+msgid "Select a valid choice. That choice is not one of the available choices."
+msgstr "انتق خياراً صحيحاً. اختيارك ليس أحد الخيارات المتاحة."
+
+#, python-format
+msgid "“%(pk)s” is not a valid value."
+msgstr "“%(pk)s” ليست قيمة صالحة."
+
+#, python-format
+msgid ""
+"%(datetime)s couldn’t be interpreted in time zone %(current_timezone)s; it "
+"may be ambiguous or it may not exist."
+msgstr ""
+"لا يمكن تفسير %(datetime)s في المنطقة الزمنية %(current_timezone)s؛ قد "
+"تكون غامضة أو غير موجودة."
+
+msgid "Clear"
+msgstr "تفريغ"
+
+msgid "Currently"
+msgstr "حالياً"
+
+msgid "Change"
+msgstr "عدّل"
+
+msgid "Unknown"
+msgstr "مجهول"
+
+msgid "Yes"
+msgstr "نعم"
+
+msgid "No"
+msgstr "لا"
+
+#. Translators: Please do not add spaces around commas.
+msgid "yes,no,maybe"
+msgstr "نعم,لا,ربما"
+
+#, python-format
+msgid "%(size)d byte"
+msgid_plural "%(size)d bytes"
+msgstr[0] "%(size)d بايت"
+msgstr[1] "%(size)d بايت واحد "
+msgstr[2] "%(size)d بايتان"
+msgstr[3] "%(size)d بايت"
+msgstr[4] "%(size)d بايت"
+msgstr[5] "%(size)d بايت"
+
+#, python-format
+msgid "%s KB"
+msgstr "%s ك.ب"
+
+#, python-format
+msgid "%s MB"
+msgstr "%s م.ب"
+
+#, python-format
+msgid "%s GB"
+msgstr "%s ج.ب"
+
+#, python-format
+msgid "%s TB"
+msgstr "%s ت.ب"
+
+#, python-format
+msgid "%s PB"
+msgstr "%s ب.ب"
+
+msgid "p.m."
+msgstr "م"
+
+msgid "a.m."
+msgstr "ص"
+
+msgid "PM"
+msgstr "م"
+
+msgid "AM"
+msgstr "ص"
+
+msgid "midnight"
+msgstr "منتصف الليل"
+
+msgid "noon"
+msgstr "ظهراً"
+
+msgid "Monday"
+msgstr "الاثنين"
+
+msgid "Tuesday"
+msgstr "الثلاثاء"
+
+msgid "Wednesday"
+msgstr "الأربعاء"
+
+msgid "Thursday"
+msgstr "الخميس"
+
+msgid "Friday"
+msgstr "الجمعة"
+
+msgid "Saturday"
+msgstr "السبت"
+
+msgid "Sunday"
+msgstr "الأحد"
+
+msgid "Mon"
+msgstr "إثنين"
+
+msgid "Tue"
+msgstr "ثلاثاء"
+
+msgid "Wed"
+msgstr "أربعاء"
+
+msgid "Thu"
+msgstr "خميس"
+
+msgid "Fri"
+msgstr "جمعة"
+
+msgid "Sat"
+msgstr "سبت"
+
+msgid "Sun"
+msgstr "أحد"
+
+msgid "January"
+msgstr "جانفي"
+
+msgid "February"
+msgstr "فيفري"
+
+msgid "March"
+msgstr "مارس"
+
+msgid "April"
+msgstr "أفريل"
+
+msgid "May"
+msgstr "ماي"
+
+msgid "June"
+msgstr "جوان"
+
+msgid "July"
+msgstr "جويليه"
+
+msgid "August"
+msgstr "أوت"
+
+msgid "September"
+msgstr "سبتمبر"
+
+msgid "October"
+msgstr "أكتوبر"
+
+msgid "November"
+msgstr "نوفمبر"
+
+msgid "December"
+msgstr "ديسمبر"
+
+msgid "jan"
+msgstr "جانفي"
+
+msgid "feb"
+msgstr "فيفري"
+
+msgid "mar"
+msgstr "مارس"
+
+msgid "apr"
+msgstr "أفريل"
+
+msgid "may"
+msgstr "ماي"
+
+msgid "jun"
+msgstr "جوان"
+
+msgid "jul"
+msgstr "جويليه"
+
+msgid "aug"
+msgstr "أوت"
+
+msgid "sep"
+msgstr "سبتمبر"
+
+msgid "oct"
+msgstr "أكتوبر"
+
+msgid "nov"
+msgstr "نوفمبر"
+
+msgid "dec"
+msgstr "ديسمبر"
+
+msgctxt "abbrev. month"
+msgid "Jan."
+msgstr "جانفي"
+
+msgctxt "abbrev. month"
+msgid "Feb."
+msgstr "فيفري"
+
+msgctxt "abbrev. month"
+msgid "March"
+msgstr "مارس"
+
+msgctxt "abbrev. month"
+msgid "April"
+msgstr "أفريل"
+
+msgctxt "abbrev. month"
+msgid "May"
+msgstr "ماي"
+
+msgctxt "abbrev. month"
+msgid "June"
+msgstr "جوان"
+
+msgctxt "abbrev. month"
+msgid "July"
+msgstr "جويليه"
+
+msgctxt "abbrev. month"
+msgid "Aug."
+msgstr "أوت"
+
+msgctxt "abbrev. month"
+msgid "Sept."
+msgstr "سبتمبر"
+
+msgctxt "abbrev. month"
+msgid "Oct."
+msgstr "أكتوبر"
+
+msgctxt "abbrev. month"
+msgid "Nov."
+msgstr "نوفمبر"
+
+msgctxt "abbrev. month"
+msgid "Dec."
+msgstr "ديسمبر"
+
+msgctxt "alt. month"
+msgid "January"
+msgstr "جانفي"
+
+msgctxt "alt. month"
+msgid "February"
+msgstr "فيفري"
+
+msgctxt "alt. month"
+msgid "March"
+msgstr "مارس"
+
+msgctxt "alt. month"
+msgid "April"
+msgstr "أفريل"
+
+msgctxt "alt. month"
+msgid "May"
+msgstr "ماي"
+
+msgctxt "alt. month"
+msgid "June"
+msgstr "جوان"
+
+msgctxt "alt. month"
+msgid "July"
+msgstr "جويليه"
+
+msgctxt "alt. month"
+msgid "August"
+msgstr "أوت"
+
+msgctxt "alt. month"
+msgid "September"
+msgstr "سبتمبر"
+
+msgctxt "alt. month"
+msgid "October"
+msgstr "أكتوبر"
+
+msgctxt "alt. month"
+msgid "November"
+msgstr "نوفمبر"
+
+msgctxt "alt. month"
+msgid "December"
+msgstr "ديسمبر"
+
+msgid "This is not a valid IPv6 address."
+msgstr "هذا ليس عنوان IPv6 صحيح."
+
+#, python-format
+msgctxt "String to return when truncating text"
+msgid "%(truncated_text)s…"
+msgstr "%(truncated_text)s…"
+
+msgid "or"
+msgstr "أو"
+
+#. Translators: This string is used as a separator between list elements
+msgid ", "
+msgstr "، "
+
+#, python-format
+msgid "%(num)d year"
+msgid_plural "%(num)d years"
+msgstr[0] ""
+msgstr[1] ""
+msgstr[2] ""
+msgstr[3] ""
+msgstr[4] ""
+msgstr[5] ""
+
+#, python-format
+msgid "%(num)d month"
+msgid_plural "%(num)d months"
+msgstr[0] ""
+msgstr[1] ""
+msgstr[2] ""
+msgstr[3] ""
+msgstr[4] ""
+msgstr[5] ""
+
+#, python-format
+msgid "%(num)d week"
+msgid_plural "%(num)d weeks"
+msgstr[0] ""
+msgstr[1] ""
+msgstr[2] ""
+msgstr[3] ""
+msgstr[4] ""
+msgstr[5] ""
+
+#, python-format
+msgid "%(num)d day"
+msgid_plural "%(num)d days"
+msgstr[0] ""
+msgstr[1] ""
+msgstr[2] ""
+msgstr[3] ""
+msgstr[4] ""
+msgstr[5] ""
+
+#, python-format
+msgid "%(num)d hour"
+msgid_plural "%(num)d hours"
+msgstr[0] ""
+msgstr[1] ""
+msgstr[2] ""
+msgstr[3] ""
+msgstr[4] ""
+msgstr[5] ""
+
+#, python-format
+msgid "%(num)d minute"
+msgid_plural "%(num)d minutes"
+msgstr[0] ""
+msgstr[1] ""
+msgstr[2] ""
+msgstr[3] ""
+msgstr[4] ""
+msgstr[5] ""
+
+msgid "Forbidden"
+msgstr "ممنوع"
+
+msgid "CSRF verification failed. Request aborted."
+msgstr "تم الفشل للتحقق من CSRF. تم إنهاء الطلب."
+
+msgid ""
+"You are seeing this message because this HTTPS site requires a “Referer "
+"header” to be sent by your web browser, but none was sent. This header is "
+"required for security reasons, to ensure that your browser is not being "
+"hijacked by third parties."
+msgstr ""
+"أنت ترى هذه الرسالة لأن موقع HTTPS هذا يتطلب \"عنوان مرجعي\" ليتم إرساله "
+"بواسطة متصفح الويب الخاص بك ، ولكن لم يتم إرسال أي شيء. هذا العنوان مطلوب "
+"لأسباب أمنية ، لضمان عدم اختراق متصفحك من قبل أطراف أخرى."
+
+msgid ""
+"If you have configured your browser to disable “Referer” headers, please re-"
+"enable them, at least for this site, or for HTTPS connections, or for “same-"
+"origin” requests."
+msgstr ""
+"إذا قمت بتكوين المستعرض الخاص بك لتعطيل رؤوس “Referer” ، فالرجاء إعادة "
+"تمكينها ، على الأقل لهذا الموقع ، أو لاتصالات HTTPS ، أو لطلبات “same-"
+"origin”."
+
+msgid ""
+"If you are using the tag or "
+"including the “Referrer-Policy: no-referrer” header, please remove them. The "
+"CSRF protection requires the “Referer” header to do strict referer checking. "
+"If you’re concerned about privacy, use alternatives like for links to third-party sites."
+msgstr ""
+"إذا كنت تستخدم العلامة أو تتضمن رأس “Referrer-Policy: no-referrer” ، فيرجى إزالتها. تتطلب حماية "
+"CSRF رأس “Referer” القيام بالتحقق من “strict referer”. إذا كنت مهتمًا "
+"بالخصوصية ، فاستخدم بدائل مثل للروابط إلى مواقع "
+"الجهات الخارجية."
+
+msgid ""
+"You are seeing this message because this site requires a CSRF cookie when "
+"submitting forms. This cookie is required for security reasons, to ensure "
+"that your browser is not being hijacked by third parties."
+msgstr ""
+"تشاهد هذه الرسالة لأن هذا الموقع يتطلب ملف تعريف ارتباط CSRF Cookie عند "
+"إرسال النماذج. ملف تعريف ارتباط Cookie هذا مطلوب لأسباب أمنية ، لضمان عدم "
+"اختطاف متصفحك من قبل أطراف ثالثة."
+
+msgid ""
+"If you have configured your browser to disable cookies, please re-enable "
+"them, at least for this site, or for “same-origin” requests."
+msgstr ""
+"إذا قمت بتكوين المستعرض الخاص بك لتعطيل ملفات تعريف الارتباط Cookies ، يرجى "
+"إعادة تمكينها ، على الأقل لهذا الموقع ، أو لطلبات “same-origin”."
+
+msgid "More information is available with DEBUG=True."
+msgstr "يتوفر مزيد من المعلومات عند ضبط الخيار DEBUG=True."
+
+msgid "No year specified"
+msgstr "لم تحدد السنة"
+
+msgid "Date out of range"
+msgstr "تاريخ خارج النطاق"
+
+msgid "No month specified"
+msgstr "لم تحدد الشهر"
+
+msgid "No day specified"
+msgstr "لم تحدد اليوم"
+
+msgid "No week specified"
+msgstr "لم تحدد الأسبوع"
+
+#, python-format
+msgid "No %(verbose_name_plural)s available"
+msgstr "لا يوجد %(verbose_name_plural)s"
+
+#, python-format
+msgid ""
+"Future %(verbose_name_plural)s not available because %(class_name)s."
+"allow_future is False."
+msgstr ""
+"التاريخ بالمستقبل %(verbose_name_plural)s غير متوفر لأن قيمة %(class_name)s."
+"allow_future هي False."
+
+#, python-format
+msgid "Invalid date string “%(datestr)s” given format “%(format)s”"
+msgstr "سلسلة تاريخ غير صالحة “%(datestr)s” وفق التنسيق “%(format)s”"
+
+#, python-format
+msgid "No %(verbose_name)s found matching the query"
+msgstr "لم يعثر على أي %(verbose_name)s مطابقة لهذا الإستعلام"
+
+msgid "Page is not “last”, nor can it be converted to an int."
+msgstr "الصفحة ليست \"الأخيرة\" ، ولا يمكن تحويلها إلى عدد صحيح."
+
+#, python-format
+msgid "Invalid page (%(page_number)s): %(message)s"
+msgstr "صفحة خاطئة (%(page_number)s): %(message)s"
+
+#, python-format
+msgid "Empty list and “%(class_name)s.allow_empty” is False."
+msgstr "القائمة فارغة و “%(class_name)s.allow_empty” هي False."
+
+msgid "Directory indexes are not allowed here."
+msgstr "لا يسمح لفهارس الدليل هنا."
+
+#, python-format
+msgid "“%(path)s” does not exist"
+msgstr "“%(path)s” غير موجود"
+
+#, python-format
+msgid "Index of %(directory)s"
+msgstr "فهرس لـ %(directory)s"
+
+msgid "The install worked successfully! Congratulations!"
+msgstr "تمَّت عملية التثبيت بنجاح! تهانينا!"
+
+#, python-format
+msgid ""
+"View release notes for Django %(version)s"
+msgstr ""
+"عرض ملاحظات الإصدار ل جانغو "
+"%(version)s"
+
+#, python-format
+msgid ""
+"You are seeing this page because DEBUG=True is in your settings file and you have not configured any "
+"URLs."
+msgstr ""
+"تشاهد هذه الصفحة لأن DEBUG = True موجود في ملف الإعدادات الخاص بك ولم تقم بتكوين أي "
+"عناوين URL."
+
+msgid "Django Documentation"
+msgstr "توثيق جانغو"
+
+msgid "Topics, references, & how-to’s"
+msgstr "الموضوعات والمراجع والإرشادات"
+
+msgid "Tutorial: A Polling App"
+msgstr "البرنامج التعليمي: تطبيق الاقتراع"
+
+msgid "Get started with Django"
+msgstr "الخطوات الأولى مع جانغو"
+
+msgid "Django Community"
+msgstr "مجتمع جانغو"
+
+msgid "Connect, get help, or contribute"
+msgstr "الاتصال، الحصول على المساعدة أو المساهمة"
diff --git a/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/ar_DZ/__init__.py b/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/ar_DZ/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/ar_DZ/formats.py b/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/ar_DZ/formats.py
new file mode 100644
index 00000000..cbd361d6
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/ar_DZ/formats.py
@@ -0,0 +1,29 @@
+# This file is distributed under the same license as the Django package.
+#
+# The *_FORMAT strings use the Django date format syntax,
+# see https://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
+DATE_FORMAT = "j F Y"
+TIME_FORMAT = "H:i"
+DATETIME_FORMAT = "j F Y H:i"
+YEAR_MONTH_FORMAT = "F Y"
+MONTH_DAY_FORMAT = "j F"
+SHORT_DATE_FORMAT = "j F Y"
+SHORT_DATETIME_FORMAT = "j F Y H:i"
+FIRST_DAY_OF_WEEK = 0 # Sunday
+
+# The *_INPUT_FORMATS strings use the Python strftime format syntax,
+# see https://docs.python.org/library/datetime.html#strftime-strptime-behavior
+DATE_INPUT_FORMATS = [
+ "%Y/%m/%d", # '2006/10/25'
+]
+TIME_INPUT_FORMATS = [
+ "%H:%M", # '14:30'
+ "%H:%M:%S", # '14:30:59'
+]
+DATETIME_INPUT_FORMATS = [
+ "%Y/%m/%d %H:%M", # '2006/10/25 14:30'
+ "%Y/%m/%d %H:%M:%S", # '2006/10/25 14:30:59'
+]
+DECIMAL_SEPARATOR = ","
+THOUSAND_SEPARATOR = "."
+NUMBER_GROUPING = 3
diff --git a/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/ast/LC_MESSAGES/django.mo b/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/ast/LC_MESSAGES/django.mo
new file mode 100644
index 00000000..31733b2e
Binary files /dev/null and b/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/ast/LC_MESSAGES/django.mo differ
diff --git a/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/ast/LC_MESSAGES/django.po b/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/ast/LC_MESSAGES/django.po
new file mode 100644
index 00000000..417f18db
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/ast/LC_MESSAGES/django.po
@@ -0,0 +1,1237 @@
+# This file is distributed under the same license as the Django package.
+#
+# Translators:
+# Ḷḷumex03 , 2014
+msgid ""
+msgstr ""
+"Project-Id-Version: django\n"
+"Report-Msgid-Bugs-To: \n"
+"POT-Creation-Date: 2019-09-27 22:40+0200\n"
+"PO-Revision-Date: 2019-11-05 00:38+0000\n"
+"Last-Translator: Ramiro Morales\n"
+"Language-Team: Asturian (http://www.transifex.com/django/django/language/"
+"ast/)\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Language: ast\n"
+"Plural-Forms: nplurals=2; plural=(n != 1);\n"
+
+msgid "Afrikaans"
+msgstr "Afrikáans"
+
+msgid "Arabic"
+msgstr "Árabe"
+
+msgid "Asturian"
+msgstr ""
+
+msgid "Azerbaijani"
+msgstr "Azerbaixanu"
+
+msgid "Bulgarian"
+msgstr "Búlgaru"
+
+msgid "Belarusian"
+msgstr "Bielorrusu"
+
+msgid "Bengali"
+msgstr "Bengalí"
+
+msgid "Breton"
+msgstr "Bretón"
+
+msgid "Bosnian"
+msgstr "Bosniu"
+
+msgid "Catalan"
+msgstr "Catalán"
+
+msgid "Czech"
+msgstr "Checu"
+
+msgid "Welsh"
+msgstr "Galés"
+
+msgid "Danish"
+msgstr "Danés"
+
+msgid "German"
+msgstr "Alemán"
+
+msgid "Lower Sorbian"
+msgstr ""
+
+msgid "Greek"
+msgstr "Griegu"
+
+msgid "English"
+msgstr "Inglés"
+
+msgid "Australian English"
+msgstr ""
+
+msgid "British English"
+msgstr "Inglés británicu"
+
+msgid "Esperanto"
+msgstr "Esperantu"
+
+msgid "Spanish"
+msgstr "Castellán"
+
+msgid "Argentinian Spanish"
+msgstr "Español arxentín"
+
+msgid "Colombian Spanish"
+msgstr ""
+
+msgid "Mexican Spanish"
+msgstr "Español mexicanu"
+
+msgid "Nicaraguan Spanish"
+msgstr "Español nicaraguanu"
+
+msgid "Venezuelan Spanish"
+msgstr "Español venezolanu"
+
+msgid "Estonian"
+msgstr "Estoniu"
+
+msgid "Basque"
+msgstr "Vascu"
+
+msgid "Persian"
+msgstr "Persa"
+
+msgid "Finnish"
+msgstr "Finés"
+
+msgid "French"
+msgstr "Francés"
+
+msgid "Frisian"
+msgstr "Frisón"
+
+msgid "Irish"
+msgstr "Irlandés"
+
+msgid "Scottish Gaelic"
+msgstr ""
+
+msgid "Galician"
+msgstr "Gallegu"
+
+msgid "Hebrew"
+msgstr "Hebréu"
+
+msgid "Hindi"
+msgstr "Hindi"
+
+msgid "Croatian"
+msgstr "Croata"
+
+msgid "Upper Sorbian"
+msgstr ""
+
+msgid "Hungarian"
+msgstr "Húngaru"
+
+msgid "Armenian"
+msgstr ""
+
+msgid "Interlingua"
+msgstr "Interlingua"
+
+msgid "Indonesian"
+msgstr "Indonesiu"
+
+msgid "Ido"
+msgstr ""
+
+msgid "Icelandic"
+msgstr "Islandés"
+
+msgid "Italian"
+msgstr "Italianu"
+
+msgid "Japanese"
+msgstr "Xaponés"
+
+msgid "Georgian"
+msgstr "Xeorxanu"
+
+msgid "Kabyle"
+msgstr ""
+
+msgid "Kazakh"
+msgstr "Kazakh"
+
+msgid "Khmer"
+msgstr "Khmer"
+
+msgid "Kannada"
+msgstr "Canarés"
+
+msgid "Korean"
+msgstr "Coreanu"
+
+msgid "Luxembourgish"
+msgstr "Luxemburgués"
+
+msgid "Lithuanian"
+msgstr "Lituanu"
+
+msgid "Latvian"
+msgstr "Letón"
+
+msgid "Macedonian"
+msgstr "Macedoniu"
+
+msgid "Malayalam"
+msgstr "Malayalam"
+
+msgid "Mongolian"
+msgstr "Mongol"
+
+msgid "Marathi"
+msgstr ""
+
+msgid "Burmese"
+msgstr "Birmanu"
+
+msgid "Norwegian Bokmål"
+msgstr ""
+
+msgid "Nepali"
+msgstr "Nepalí"
+
+msgid "Dutch"
+msgstr "Holandés"
+
+msgid "Norwegian Nynorsk"
+msgstr "Nynorsk noruegu"
+
+msgid "Ossetic"
+msgstr "Osetiu"
+
+msgid "Punjabi"
+msgstr "Punjabi"
+
+msgid "Polish"
+msgstr "Polacu"
+
+msgid "Portuguese"
+msgstr "Portugués"
+
+msgid "Brazilian Portuguese"
+msgstr "Portugués brasileñu"
+
+msgid "Romanian"
+msgstr "Rumanu"
+
+msgid "Russian"
+msgstr "Rusu"
+
+msgid "Slovak"
+msgstr "Eslovacu"
+
+msgid "Slovenian"
+msgstr "Eslovenu"
+
+msgid "Albanian"
+msgstr "Albanu"
+
+msgid "Serbian"
+msgstr "Serbiu"
+
+msgid "Serbian Latin"
+msgstr "Serbiu llatín"
+
+msgid "Swedish"
+msgstr "Suecu"
+
+msgid "Swahili"
+msgstr "Suaḥili"
+
+msgid "Tamil"
+msgstr "Tamil"
+
+msgid "Telugu"
+msgstr "Telugu"
+
+msgid "Thai"
+msgstr "Tailandés"
+
+msgid "Turkish"
+msgstr "Turcu"
+
+msgid "Tatar"
+msgstr "Tatar"
+
+msgid "Udmurt"
+msgstr "Udmurtu"
+
+msgid "Ukrainian"
+msgstr "Ucranianu"
+
+msgid "Urdu"
+msgstr "Urdu"
+
+msgid "Uzbek"
+msgstr ""
+
+msgid "Vietnamese"
+msgstr "Vietnamita"
+
+msgid "Simplified Chinese"
+msgstr "Chinu simplificáu"
+
+msgid "Traditional Chinese"
+msgstr "Chinu tradicional"
+
+msgid "Messages"
+msgstr ""
+
+msgid "Site Maps"
+msgstr ""
+
+msgid "Static Files"
+msgstr ""
+
+msgid "Syndication"
+msgstr ""
+
+msgid "That page number is not an integer"
+msgstr ""
+
+msgid "That page number is less than 1"
+msgstr ""
+
+msgid "That page contains no results"
+msgstr ""
+
+msgid "Enter a valid value."
+msgstr "Introduz un valor válidu."
+
+msgid "Enter a valid URL."
+msgstr "Introduz una URL válida."
+
+msgid "Enter a valid integer."
+msgstr ""
+
+msgid "Enter a valid email address."
+msgstr "Introduz una direición de corréu válida."
+
+#. Translators: "letters" means latin letters: a-z and A-Z.
+msgid ""
+"Enter a valid “slug” consisting of letters, numbers, underscores or hyphens."
+msgstr ""
+
+msgid ""
+"Enter a valid “slug” consisting of Unicode letters, numbers, underscores, or "
+"hyphens."
+msgstr ""
+
+msgid "Enter a valid IPv4 address."
+msgstr "Introduz una direición IPv4 válida."
+
+msgid "Enter a valid IPv6 address."
+msgstr "Introduz una direición IPv6 válida."
+
+msgid "Enter a valid IPv4 or IPv6 address."
+msgstr "Introduz una direición IPv4 o IPv6 válida."
+
+msgid "Enter only digits separated by commas."
+msgstr "Introduz namái díxitos separtaos per comes."
+
+#, python-format
+msgid "Ensure this value is %(limit_value)s (it is %(show_value)s)."
+msgstr "Asegúrate qu'esti valor ye %(limit_value)s (ye %(show_value)s)."
+
+#, python-format
+msgid "Ensure this value is less than or equal to %(limit_value)s."
+msgstr "Asegúrate qu'esti valor ye menor o igual a %(limit_value)s."
+
+#, python-format
+msgid "Ensure this value is greater than or equal to %(limit_value)s."
+msgstr "Asegúrate qu'esti valor ye mayor o igual a %(limit_value)s."
+
+#, python-format
+msgid ""
+"Ensure this value has at least %(limit_value)d character (it has "
+"%(show_value)d)."
+msgid_plural ""
+"Ensure this value has at least %(limit_value)d characters (it has "
+"%(show_value)d)."
+msgstr[0] ""
+"Asegúrate qu'esti valor tien polo menos %(limit_value)d caráuter (tien "
+"%(show_value)d)."
+msgstr[1] ""
+"Asegúrate qu'esti valor tien polo menos %(limit_value)d caráuteres (tien "
+"%(show_value)d)."
+
+#, python-format
+msgid ""
+"Ensure this value has at most %(limit_value)d character (it has "
+"%(show_value)d)."
+msgid_plural ""
+"Ensure this value has at most %(limit_value)d characters (it has "
+"%(show_value)d)."
+msgstr[0] ""
+"Asegúrate qu'esti valor tien como muncho %(limit_value)d caráuter (tien "
+"%(show_value)d)."
+msgstr[1] ""
+"Asegúrate qu'esti valor tien como muncho %(limit_value)d caráuteres (tien "
+"%(show_value)d)."
+
+msgid "Enter a number."
+msgstr "Introduz un númberu."
+
+#, python-format
+msgid "Ensure that there are no more than %(max)s digit in total."
+msgid_plural "Ensure that there are no more than %(max)s digits in total."
+msgstr[0] "Asegúrate que nun hai más de %(max)s díxitu en total."
+msgstr[1] "Asegúrate que nun hai más de %(max)s díxitos en total."
+
+#, python-format
+msgid "Ensure that there are no more than %(max)s decimal place."
+msgid_plural "Ensure that there are no more than %(max)s decimal places."
+msgstr[0] "Asegúrate que nun hai más de %(max)s allugamientu decimal."
+msgstr[1] "Asegúrate que nun hai más de %(max)s allugamientos decimales."
+
+#, python-format
+msgid ""
+"Ensure that there are no more than %(max)s digit before the decimal point."
+msgid_plural ""
+"Ensure that there are no more than %(max)s digits before the decimal point."
+msgstr[0] ""
+"Asegúrate que nun hai más de %(max)s díxitu enantes del puntu decimal."
+msgstr[1] ""
+"Asegúrate que nun hai más de %(max)s díxitos enantes del puntu decimal."
+
+#, python-format
+msgid ""
+"File extension “%(extension)s” is not allowed. Allowed extensions are: "
+"%(allowed_extensions)s."
+msgstr ""
+
+msgid "Null characters are not allowed."
+msgstr ""
+
+msgid "and"
+msgstr "y"
+
+#, python-format
+msgid "%(model_name)s with this %(field_labels)s already exists."
+msgstr ""
+
+#, python-format
+msgid "Value %(value)r is not a valid choice."
+msgstr ""
+
+msgid "This field cannot be null."
+msgstr "Esti campu nun pue ser nulu."
+
+msgid "This field cannot be blank."
+msgstr "Esti campu nun pue tar baleru."
+
+#, python-format
+msgid "%(model_name)s with this %(field_label)s already exists."
+msgstr "%(model_name)s con esti %(field_label)s yá esiste."
+
+#. Translators: The 'lookup_type' is one of 'date', 'year' or 'month'.
+#. Eg: "Title must be unique for pub_date year"
+#, python-format
+msgid ""
+"%(field_label)s must be unique for %(date_field_label)s %(lookup_type)s."
+msgstr ""
+
+#, python-format
+msgid "Field of type: %(field_type)s"
+msgstr "Campu de la triba: %(field_type)s"
+
+#, python-format
+msgid "“%(value)s” value must be either True or False."
+msgstr ""
+
+#, python-format
+msgid "“%(value)s” value must be either True, False, or None."
+msgstr ""
+
+msgid "Boolean (Either True or False)"
+msgstr "Boleanu (tamién True o False)"
+
+#, python-format
+msgid "String (up to %(max_length)s)"
+msgstr "Cadena (fasta %(max_length)s)"
+
+msgid "Comma-separated integers"
+msgstr "Enteros separtaos per coma"
+
+#, python-format
+msgid ""
+"“%(value)s” value has an invalid date format. It must be in YYYY-MM-DD "
+"format."
+msgstr ""
+
+#, python-format
+msgid ""
+"“%(value)s” value has the correct format (YYYY-MM-DD) but it is an invalid "
+"date."
+msgstr ""
+
+msgid "Date (without time)"
+msgstr "Data (ensin hora)"
+
+#, python-format
+msgid ""
+"“%(value)s” value has an invalid format. It must be in YYYY-MM-DD HH:MM[:ss[."
+"uuuuuu]][TZ] format."
+msgstr ""
+
+#, python-format
+msgid ""
+"“%(value)s” value has the correct format (YYYY-MM-DD HH:MM[:ss[.uuuuuu]]"
+"[TZ]) but it is an invalid date/time."
+msgstr ""
+
+msgid "Date (with time)"
+msgstr "Data (con hora)"
+
+#, python-format
+msgid "“%(value)s” value must be a decimal number."
+msgstr ""
+
+msgid "Decimal number"
+msgstr "Númberu decimal"
+
+#, python-format
+msgid ""
+"“%(value)s” value has an invalid format. It must be in [DD] [[HH:]MM:]ss[."
+"uuuuuu] format."
+msgstr ""
+
+msgid "Duration"
+msgstr ""
+
+msgid "Email address"
+msgstr "Direición de corréu"
+
+msgid "File path"
+msgstr "Camín del ficheru"
+
+#, python-format
+msgid "“%(value)s” value must be a float."
+msgstr ""
+
+msgid "Floating point number"
+msgstr "Númberu de puntu flotante"
+
+#, python-format
+msgid "“%(value)s” value must be an integer."
+msgstr ""
+
+msgid "Integer"
+msgstr "Enteru"
+
+msgid "Big (8 byte) integer"
+msgstr "Enteru big (8 byte)"
+
+msgid "IPv4 address"
+msgstr "Direición IPv4"
+
+msgid "IP address"
+msgstr "Direición IP"
+
+#, python-format
+msgid "“%(value)s” value must be either None, True or False."
+msgstr ""
+
+msgid "Boolean (Either True, False or None)"
+msgstr "Boleanu (tamién True, False o None)"
+
+msgid "Positive integer"
+msgstr "Enteru positivu"
+
+msgid "Positive small integer"
+msgstr "Enteru pequeñu positivu"
+
+#, python-format
+msgid "Slug (up to %(max_length)s)"
+msgstr "Slug (fasta %(max_length)s)"
+
+msgid "Small integer"
+msgstr "Enteru pequeñu"
+
+msgid "Text"
+msgstr "Testu"
+
+#, python-format
+msgid ""
+"“%(value)s” value has an invalid format. It must be in HH:MM[:ss[.uuuuuu]] "
+"format."
+msgstr ""
+
+#, python-format
+msgid ""
+"“%(value)s” value has the correct format (HH:MM[:ss[.uuuuuu]]) but it is an "
+"invalid time."
+msgstr ""
+
+msgid "Time"
+msgstr "Hora"
+
+msgid "URL"
+msgstr "URL"
+
+msgid "Raw binary data"
+msgstr "Datos binarios crudos"
+
+#, python-format
+msgid "“%(value)s” is not a valid UUID."
+msgstr ""
+
+msgid "Universally unique identifier"
+msgstr ""
+
+msgid "File"
+msgstr "Ficheru"
+
+msgid "Image"
+msgstr "Imaxe"
+
+#, python-format
+msgid "%(model)s instance with %(field)s %(value)r does not exist."
+msgstr ""
+
+msgid "Foreign Key (type determined by related field)"
+msgstr "Clave foriata (triba determinada pol campu rellacionáu)"
+
+msgid "One-to-one relationship"
+msgstr "Rellación a ún"
+
+#, python-format
+msgid "%(from)s-%(to)s relationship"
+msgstr ""
+
+#, python-format
+msgid "%(from)s-%(to)s relationships"
+msgstr ""
+
+msgid "Many-to-many relationship"
+msgstr "Rellación a munchos"
+
+#. Translators: If found as last label character, these punctuation
+#. characters will prevent the default label_suffix to be appended to the
+#. label
+msgid ":?.!"
+msgstr ":?.!"
+
+msgid "This field is required."
+msgstr "Requierse esti campu."
+
+msgid "Enter a whole number."
+msgstr "Introduz un númberu completu"
+
+msgid "Enter a valid date."
+msgstr "Introduz una data válida."
+
+msgid "Enter a valid time."
+msgstr "Introduz una hora válida."
+
+msgid "Enter a valid date/time."
+msgstr "Introduz una data/hora válida."
+
+msgid "Enter a valid duration."
+msgstr ""
+
+#, python-brace-format
+msgid "The number of days must be between {min_days} and {max_days}."
+msgstr ""
+
+msgid "No file was submitted. Check the encoding type on the form."
+msgstr "Nun s'unvió'l ficheru. Comprueba la triba de cifráu nel formulariu."
+
+msgid "No file was submitted."
+msgstr "Nun s'unvió dengún ficheru."
+
+msgid "The submitted file is empty."
+msgstr "El ficheru dunviáu ta baleru."
+
+#, python-format
+msgid "Ensure this filename has at most %(max)d character (it has %(length)d)."
+msgid_plural ""
+"Ensure this filename has at most %(max)d characters (it has %(length)d)."
+msgstr[0] ""
+"Asegúrate qu'esti nome de ficheru tien como muncho %(max)d caráuter (tien "
+"%(length)d)."
+msgstr[1] ""
+"Asegúrate qu'esti nome de ficheru tien como muncho %(max)d caráuteres (tien "
+"%(length)d)."
+
+msgid "Please either submit a file or check the clear checkbox, not both."
+msgstr "Por favor, dunvia un ficheru o conseña la caxella, non dambos."
+
+msgid ""
+"Upload a valid image. The file you uploaded was either not an image or a "
+"corrupted image."
+msgstr ""
+"Xubi una imaxe válida. El ficheru que xubiesti o nun yera una imaxe, o taba "
+"toriada."
+
+#, python-format
+msgid "Select a valid choice. %(value)s is not one of the available choices."
+msgstr ""
+"Esbilla una escoyeta válida. %(value)s nun ye una de les escoyetes "
+"disponibles."
+
+msgid "Enter a list of values."
+msgstr "Introduz una llista de valores."
+
+msgid "Enter a complete value."
+msgstr ""
+
+msgid "Enter a valid UUID."
+msgstr ""
+
+#. Translators: This is the default suffix added to form field labels
+msgid ":"
+msgstr ":"
+
+#, python-format
+msgid "(Hidden field %(name)s) %(error)s"
+msgstr "(Campu anubríu %(name)s) %(error)s"
+
+msgid "ManagementForm data is missing or has been tampered with"
+msgstr ""
+
+#, python-format
+msgid "Please submit %d or fewer forms."
+msgid_plural "Please submit %d or fewer forms."
+msgstr[0] "Por favor, dunvia %d o menos formularios."
+msgstr[1] "Por favor, dunvia %d o menos formularios."
+
+#, python-format
+msgid "Please submit %d or more forms."
+msgid_plural "Please submit %d or more forms."
+msgstr[0] ""
+msgstr[1] ""
+
+msgid "Order"
+msgstr "Orde"
+
+msgid "Delete"
+msgstr "Desanciar"
+
+#, python-format
+msgid "Please correct the duplicate data for %(field)s."
+msgstr "Por favor, igua'l datu duplicáu de %(field)s."
+
+#, python-format
+msgid "Please correct the duplicate data for %(field)s, which must be unique."
+msgstr ""
+"Por favor, igua'l datu duplicáu pa %(field)s, el cual tien de ser únicu."
+
+#, python-format
+msgid ""
+"Please correct the duplicate data for %(field_name)s which must be unique "
+"for the %(lookup)s in %(date_field)s."
+msgstr ""
+"Por favor, igua'l datu duplicáu de %(field_name)s el cual tien de ser únicu "
+"pal %(lookup)s en %(date_field)s."
+
+msgid "Please correct the duplicate values below."
+msgstr "Por favor, igua los valores duplicaos embaxo."
+
+msgid "The inline value did not match the parent instance."
+msgstr ""
+
+msgid "Select a valid choice. That choice is not one of the available choices."
+msgstr ""
+"Esbilla una escoyeta válida. Esa escoyeta nun ye una de les escoyetes "
+"disponibles."
+
+#, python-format
+msgid "“%(pk)s” is not a valid value."
+msgstr ""
+
+#, python-format
+msgid ""
+"%(datetime)s couldn’t be interpreted in time zone %(current_timezone)s; it "
+"may be ambiguous or it may not exist."
+msgstr ""
+
+msgid "Clear"
+msgstr "Llimpiar"
+
+msgid "Currently"
+msgstr "Anguaño"
+
+msgid "Change"
+msgstr "Camudar"
+
+msgid "Unknown"
+msgstr "Desconocíu"
+
+msgid "Yes"
+msgstr "Sí"
+
+msgid "No"
+msgstr "Non"
+
+msgid "Year"
+msgstr ""
+
+msgid "Month"
+msgstr ""
+
+msgid "Day"
+msgstr ""
+
+msgid "yes,no,maybe"
+msgstr "sí,non,quiciabes"
+
+#, python-format
+msgid "%(size)d byte"
+msgid_plural "%(size)d bytes"
+msgstr[0] "%(size)d byte"
+msgstr[1] "%(size)d bytes"
+
+#, python-format
+msgid "%s KB"
+msgstr "%s KB"
+
+#, python-format
+msgid "%s MB"
+msgstr "%s MB"
+
+#, python-format
+msgid "%s GB"
+msgstr "%s GB"
+
+#, python-format
+msgid "%s TB"
+msgstr "%s TB"
+
+#, python-format
+msgid "%s PB"
+msgstr "%s PB"
+
+msgid "p.m."
+msgstr "p.m."
+
+msgid "a.m."
+msgstr "a.m."
+
+msgid "PM"
+msgstr "PM"
+
+msgid "AM"
+msgstr "AM"
+
+msgid "midnight"
+msgstr "media nueche"
+
+msgid "noon"
+msgstr "meudía"
+
+msgid "Monday"
+msgstr "Llunes"
+
+msgid "Tuesday"
+msgstr "Martes"
+
+msgid "Wednesday"
+msgstr "Miércoles"
+
+msgid "Thursday"
+msgstr "Xueves"
+
+msgid "Friday"
+msgstr "Vienres"
+
+msgid "Saturday"
+msgstr "Sábadu"
+
+msgid "Sunday"
+msgstr "Domingu"
+
+msgid "Mon"
+msgstr "Llu"
+
+msgid "Tue"
+msgstr "Mar"
+
+msgid "Wed"
+msgstr "Mie"
+
+msgid "Thu"
+msgstr "Xue"
+
+msgid "Fri"
+msgstr "Vie"
+
+msgid "Sat"
+msgstr "Sáb"
+
+msgid "Sun"
+msgstr "Dom"
+
+msgid "January"
+msgstr "Xineru"
+
+msgid "February"
+msgstr "Febreru"
+
+msgid "March"
+msgstr "Marzu"
+
+msgid "April"
+msgstr "Abril"
+
+msgid "May"
+msgstr "Mayu"
+
+msgid "June"
+msgstr "Xunu"
+
+msgid "July"
+msgstr "Xunetu"
+
+msgid "August"
+msgstr "Agostu"
+
+msgid "September"
+msgstr "Setiembre"
+
+msgid "October"
+msgstr "Ochobre"
+
+msgid "November"
+msgstr "Payares"
+
+msgid "December"
+msgstr "Avientu"
+
+msgid "jan"
+msgstr "xin"
+
+msgid "feb"
+msgstr "feb"
+
+msgid "mar"
+msgstr "mar"
+
+msgid "apr"
+msgstr "abr"
+
+msgid "may"
+msgstr "may"
+
+msgid "jun"
+msgstr "xun"
+
+msgid "jul"
+msgstr "xnt"
+
+msgid "aug"
+msgstr "ago"
+
+msgid "sep"
+msgstr "set"
+
+msgid "oct"
+msgstr "och"
+
+msgid "nov"
+msgstr "pay"
+
+msgid "dec"
+msgstr "avi"
+
+msgctxt "abbrev. month"
+msgid "Jan."
+msgstr "Xin."
+
+msgctxt "abbrev. month"
+msgid "Feb."
+msgstr "Feb."
+
+msgctxt "abbrev. month"
+msgid "March"
+msgstr "Mar."
+
+msgctxt "abbrev. month"
+msgid "April"
+msgstr "Abr."
+
+msgctxt "abbrev. month"
+msgid "May"
+msgstr "May."
+
+msgctxt "abbrev. month"
+msgid "June"
+msgstr "Xun."
+
+msgctxt "abbrev. month"
+msgid "July"
+msgstr "Xnt."
+
+msgctxt "abbrev. month"
+msgid "Aug."
+msgstr "Ago."
+
+msgctxt "abbrev. month"
+msgid "Sept."
+msgstr "Set."
+
+msgctxt "abbrev. month"
+msgid "Oct."
+msgstr "Och."
+
+msgctxt "abbrev. month"
+msgid "Nov."
+msgstr "Pay."
+
+msgctxt "abbrev. month"
+msgid "Dec."
+msgstr "Avi."
+
+msgctxt "alt. month"
+msgid "January"
+msgstr "Xineru"
+
+msgctxt "alt. month"
+msgid "February"
+msgstr "Febreru"
+
+msgctxt "alt. month"
+msgid "March"
+msgstr "Marzu"
+
+msgctxt "alt. month"
+msgid "April"
+msgstr "Abril"
+
+msgctxt "alt. month"
+msgid "May"
+msgstr "Mayu"
+
+msgctxt "alt. month"
+msgid "June"
+msgstr "Xunu"
+
+msgctxt "alt. month"
+msgid "July"
+msgstr "Xunetu"
+
+msgctxt "alt. month"
+msgid "August"
+msgstr "Agostu"
+
+msgctxt "alt. month"
+msgid "September"
+msgstr "Setiembre"
+
+msgctxt "alt. month"
+msgid "October"
+msgstr "Ochobre"
+
+msgctxt "alt. month"
+msgid "November"
+msgstr "Payares"
+
+msgctxt "alt. month"
+msgid "December"
+msgstr "Avientu"
+
+msgid "This is not a valid IPv6 address."
+msgstr ""
+
+#, python-format
+msgctxt "String to return when truncating text"
+msgid "%(truncated_text)s…"
+msgstr ""
+
+msgid "or"
+msgstr "o"
+
+#. Translators: This string is used as a separator between list elements
+msgid ", "
+msgstr ", "
+
+#, python-format
+msgid "%d year"
+msgid_plural "%d years"
+msgstr[0] "%d añu"
+msgstr[1] "%d años"
+
+#, python-format
+msgid "%d month"
+msgid_plural "%d months"
+msgstr[0] "%d mes"
+msgstr[1] "%d meses"
+
+#, python-format
+msgid "%d week"
+msgid_plural "%d weeks"
+msgstr[0] "%d selmana"
+msgstr[1] "%d selmanes"
+
+#, python-format
+msgid "%d day"
+msgid_plural "%d days"
+msgstr[0] "%d día"
+msgstr[1] "%d díes"
+
+#, python-format
+msgid "%d hour"
+msgid_plural "%d hours"
+msgstr[0] "%d hora"
+msgstr[1] "%d hores"
+
+#, python-format
+msgid "%d minute"
+msgid_plural "%d minutes"
+msgstr[0] "%d minutu"
+msgstr[1] "%d minutos"
+
+msgid "0 minutes"
+msgstr "0 minutos"
+
+msgid "Forbidden"
+msgstr ""
+
+msgid "CSRF verification failed. Request aborted."
+msgstr ""
+
+msgid ""
+"You are seeing this message because this HTTPS site requires a “Referer "
+"header” to be sent by your Web browser, but none was sent. This header is "
+"required for security reasons, to ensure that your browser is not being "
+"hijacked by third parties."
+msgstr ""
+
+msgid ""
+"If you have configured your browser to disable “Referer” headers, please re-"
+"enable them, at least for this site, or for HTTPS connections, or for “same-"
+"origin” requests."
+msgstr ""
+
+msgid ""
+"If you are using the tag or "
+"including the “Referrer-Policy: no-referrer” header, please remove them. The "
+"CSRF protection requires the “Referer” header to do strict referer checking. "
+"If you’re concerned about privacy, use alternatives like for links to third-party sites."
+msgstr ""
+
+msgid ""
+"You are seeing this message because this site requires a CSRF cookie when "
+"submitting forms. This cookie is required for security reasons, to ensure "
+"that your browser is not being hijacked by third parties."
+msgstr ""
+
+msgid ""
+"If you have configured your browser to disable cookies, please re-enable "
+"them, at least for this site, or for “same-origin” requests."
+msgstr ""
+
+msgid "More information is available with DEBUG=True."
+msgstr ""
+
+msgid "No year specified"
+msgstr "Nun s'especificó l'añu"
+
+msgid "Date out of range"
+msgstr ""
+
+msgid "No month specified"
+msgstr "Nun s'especificó'l mes"
+
+msgid "No day specified"
+msgstr "Nun s'especificó'l día"
+
+msgid "No week specified"
+msgstr "Nun s'especificó la selmana"
+
+#, python-format
+msgid "No %(verbose_name_plural)s available"
+msgstr "Ensin %(verbose_name_plural)s disponible"
+
+#, python-format
+msgid ""
+"Future %(verbose_name_plural)s not available because %(class_name)s."
+"allow_future is False."
+msgstr ""
+"Nun ta disponible'l %(verbose_name_plural)s futuru porque %(class_name)s."
+"allow_future ye False."
+
+#, python-format
+msgid "Invalid date string “%(datestr)s” given format “%(format)s”"
+msgstr ""
+
+#, python-format
+msgid "No %(verbose_name)s found matching the query"
+msgstr "Nun s'alcontró %(verbose_name)s que concase cola gueta"
+
+msgid "Page is not “last”, nor can it be converted to an int."
+msgstr ""
+
+#, python-format
+msgid "Invalid page (%(page_number)s): %(message)s"
+msgstr "Páxina inválida (%(page_number)s): %(message)s"
+
+#, python-format
+msgid "Empty list and “%(class_name)s.allow_empty” is False."
+msgstr ""
+
+msgid "Directory indexes are not allowed here."
+msgstr "Nun tán almitíos equí los indexaos de direutoriu."
+
+#, python-format
+msgid "“%(path)s” does not exist"
+msgstr ""
+
+#, python-format
+msgid "Index of %(directory)s"
+msgstr "Índiz de %(directory)s"
+
+msgid "Django: the Web framework for perfectionists with deadlines."
+msgstr ""
+
+#, python-format
+msgid ""
+"View release notes for Django %(version)s"
+msgstr ""
+
+msgid "The install worked successfully! Congratulations!"
+msgstr ""
+
+#, python-format
+msgid ""
+"You are seeing this page because DEBUG=True is in your settings file and you have not configured any "
+"URLs."
+msgstr ""
+
+msgid "Django Documentation"
+msgstr ""
+
+msgid "Topics, references, & how-to’s"
+msgstr ""
+
+msgid "Tutorial: A Polling App"
+msgstr ""
+
+msgid "Get started with Django"
+msgstr ""
+
+msgid "Django Community"
+msgstr ""
+
+msgid "Connect, get help, or contribute"
+msgstr ""
diff --git a/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/az/LC_MESSAGES/django.mo b/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/az/LC_MESSAGES/django.mo
new file mode 100644
index 00000000..f24150dc
Binary files /dev/null and b/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/az/LC_MESSAGES/django.mo differ
diff --git a/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/az/LC_MESSAGES/django.po b/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/az/LC_MESSAGES/django.po
new file mode 100644
index 00000000..2e25c045
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/az/LC_MESSAGES/django.po
@@ -0,0 +1,1308 @@
+# This file is distributed under the same license as the Django package.
+#
+# Translators:
+# Emin Mastizada , 2018,2020
+# Emin Mastizada , 2015-2016
+# Metin Amiroff , 2011
+# Nicat Məmmədov , 2022
+msgid ""
+msgstr ""
+"Project-Id-Version: django\n"
+"Report-Msgid-Bugs-To: \n"
+"POT-Creation-Date: 2022-05-17 05:23-0500\n"
+"PO-Revision-Date: 2022-07-25 06:49+0000\n"
+"Last-Translator: Nicat Məmmədov , 2022\n"
+"Language-Team: Azerbaijani (http://www.transifex.com/django/django/language/"
+"az/)\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Language: az\n"
+"Plural-Forms: nplurals=2; plural=(n != 1);\n"
+
+msgid "Afrikaans"
+msgstr "Afrikaans"
+
+msgid "Arabic"
+msgstr "Ərəbcə"
+
+msgid "Algerian Arabic"
+msgstr "Əlcəzair Ərəbcəsi"
+
+msgid "Asturian"
+msgstr "Asturiyaca"
+
+msgid "Azerbaijani"
+msgstr "Azərbaycanca"
+
+msgid "Bulgarian"
+msgstr "Bolqarca"
+
+msgid "Belarusian"
+msgstr "Belarusca"
+
+msgid "Bengali"
+msgstr "Benqalca"
+
+msgid "Breton"
+msgstr "Bretonca"
+
+msgid "Bosnian"
+msgstr "Bosniyaca"
+
+msgid "Catalan"
+msgstr "Katalanca"
+
+msgid "Czech"
+msgstr "Çexcə"
+
+msgid "Welsh"
+msgstr "Uelscə"
+
+msgid "Danish"
+msgstr "Danimarkaca"
+
+msgid "German"
+msgstr "Almanca"
+
+msgid "Lower Sorbian"
+msgstr "Aşağı Sorbca"
+
+msgid "Greek"
+msgstr "Yunanca"
+
+msgid "English"
+msgstr "İngiliscə"
+
+msgid "Australian English"
+msgstr "Avstraliya İngiliscəsi"
+
+msgid "British English"
+msgstr "Britaniya İngiliscəsi"
+
+msgid "Esperanto"
+msgstr "Esperanto"
+
+msgid "Spanish"
+msgstr "İspanca"
+
+msgid "Argentinian Spanish"
+msgstr "Argentina İspancası"
+
+msgid "Colombian Spanish"
+msgstr "Kolumbia İspancası"
+
+msgid "Mexican Spanish"
+msgstr "Meksika İspancası"
+
+msgid "Nicaraguan Spanish"
+msgstr "Nikaraqua İspancası"
+
+msgid "Venezuelan Spanish"
+msgstr "Venesuela İspancası"
+
+msgid "Estonian"
+msgstr "Estonca"
+
+msgid "Basque"
+msgstr "Baskca"
+
+msgid "Persian"
+msgstr "Farsca"
+
+msgid "Finnish"
+msgstr "Fincə"
+
+msgid "French"
+msgstr "Fransızca"
+
+msgid "Frisian"
+msgstr "Friscə"
+
+msgid "Irish"
+msgstr "İrlandca"
+
+msgid "Scottish Gaelic"
+msgstr "Şotland Keltcəsi"
+
+msgid "Galician"
+msgstr "Qallik dili"
+
+msgid "Hebrew"
+msgstr "İbranicə"
+
+msgid "Hindi"
+msgstr "Hindcə"
+
+msgid "Croatian"
+msgstr "Xorvatca"
+
+msgid "Upper Sorbian"
+msgstr "Üst Sorbca"
+
+msgid "Hungarian"
+msgstr "Macarca"
+
+msgid "Armenian"
+msgstr "Ermənicə"
+
+msgid "Interlingua"
+msgstr "İnterlinqua"
+
+msgid "Indonesian"
+msgstr "İndonezcə"
+
+msgid "Igbo"
+msgstr "İqbo dili"
+
+msgid "Ido"
+msgstr "İdoca"
+
+msgid "Icelandic"
+msgstr "İslandca"
+
+msgid "Italian"
+msgstr "İtalyanca"
+
+msgid "Japanese"
+msgstr "Yaponca"
+
+msgid "Georgian"
+msgstr "Gürcücə"
+
+msgid "Kabyle"
+msgstr "Kabile"
+
+msgid "Kazakh"
+msgstr "Qazax"
+
+msgid "Khmer"
+msgstr "Kxmercə"
+
+msgid "Kannada"
+msgstr "Kannada dili"
+
+msgid "Korean"
+msgstr "Koreyca"
+
+msgid "Kyrgyz"
+msgstr "Qırğız"
+
+msgid "Luxembourgish"
+msgstr "Lüksemburqca"
+
+msgid "Lithuanian"
+msgstr "Litva dili"
+
+msgid "Latvian"
+msgstr "Latviya dili"
+
+msgid "Macedonian"
+msgstr "Makedonca"
+
+msgid "Malayalam"
+msgstr "Malayamca"
+
+msgid "Mongolian"
+msgstr "Monqolca"
+
+msgid "Marathi"
+msgstr "Marathicə"
+
+msgid "Malay"
+msgstr "Malay"
+
+msgid "Burmese"
+msgstr "Burmescə"
+
+msgid "Norwegian Bokmål"
+msgstr "Norveç Bukmolcası"
+
+msgid "Nepali"
+msgstr "Nepal"
+
+msgid "Dutch"
+msgstr "Flamandca"
+
+msgid "Norwegian Nynorsk"
+msgstr "Nynorsk Norveçcəsi"
+
+msgid "Ossetic"
+msgstr "Osetincə"
+
+msgid "Punjabi"
+msgstr "Pancabicə"
+
+msgid "Polish"
+msgstr "Polyakca"
+
+msgid "Portuguese"
+msgstr "Portuqalca"
+
+msgid "Brazilian Portuguese"
+msgstr "Braziliya Portuqalcası"
+
+msgid "Romanian"
+msgstr "Rumınca"
+
+msgid "Russian"
+msgstr "Rusca"
+
+msgid "Slovak"
+msgstr "Slovakca"
+
+msgid "Slovenian"
+msgstr "Slovencə"
+
+msgid "Albanian"
+msgstr "Albanca"
+
+msgid "Serbian"
+msgstr "Serbcə"
+
+msgid "Serbian Latin"
+msgstr "Serbcə Latın"
+
+msgid "Swedish"
+msgstr "İsveçcə"
+
+msgid "Swahili"
+msgstr "Suahili"
+
+msgid "Tamil"
+msgstr "Tamilcə"
+
+msgid "Telugu"
+msgstr "Teluqu dili"
+
+msgid "Tajik"
+msgstr "Tacik"
+
+msgid "Thai"
+msgstr "Tayca"
+
+msgid "Turkmen"
+msgstr "Türkmən"
+
+msgid "Turkish"
+msgstr "Türkcə"
+
+msgid "Tatar"
+msgstr "Tatar"
+
+msgid "Udmurt"
+msgstr "Udmurtca"
+
+msgid "Ukrainian"
+msgstr "Ukraynaca"
+
+msgid "Urdu"
+msgstr "Urduca"
+
+msgid "Uzbek"
+msgstr "Özbəkcə"
+
+msgid "Vietnamese"
+msgstr "Vyetnamca"
+
+msgid "Simplified Chinese"
+msgstr "Sadələşdirilmiş Çincə"
+
+msgid "Traditional Chinese"
+msgstr "Ənənəvi Çincə"
+
+msgid "Messages"
+msgstr "Mesajlar"
+
+msgid "Site Maps"
+msgstr "Sayt Xəritələri"
+
+msgid "Static Files"
+msgstr "Statik Fayllar"
+
+msgid "Syndication"
+msgstr "Sindikasiya"
+
+#. Translators: String used to replace omitted page numbers in elided page
+#. range generated by paginators, e.g. [1, 2, '…', 5, 6, 7, '…', 9, 10].
+msgid "…"
+msgstr "…"
+
+msgid "That page number is not an integer"
+msgstr "Səhifə nömrəsi rəqəm deyil"
+
+msgid "That page number is less than 1"
+msgstr "Səhifə nömrəsi 1-dən balacadır"
+
+msgid "That page contains no results"
+msgstr "Səhifədə nəticə yoxdur"
+
+msgid "Enter a valid value."
+msgstr "Düzgün qiymət daxil edin."
+
+msgid "Enter a valid URL."
+msgstr "Düzgün URL daxil edin."
+
+msgid "Enter a valid integer."
+msgstr "Düzgün rəqəm daxil edin."
+
+msgid "Enter a valid email address."
+msgstr "Düzgün e-poçt ünvanı daxil edin."
+
+#. Translators: "letters" means latin letters: a-z and A-Z.
+msgid ""
+"Enter a valid “slug” consisting of letters, numbers, underscores or hyphens."
+msgstr ""
+"Hərflərdən, rəqəmlərdən, alt-xətlərdən və ya defislərdən ibarət düzgün "
+"qısaltma (“slug”) daxil edin."
+
+msgid ""
+"Enter a valid “slug” consisting of Unicode letters, numbers, underscores, or "
+"hyphens."
+msgstr ""
+"Unicode hərflərdən, rəqəmlərdən, alt-xətlərdən və ya defislərdən ibarət "
+"düzgün qısaltma (“slug”) daxil edin."
+
+msgid "Enter a valid IPv4 address."
+msgstr "Düzgün IPv4 ünvanı daxil edin."
+
+msgid "Enter a valid IPv6 address."
+msgstr "Düzgün IPv6 ünvanını daxil edin."
+
+msgid "Enter a valid IPv4 or IPv6 address."
+msgstr "Düzgün IPv4 və ya IPv6 ünvanını daxil edin."
+
+msgid "Enter only digits separated by commas."
+msgstr "Vergüllə ayırmaqla yalnız rəqəmlər daxil edin."
+
+#, python-format
+msgid "Ensure this value is %(limit_value)s (it is %(show_value)s)."
+msgstr "Əmin edin ki, bu qiymət %(limit_value)s-dir (bu %(show_value)s-dir)."
+
+#, python-format
+msgid "Ensure this value is less than or equal to %(limit_value)s."
+msgstr ""
+"Bu qiymətin %(limit_value)s-ya bərabər və ya ondan kiçik olduğunu yoxlayın."
+
+#, python-format
+msgid "Ensure this value is greater than or equal to %(limit_value)s."
+msgstr ""
+"Bu qiymətin %(limit_value)s-ya bərabər və ya ondan böyük olduğunu yoxlayın."
+
+#, python-format
+msgid "Ensure this value is a multiple of step size %(limit_value)s."
+msgstr ""
+
+#, python-format
+msgid ""
+"Ensure this value has at least %(limit_value)d character (it has "
+"%(show_value)d)."
+msgid_plural ""
+"Ensure this value has at least %(limit_value)d characters (it has "
+"%(show_value)d)."
+msgstr[0] ""
+"Bu dəyərin ən az %(limit_value)d simvol olduğuna əmin olun (%(show_value)d "
+"var)."
+msgstr[1] ""
+"Bu dəyərin ən az %(limit_value)d simvol olduğuna əmin olun (%(show_value)d "
+"var)."
+
+#, python-format
+msgid ""
+"Ensure this value has at most %(limit_value)d character (it has "
+"%(show_value)d)."
+msgid_plural ""
+"Ensure this value has at most %(limit_value)d characters (it has "
+"%(show_value)d)."
+msgstr[0] ""
+"Bu dəyərin ən çox %(limit_value)d simvol olduğuna əmin olun (%(show_value)d "
+"var)."
+msgstr[1] ""
+"Bu dəyərin ən çox %(limit_value)d simvol olduğuna əmin olun (%(show_value)d "
+"var)."
+
+msgid "Enter a number."
+msgstr "Ədəd daxil edin."
+
+#, python-format
+msgid "Ensure that there are no more than %(max)s digit in total."
+msgid_plural "Ensure that there are no more than %(max)s digits in total."
+msgstr[0] "Toplamda %(max)s rəqəmdən çox olmadığına əmin olun."
+msgstr[1] "Toplamda %(max)s rəqəmdən çox olmadığına əmin olun."
+
+#, python-format
+msgid "Ensure that there are no more than %(max)s decimal place."
+msgid_plural "Ensure that there are no more than %(max)s decimal places."
+msgstr[0] "Onluq hissənin %(max)s rəqəmdən çox olmadığına əmin olun."
+msgstr[1] "Onluq hissənin %(max)s rəqəmdən çox olmadığına əmin olun."
+
+#, python-format
+msgid ""
+"Ensure that there are no more than %(max)s digit before the decimal point."
+msgid_plural ""
+"Ensure that there are no more than %(max)s digits before the decimal point."
+msgstr[0] "Onluq hissədən əvvəl %(max)s rəqəmdən çox olmadığına əmin olun."
+msgstr[1] "Onluq hissədən əvvəl %(max)s rəqəmdən çox olmadığına əmin olun."
+
+#, python-format
+msgid ""
+"File extension “%(extension)s” is not allowed. Allowed extensions are: "
+"%(allowed_extensions)s."
+msgstr ""
+"“%(extension)s” fayl uzantısına icazə verilmir. İcazə verilən fayl "
+"uzantıları: %(allowed_extensions)s."
+
+msgid "Null characters are not allowed."
+msgstr "Null simvollara icazə verilmir."
+
+msgid "and"
+msgstr "və"
+
+#, python-format
+msgid "%(model_name)s with this %(field_labels)s already exists."
+msgstr "%(field_labels)s ilə %(model_name)s artıq mövcuddur."
+
+#, python-format
+msgid "Constraint “%(name)s” is violated."
+msgstr ""
+
+#, python-format
+msgid "Value %(value)r is not a valid choice."
+msgstr "%(value)r dəyəri doğru seçim deyil."
+
+msgid "This field cannot be null."
+msgstr "Bu sahə boş qala bilməz."
+
+msgid "This field cannot be blank."
+msgstr "Bu sahə ağ qala bilməz."
+
+#, python-format
+msgid "%(model_name)s with this %(field_label)s already exists."
+msgstr "%(model_name)s bu %(field_label)s sahə ilə artıq mövcuddur."
+
+#. Translators: The 'lookup_type' is one of 'date', 'year' or
+#. 'month'. Eg: "Title must be unique for pub_date year"
+#, python-format
+msgid ""
+"%(field_label)s must be unique for %(date_field_label)s %(lookup_type)s."
+msgstr ""
+"%(field_label)s dəyəri %(date_field_label)s %(lookup_type)s üçün unikal "
+"olmalıdır."
+
+#, python-format
+msgid "Field of type: %(field_type)s"
+msgstr "Sahənin tipi: %(field_type)s"
+
+#, python-format
+msgid "“%(value)s” value must be either True or False."
+msgstr "“%(value)s” dəyəri True və ya False olmalıdır."
+
+#, python-format
+msgid "“%(value)s” value must be either True, False, or None."
+msgstr "“%(value)s” dəyəri True, False və ya None olmalıdır."
+
+msgid "Boolean (Either True or False)"
+msgstr "Bul (ya Doğru, ya Yalan)"
+
+#, python-format
+msgid "String (up to %(max_length)s)"
+msgstr "Sətir (%(max_length)s simvola kimi)"
+
+msgid "Comma-separated integers"
+msgstr "Vergüllə ayrılmış tam ədədlər"
+
+#, python-format
+msgid ""
+"“%(value)s” value has an invalid date format. It must be in YYYY-MM-DD "
+"format."
+msgstr ""
+"“%(value)s” dəyəri səhv tarix formatındadır. Formatı YYYY-MM-DD olmalıdır."
+
+#, python-format
+msgid ""
+"“%(value)s” value has the correct format (YYYY-MM-DD) but it is an invalid "
+"date."
+msgstr ""
+"“%(value)s” dəyəri düzgün formatdadır (YYYY-MM-DD) amma bu tarix xətalıdır."
+
+msgid "Date (without time)"
+msgstr "Tarix (saatsız)"
+
+#, python-format
+msgid ""
+"“%(value)s” value has an invalid format. It must be in YYYY-MM-DD HH:MM[:ss[."
+"uuuuuu]][TZ] format."
+msgstr ""
+"“%(value)s” dəyərinin formatı səhvdir. Formatı YYYY-MM-DD HH:MM[:ss[.uuuuuu]]"
+"[TZ] olmalıdır."
+
+#, python-format
+msgid ""
+"“%(value)s” value has the correct format (YYYY-MM-DD HH:MM[:ss[.uuuuuu]]"
+"[TZ]) but it is an invalid date/time."
+msgstr ""
+"“%(value)s” dəyərinin formatı düzgündür (YYYY-MM-DD HH:MM[:ss[.uuuuuu]][TZ]) "
+"amma bu tarix xətalıdır."
+
+msgid "Date (with time)"
+msgstr "Tarix (vaxt ilə)"
+
+#, python-format
+msgid "“%(value)s” value must be a decimal number."
+msgstr "“%(value)s” dəyəri onluq kəsrli (decimal) rəqəm olmalıdır."
+
+msgid "Decimal number"
+msgstr "Rasional ədəd"
+
+#, python-format
+msgid ""
+"“%(value)s” value has an invalid format. It must be in [DD] [[HH:]MM:]ss[."
+"uuuuuu] format."
+msgstr ""
+"“%(value)s” dəyərinin formatı səhvdir. Formatı [DD] [HH:[MM:]]ss[.uuuuuu] "
+"olmalıdır."
+
+msgid "Duration"
+msgstr "Müddət"
+
+msgid "Email address"
+msgstr "E-poçt"
+
+msgid "File path"
+msgstr "Faylın ünvanı"
+
+#, python-format
+msgid "“%(value)s” value must be a float."
+msgstr "“%(value)s” dəyəri float olmalıdır."
+
+msgid "Floating point number"
+msgstr "Sürüşən vergüllü ədəd"
+
+#, python-format
+msgid "“%(value)s” value must be an integer."
+msgstr "“%(value)s” dəyəri tam rəqəm olmalıdır."
+
+msgid "Integer"
+msgstr "Tam ədəd"
+
+msgid "Big (8 byte) integer"
+msgstr "Böyük (8 bayt) tam ədəd"
+
+msgid "Small integer"
+msgstr "Kiçik tam ədəd"
+
+msgid "IPv4 address"
+msgstr "IPv4 ünvanı"
+
+msgid "IP address"
+msgstr "IP ünvan"
+
+#, python-format
+msgid "“%(value)s” value must be either None, True or False."
+msgstr "“%(value)s” dəyəri None, True və ya False olmalıdır."
+
+msgid "Boolean (Either True, False or None)"
+msgstr "Bul (Ya Doğru, ya Yalan, ya da Heç nə)"
+
+msgid "Positive big integer"
+msgstr "Müsbət böyük rəqəm"
+
+msgid "Positive integer"
+msgstr "Müsbət tam ədəd"
+
+msgid "Positive small integer"
+msgstr "Müsbət tam kiçik ədəd"
+
+#, python-format
+msgid "Slug (up to %(max_length)s)"
+msgstr "Əzmə (%(max_length)s simvola kimi)"
+
+msgid "Text"
+msgstr "Mətn"
+
+#, python-format
+msgid ""
+"“%(value)s” value has an invalid format. It must be in HH:MM[:ss[.uuuuuu]] "
+"format."
+msgstr ""
+"“%(value)s” dəyərinin formatı səhvdir. Formatı HH:MM[:ss[.uuuuuu]] olmalıdır."
+
+#, python-format
+msgid ""
+"“%(value)s” value has the correct format (HH:MM[:ss[.uuuuuu]]) but it is an "
+"invalid time."
+msgstr ""
+"“%(value)s” dəyəri düzgün formatdadır (HH:MM[:ss[.uuuuuu]]), amma vaxtı "
+"xətalıdır."
+
+msgid "Time"
+msgstr "Vaxt"
+
+msgid "URL"
+msgstr "URL"
+
+msgid "Raw binary data"
+msgstr "Düz ikili (binary) məlumat"
+
+#, python-format
+msgid "“%(value)s” is not a valid UUID."
+msgstr "“%(value)s” keçərli UUID deyil."
+
+msgid "Universally unique identifier"
+msgstr "Universal təkrarolunmaz identifikator"
+
+msgid "File"
+msgstr "Fayl"
+
+msgid "Image"
+msgstr "Şəkil"
+
+msgid "A JSON object"
+msgstr "JSON obyekti"
+
+msgid "Value must be valid JSON."
+msgstr "Dəyər etibarlı JSON olmalıdır."
+
+#, python-format
+msgid "%(model)s instance with %(field)s %(value)r does not exist."
+msgstr "%(field)s dəyəri %(value)r olan %(model)s mövcud deyil."
+
+msgid "Foreign Key (type determined by related field)"
+msgstr "Xarici açar (bağlı olduğu sahəyə uyğun tipi alır)"
+
+msgid "One-to-one relationship"
+msgstr "Birin-birə münasibət"
+
+#, python-format
+msgid "%(from)s-%(to)s relationship"
+msgstr "%(from)s-%(to)s əlaqəsi"
+
+#, python-format
+msgid "%(from)s-%(to)s relationships"
+msgstr "%(from)s-%(to)s əlaqələri"
+
+msgid "Many-to-many relationship"
+msgstr "Çoxun-çoxa münasibət"
+
+#. Translators: If found as last label character, these punctuation
+#. characters will prevent the default label_suffix to be appended to the
+#. label
+msgid ":?.!"
+msgstr ":?.!"
+
+msgid "This field is required."
+msgstr "Bu sahə vacibdir."
+
+msgid "Enter a whole number."
+msgstr "Tam ədəd daxil edin."
+
+msgid "Enter a valid date."
+msgstr "Düzgün tarix daxil edin."
+
+msgid "Enter a valid time."
+msgstr "Düzgün vaxt daxil edin."
+
+msgid "Enter a valid date/time."
+msgstr "Düzgün tarix/vaxt daxil edin."
+
+msgid "Enter a valid duration."
+msgstr "Keçərli müddət daxil edin."
+
+#, python-brace-format
+msgid "The number of days must be between {min_days} and {max_days}."
+msgstr "Günlərin sayı {min_days} ilə {max_days} arasında olmalıdır."
+
+msgid "No file was submitted. Check the encoding type on the form."
+msgstr "Fayl göndərilməyib. Vərəqənin (\"form\") şifrələmə tipini yoxlayın."
+
+msgid "No file was submitted."
+msgstr "Fayl göndərilməyib."
+
+msgid "The submitted file is empty."
+msgstr "Göndərilən fayl boşdur."
+
+#, python-format
+msgid "Ensure this filename has at most %(max)d character (it has %(length)d)."
+msgid_plural ""
+"Ensure this filename has at most %(max)d characters (it has %(length)d)."
+msgstr[0] ""
+"Bu fayl adının ən çox %(max)d simvol olduğuna əmin olun (%(length)d var)."
+msgstr[1] ""
+"Bu fayl adının ən çox %(max)d simvol olduğuna əmin olun (%(length)d var)."
+
+msgid "Please either submit a file or check the clear checkbox, not both."
+msgstr ""
+"Ya fayl göndərin, ya da xanaya quş qoymayın, hər ikisini də birdən etməyin."
+
+msgid ""
+"Upload a valid image. The file you uploaded was either not an image or a "
+"corrupted image."
+msgstr ""
+"Düzgün şəkil göndərin. Göndərdiyiniz fayl ya şəkil deyil, ya da şəkildə "
+"problem var."
+
+#, python-format
+msgid "Select a valid choice. %(value)s is not one of the available choices."
+msgstr "Düzgün seçim edin. %(value)s seçimlər arasında yoxdur."
+
+msgid "Enter a list of values."
+msgstr "Qiymətlərin siyahısını daxil edin."
+
+msgid "Enter a complete value."
+msgstr "Tam dəyər daxil edin."
+
+msgid "Enter a valid UUID."
+msgstr "Keçərli UUID daxil et."
+
+msgid "Enter a valid JSON."
+msgstr "Etibarlı bir JSON daxil edin."
+
+#. Translators: This is the default suffix added to form field labels
+msgid ":"
+msgstr ":"
+
+#, python-format
+msgid "(Hidden field %(name)s) %(error)s"
+msgstr "(Gizli %(name)s sahəsi) %(error)s"
+
+#, python-format
+msgid ""
+"ManagementForm data is missing or has been tampered with. Missing fields: "
+"%(field_names)s. You may need to file a bug report if the issue persists."
+msgstr ""
+
+#, python-format
+msgid "Please submit at most %(num)d form."
+msgid_plural "Please submit at most %(num)d forms."
+msgstr[0] "Zəhmət olmasa ən çox %(num)d forma təsdiqləyin."
+msgstr[1] "Zəhmət olmasa ən çox %(num)d forma təsdiqləyin."
+
+#, python-format
+msgid "Please submit at least %(num)d form."
+msgid_plural "Please submit at least %(num)d forms."
+msgstr[0] "Zəhmət olmasa ən az %(num)d forma təsdiqləyin."
+msgstr[1] "Zəhmət olmasa ən az %(num)d forma təsdiqləyin."
+
+msgid "Order"
+msgstr "Sırala"
+
+msgid "Delete"
+msgstr "Sil"
+
+#, python-format
+msgid "Please correct the duplicate data for %(field)s."
+msgstr "%(field)s sahəsinə görə təkrarlanan məlumatlara düzəliş edin."
+
+#, python-format
+msgid "Please correct the duplicate data for %(field)s, which must be unique."
+msgstr ""
+"%(field)s sahəsinə görə təkrarlanan məlumatlara düzəliş edin, onların hamısı "
+"fərqli olmalıdır."
+
+#, python-format
+msgid ""
+"Please correct the duplicate data for %(field_name)s which must be unique "
+"for the %(lookup)s in %(date_field)s."
+msgstr ""
+"%(field_name)s sahəsinə görə təkrarlanan məlumatlara düzəliş edin, onlar "
+"%(date_field)s %(lookup)s-a görə fərqli olmalıdır."
+
+msgid "Please correct the duplicate values below."
+msgstr "Aşağıda təkrarlanan qiymətlərə düzəliş edin."
+
+msgid "The inline value did not match the parent instance."
+msgstr "Sətiriçi dəyər ana nüsxəyə uyğun deyil."
+
+msgid "Select a valid choice. That choice is not one of the available choices."
+msgstr "Düzgün seçim edin. Bu seçim mümkün deyil."
+
+#, python-format
+msgid "“%(pk)s” is not a valid value."
+msgstr "“%(pk)s” düzgün dəyər deyil."
+
+#, python-format
+msgid ""
+"%(datetime)s couldn’t be interpreted in time zone %(current_timezone)s; it "
+"may be ambiguous or it may not exist."
+msgstr ""
+"%(datetime)s vaxtı %(current_timezone)s zaman qurşağında ifadə oluna bilmir; "
+"ya duallıq, ya da mövcud olmaya bilər."
+
+msgid "Clear"
+msgstr "Təmizlə"
+
+msgid "Currently"
+msgstr "Hal-hazırda"
+
+msgid "Change"
+msgstr "Dəyiş"
+
+msgid "Unknown"
+msgstr "Məlum deyil"
+
+msgid "Yes"
+msgstr "Hə"
+
+msgid "No"
+msgstr "Yox"
+
+#. Translators: Please do not add spaces around commas.
+msgid "yes,no,maybe"
+msgstr "hə,yox,bəlkə"
+
+#, python-format
+msgid "%(size)d byte"
+msgid_plural "%(size)d bytes"
+msgstr[0] "%(size)d bayt"
+msgstr[1] "%(size)d bayt"
+
+#, python-format
+msgid "%s KB"
+msgstr "%s KB"
+
+#, python-format
+msgid "%s MB"
+msgstr "%s MB"
+
+#, python-format
+msgid "%s GB"
+msgstr "%s QB"
+
+#, python-format
+msgid "%s TB"
+msgstr "%s TB"
+
+#, python-format
+msgid "%s PB"
+msgstr "%s PB"
+
+msgid "p.m."
+msgstr "p.m."
+
+msgid "a.m."
+msgstr "a.m."
+
+msgid "PM"
+msgstr "PM"
+
+msgid "AM"
+msgstr "AM"
+
+msgid "midnight"
+msgstr "gecə yarısı"
+
+msgid "noon"
+msgstr "günorta"
+
+msgid "Monday"
+msgstr "Bazar ertəsi"
+
+msgid "Tuesday"
+msgstr "Çərşənbə axşamı"
+
+msgid "Wednesday"
+msgstr "Çərşənbə"
+
+msgid "Thursday"
+msgstr "Cümə axşamı"
+
+msgid "Friday"
+msgstr "Cümə"
+
+msgid "Saturday"
+msgstr "Şənbə"
+
+msgid "Sunday"
+msgstr "Bazar"
+
+msgid "Mon"
+msgstr "B.e"
+
+msgid "Tue"
+msgstr "Ç.a"
+
+msgid "Wed"
+msgstr "Çrş"
+
+msgid "Thu"
+msgstr "C.a"
+
+msgid "Fri"
+msgstr "Cüm"
+
+msgid "Sat"
+msgstr "Şnb"
+
+msgid "Sun"
+msgstr "Bzr"
+
+msgid "January"
+msgstr "Yanvar"
+
+msgid "February"
+msgstr "Fevral"
+
+msgid "March"
+msgstr "Mart"
+
+msgid "April"
+msgstr "Aprel"
+
+msgid "May"
+msgstr "May"
+
+msgid "June"
+msgstr "İyun"
+
+msgid "July"
+msgstr "İyul"
+
+msgid "August"
+msgstr "Avqust"
+
+msgid "September"
+msgstr "Sentyabr"
+
+msgid "October"
+msgstr "Oktyabr"
+
+msgid "November"
+msgstr "Noyabr"
+
+msgid "December"
+msgstr "Dekabr"
+
+msgid "jan"
+msgstr "ynv"
+
+msgid "feb"
+msgstr "fvr"
+
+msgid "mar"
+msgstr "mar"
+
+msgid "apr"
+msgstr "apr"
+
+msgid "may"
+msgstr "may"
+
+msgid "jun"
+msgstr "iyn"
+
+msgid "jul"
+msgstr "iyl"
+
+msgid "aug"
+msgstr "avq"
+
+msgid "sep"
+msgstr "snt"
+
+msgid "oct"
+msgstr "okt"
+
+msgid "nov"
+msgstr "noy"
+
+msgid "dec"
+msgstr "dek"
+
+msgctxt "abbrev. month"
+msgid "Jan."
+msgstr "Yan."
+
+msgctxt "abbrev. month"
+msgid "Feb."
+msgstr "Fev."
+
+msgctxt "abbrev. month"
+msgid "March"
+msgstr "Mart"
+
+msgctxt "abbrev. month"
+msgid "April"
+msgstr "Aprel"
+
+msgctxt "abbrev. month"
+msgid "May"
+msgstr "May"
+
+msgctxt "abbrev. month"
+msgid "June"
+msgstr "İyun"
+
+msgctxt "abbrev. month"
+msgid "July"
+msgstr "İyul"
+
+msgctxt "abbrev. month"
+msgid "Aug."
+msgstr "Avq."
+
+msgctxt "abbrev. month"
+msgid "Sept."
+msgstr "Sent."
+
+msgctxt "abbrev. month"
+msgid "Oct."
+msgstr "Okt."
+
+msgctxt "abbrev. month"
+msgid "Nov."
+msgstr "Noy."
+
+msgctxt "abbrev. month"
+msgid "Dec."
+msgstr "Dek."
+
+msgctxt "alt. month"
+msgid "January"
+msgstr "Yanvar"
+
+msgctxt "alt. month"
+msgid "February"
+msgstr "Fevral"
+
+msgctxt "alt. month"
+msgid "March"
+msgstr "Mart"
+
+msgctxt "alt. month"
+msgid "April"
+msgstr "Aprel"
+
+msgctxt "alt. month"
+msgid "May"
+msgstr "May"
+
+msgctxt "alt. month"
+msgid "June"
+msgstr "İyun"
+
+msgctxt "alt. month"
+msgid "July"
+msgstr "İyul"
+
+msgctxt "alt. month"
+msgid "August"
+msgstr "Avqust"
+
+msgctxt "alt. month"
+msgid "September"
+msgstr "Sentyabr"
+
+msgctxt "alt. month"
+msgid "October"
+msgstr "Oktyabr"
+
+msgctxt "alt. month"
+msgid "November"
+msgstr "Noyabr"
+
+msgctxt "alt. month"
+msgid "December"
+msgstr "Dekabr"
+
+msgid "This is not a valid IPv6 address."
+msgstr "Bu doğru IPv6 ünvanı deyil."
+
+#, python-format
+msgctxt "String to return when truncating text"
+msgid "%(truncated_text)s…"
+msgstr "%(truncated_text)s…"
+
+msgid "or"
+msgstr "və ya"
+
+#. Translators: This string is used as a separator between list elements
+msgid ", "
+msgstr ", "
+
+#, python-format
+msgid "%(num)d year"
+msgid_plural "%(num)d years"
+msgstr[0] "%(num)d il"
+msgstr[1] "%(num)d il"
+
+#, python-format
+msgid "%(num)d month"
+msgid_plural "%(num)d months"
+msgstr[0] "%(num)d ay"
+msgstr[1] "%(num)d ay"
+
+#, python-format
+msgid "%(num)d week"
+msgid_plural "%(num)d weeks"
+msgstr[0] "%(num)d həftə"
+msgstr[1] "%(num)d həftə"
+
+#, python-format
+msgid "%(num)d day"
+msgid_plural "%(num)d days"
+msgstr[0] "%(num)d gün"
+msgstr[1] "%(num)d gün"
+
+#, python-format
+msgid "%(num)d hour"
+msgid_plural "%(num)d hours"
+msgstr[0] "%(num)d saat"
+msgstr[1] "%(num)d saat"
+
+#, python-format
+msgid "%(num)d minute"
+msgid_plural "%(num)d minutes"
+msgstr[0] "%(num)d dəqiqə"
+msgstr[1] "%(num)d dəqiqə"
+
+msgid "Forbidden"
+msgstr "Qadağan"
+
+msgid "CSRF verification failed. Request aborted."
+msgstr "CSRF təsdiqləmə alınmadı. Sorğu ləğv edildi."
+
+msgid ""
+"You are seeing this message because this HTTPS site requires a “Referer "
+"header” to be sent by your web browser, but none was sent. This header is "
+"required for security reasons, to ensure that your browser is not being "
+"hijacked by third parties."
+msgstr ""
+
+msgid ""
+"If you have configured your browser to disable “Referer” headers, please re-"
+"enable them, at least for this site, or for HTTPS connections, or for “same-"
+"origin” requests."
+msgstr ""
+"Əgər səyyahınızın “Referer” başlığını göndərməsini söndürmüsünüzsə, lütfən "
+"bu sayt üçün, HTTPS əlaqələr üçün və ya “same-origin” sorğular üçün aktiv "
+"edin."
+
+msgid ""
+"If you are using the tag or "
+"including the “Referrer-Policy: no-referrer” header, please remove them. The "
+"CSRF protection requires the “Referer” header to do strict referer checking. "
+"If you’re concerned about privacy, use alternatives like for links to third-party sites."
+msgstr ""
+"Əgər etiketini və ya "
+"“Referrer-Policy: no-referrer” başlığını işlədirsinizsə, lütfən silin. CSRF "
+"qoruma dəqiq yönləndirən yoxlaması üçün “Referer” başlığını tələb edir. Əgər "
+"məxfilik üçün düşünürsünüzsə, üçüncü tərəf sayt keçidləri üçün kimi bir alternativ işlədin."
+
+msgid ""
+"You are seeing this message because this site requires a CSRF cookie when "
+"submitting forms. This cookie is required for security reasons, to ensure "
+"that your browser is not being hijacked by third parties."
+msgstr ""
+"Bu sayt formaları göndərmək üçün CSRF çərəzini işlədir. Bu çərəz "
+"səyyahınızın üçüncü biri tərəfindən hack-lənmədiyinə əmin olmaq üçün "
+"istifadə edilir."
+
+msgid ""
+"If you have configured your browser to disable cookies, please re-enable "
+"them, at least for this site, or for “same-origin” requests."
+msgstr ""
+"Əgər səyyahınızda çərəzlər söndürülübsə, lütfən bu sayt və ya “same-origin” "
+"sorğular üçün aktiv edin."
+
+msgid "More information is available with DEBUG=True."
+msgstr "Daha ətraflı məlumat DEBUG=True ilə mövcuddur."
+
+msgid "No year specified"
+msgstr "İl göstərilməyib"
+
+msgid "Date out of range"
+msgstr "Tarix aralığın xaricindədir"
+
+msgid "No month specified"
+msgstr "Ay göstərilməyib"
+
+msgid "No day specified"
+msgstr "Gün göstərilməyib"
+
+msgid "No week specified"
+msgstr "Həftə göstərilməyib"
+
+#, python-format
+msgid "No %(verbose_name_plural)s available"
+msgstr "%(verbose_name_plural)s seçmək mümkün deyil"
+
+#, python-format
+msgid ""
+"Future %(verbose_name_plural)s not available because %(class_name)s."
+"allow_future is False."
+msgstr ""
+"Gələcək %(verbose_name_plural)s seçmək mümkün deyil, çünki %(class_name)s."
+"allow_future Yalan kimi qeyd olunub."
+
+#, python-format
+msgid "Invalid date string “%(datestr)s” given format “%(format)s”"
+msgstr "“%(format)s” formatına görə “%(datestr)s” tarixi düzgün deyil"
+
+#, python-format
+msgid "No %(verbose_name)s found matching the query"
+msgstr "Sorğuya uyğun %(verbose_name)s tapılmadı"
+
+msgid "Page is not “last”, nor can it be converted to an int."
+msgstr "Səhifə həm “axırıncı” deyil, həm də tam ədədə çevrilə bilmir."
+
+#, python-format
+msgid "Invalid page (%(page_number)s): %(message)s"
+msgstr "Qeyri-düzgün səhifə (%(page_number)s): %(message)s"
+
+#, python-format
+msgid "Empty list and “%(class_name)s.allow_empty” is False."
+msgstr "Siyahı boşdur və “%(class_name)s.allow_empty” dəyəri False-dur."
+
+msgid "Directory indexes are not allowed here."
+msgstr "Ünvan indekslərinə icazə verilmir."
+
+#, python-format
+msgid "“%(path)s” does not exist"
+msgstr "“%(path)s” mövcud deyil"
+
+#, python-format
+msgid "Index of %(directory)s"
+msgstr "%(directory)s-nin indeksi"
+
+msgid "The install worked successfully! Congratulations!"
+msgstr "Quruluş uğurla tamamlandı! Təbriklər!"
+
+#, python-format
+msgid ""
+"View release notes for Django %(version)s"
+msgstr ""
+"Django %(version)s üçün buraxılış "
+"qeydlərinə baxın"
+
+#, python-format
+msgid ""
+"You are seeing this page because DEBUG=True is in your settings file and you have not configured any "
+"URLs."
+msgstr ""
+"Tənzimləmə faylınızda DEBUG=True və heç bir URL qurmadığınız üçün bu səhifəni görürsünüz."
+
+msgid "Django Documentation"
+msgstr "Django Sənədləri"
+
+msgid "Topics, references, & how-to’s"
+msgstr "Mövzular, istinadlar və nümunələr"
+
+msgid "Tutorial: A Polling App"
+msgstr "Məşğələ: Səsvermə Tətbiqi"
+
+msgid "Get started with Django"
+msgstr "Django-ya başla"
+
+msgid "Django Community"
+msgstr "Django İcması"
+
+msgid "Connect, get help, or contribute"
+msgstr "Qoşul, kömək al və dəstək ol"
diff --git a/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/az/__init__.py b/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/az/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/az/formats.py b/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/az/formats.py
new file mode 100644
index 00000000..253b6ddd
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/az/formats.py
@@ -0,0 +1,30 @@
+# This file is distributed under the same license as the Django package.
+#
+# The *_FORMAT strings use the Django date format syntax,
+# see https://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
+DATE_FORMAT = "j E Y"
+TIME_FORMAT = "G:i"
+DATETIME_FORMAT = "j E Y, G:i"
+YEAR_MONTH_FORMAT = "F Y"
+MONTH_DAY_FORMAT = "j F"
+SHORT_DATE_FORMAT = "d.m.Y"
+SHORT_DATETIME_FORMAT = "d.m.Y H:i"
+FIRST_DAY_OF_WEEK = 1 # Monday
+
+# The *_INPUT_FORMATS strings use the Python strftime format syntax,
+# see https://docs.python.org/library/datetime.html#strftime-strptime-behavior
+DATE_INPUT_FORMATS = [
+ "%d.%m.%Y", # '25.10.2006'
+ "%d.%m.%y", # '25.10.06'
+]
+DATETIME_INPUT_FORMATS = [
+ "%d.%m.%Y %H:%M:%S", # '25.10.2006 14:30:59'
+ "%d.%m.%Y %H:%M:%S.%f", # '25.10.2006 14:30:59.000200'
+ "%d.%m.%Y %H:%M", # '25.10.2006 14:30'
+ "%d.%m.%y %H:%M:%S", # '25.10.06 14:30:59'
+ "%d.%m.%y %H:%M:%S.%f", # '25.10.06 14:30:59.000200'
+ "%d.%m.%y %H:%M", # '25.10.06 14:30'
+]
+DECIMAL_SEPARATOR = ","
+THOUSAND_SEPARATOR = "\xa0" # non-breaking space
+NUMBER_GROUPING = 3
diff --git a/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/be/LC_MESSAGES/django.mo b/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/be/LC_MESSAGES/django.mo
new file mode 100644
index 00000000..6f150aa4
Binary files /dev/null and b/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/be/LC_MESSAGES/django.mo differ
diff --git a/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/be/LC_MESSAGES/django.po b/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/be/LC_MESSAGES/django.po
new file mode 100644
index 00000000..0c7e7db6
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/be/LC_MESSAGES/django.po
@@ -0,0 +1,1371 @@
+# This file is distributed under the same license as the Django package.
+#
+# Translators:
+# Viktar Palstsiuk , 2014-2015
+# znotdead , 2016-2017,2019-2021,2023
+# Bobsans , 2016
+msgid ""
+msgstr ""
+"Project-Id-Version: django\n"
+"Report-Msgid-Bugs-To: \n"
+"POT-Creation-Date: 2023-01-17 02:13-0600\n"
+"PO-Revision-Date: 2023-04-25 06:49+0000\n"
+"Last-Translator: znotdead , 2016-2017,2019-2021,2023\n"
+"Language-Team: Belarusian (http://www.transifex.com/django/django/language/"
+"be/)\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Language: be\n"
+"Plural-Forms: nplurals=4; plural=(n%10==1 && n%100!=11 ? 0 : n%10>=2 && "
+"n%10<=4 && (n%100<12 || n%100>14) ? 1 : n%10==0 || (n%10>=5 && n%10<=9) || "
+"(n%100>=11 && n%100<=14)? 2 : 3);\n"
+
+msgid "Afrikaans"
+msgstr "Афрыкаанс"
+
+msgid "Arabic"
+msgstr "Арабская"
+
+msgid "Algerian Arabic"
+msgstr "Алжырская арабская"
+
+msgid "Asturian"
+msgstr "Астурыйская"
+
+msgid "Azerbaijani"
+msgstr "Азэрбайджанская"
+
+msgid "Bulgarian"
+msgstr "Баўгарская"
+
+msgid "Belarusian"
+msgstr "Беларуская"
+
+msgid "Bengali"
+msgstr "Бэнґальская"
+
+msgid "Breton"
+msgstr "Брэтонская"
+
+msgid "Bosnian"
+msgstr "Басьнійская"
+
+msgid "Catalan"
+msgstr "Каталёнская"
+
+msgid "Central Kurdish (Sorani)"
+msgstr "Цэнтральнакурдская (сарані)"
+
+msgid "Czech"
+msgstr "Чэская"
+
+msgid "Welsh"
+msgstr "Валійская"
+
+msgid "Danish"
+msgstr "Дацкая"
+
+msgid "German"
+msgstr "Нямецкая"
+
+msgid "Lower Sorbian"
+msgstr "Ніжнелужыцкая"
+
+msgid "Greek"
+msgstr "Грэцкая"
+
+msgid "English"
+msgstr "Анґельская"
+
+msgid "Australian English"
+msgstr "Анґельская (Аўстралія)"
+
+msgid "British English"
+msgstr "Анґельская (Брытанская)"
+
+msgid "Esperanto"
+msgstr "Эспэранта"
+
+msgid "Spanish"
+msgstr "Гішпанская"
+
+msgid "Argentinian Spanish"
+msgstr "Гішпанская (Арґентына)"
+
+msgid "Colombian Spanish"
+msgstr "Гішпанская (Калумбія)"
+
+msgid "Mexican Spanish"
+msgstr "Гішпанская (Мэксыка)"
+
+msgid "Nicaraguan Spanish"
+msgstr "Гішпанская (Нікараґуа)"
+
+msgid "Venezuelan Spanish"
+msgstr "Іспанская (Вэнэсуэла)"
+
+msgid "Estonian"
+msgstr "Эстонская"
+
+msgid "Basque"
+msgstr "Басконская"
+
+msgid "Persian"
+msgstr "Фарсі"
+
+msgid "Finnish"
+msgstr "Фінская"
+
+msgid "French"
+msgstr "Француская"
+
+msgid "Frisian"
+msgstr "Фрызкая"
+
+msgid "Irish"
+msgstr "Ірляндзкая"
+
+msgid "Scottish Gaelic"
+msgstr "Гэльская шатляндзкая"
+
+msgid "Galician"
+msgstr "Ґальская"
+
+msgid "Hebrew"
+msgstr "Габрэйская"
+
+msgid "Hindi"
+msgstr "Гінды"
+
+msgid "Croatian"
+msgstr "Харвацкая"
+
+msgid "Upper Sorbian"
+msgstr "Верхнелужыцкая"
+
+msgid "Hungarian"
+msgstr "Вугорская"
+
+msgid "Armenian"
+msgstr "Армянскі"
+
+msgid "Interlingua"
+msgstr "Інтэрлінгва"
+
+msgid "Indonesian"
+msgstr "Інданэзійская"
+
+msgid "Igbo"
+msgstr "Ігба"
+
+msgid "Ido"
+msgstr "Іда"
+
+msgid "Icelandic"
+msgstr "Ісьляндзкая"
+
+msgid "Italian"
+msgstr "Італьянская"
+
+msgid "Japanese"
+msgstr "Японская"
+
+msgid "Georgian"
+msgstr "Грузінская"
+
+msgid "Kabyle"
+msgstr "Кабільскі"
+
+msgid "Kazakh"
+msgstr "Казаская"
+
+msgid "Khmer"
+msgstr "Кхмерская"
+
+msgid "Kannada"
+msgstr "Каннада"
+
+msgid "Korean"
+msgstr "Карэйская"
+
+msgid "Kyrgyz"
+msgstr "Кіргізская"
+
+msgid "Luxembourgish"
+msgstr "Люксэмбургская"
+
+msgid "Lithuanian"
+msgstr "Літоўская"
+
+msgid "Latvian"
+msgstr "Латыская"
+
+msgid "Macedonian"
+msgstr "Македонская"
+
+msgid "Malayalam"
+msgstr "Малаялам"
+
+msgid "Mongolian"
+msgstr "Манґольская"
+
+msgid "Marathi"
+msgstr "Маратхі"
+
+msgid "Malay"
+msgstr "Малайская"
+
+msgid "Burmese"
+msgstr "Бірманская"
+
+msgid "Norwegian Bokmål"
+msgstr "Нарвэская букмал"
+
+msgid "Nepali"
+msgstr "Нэпальская"
+
+msgid "Dutch"
+msgstr "Галяндзкая"
+
+msgid "Norwegian Nynorsk"
+msgstr "Нарвэская нюнорск"
+
+msgid "Ossetic"
+msgstr "Асяцінская"
+
+msgid "Punjabi"
+msgstr "Панджабі"
+
+msgid "Polish"
+msgstr "Польская"
+
+msgid "Portuguese"
+msgstr "Партуґальская"
+
+msgid "Brazilian Portuguese"
+msgstr "Партуґальская (Бразылія)"
+
+msgid "Romanian"
+msgstr "Румынская"
+
+msgid "Russian"
+msgstr "Расейская"
+
+msgid "Slovak"
+msgstr "Славацкая"
+
+msgid "Slovenian"
+msgstr "Славенская"
+
+msgid "Albanian"
+msgstr "Альбанская"
+
+msgid "Serbian"
+msgstr "Сэрбская"
+
+msgid "Serbian Latin"
+msgstr "Сэрбская (лацінка)"
+
+msgid "Swedish"
+msgstr "Швэдзкая"
+
+msgid "Swahili"
+msgstr "Суахілі"
+
+msgid "Tamil"
+msgstr "Тамільская"
+
+msgid "Telugu"
+msgstr "Тэлуґу"
+
+msgid "Tajik"
+msgstr "Таджыкскі"
+
+msgid "Thai"
+msgstr "Тайская"
+
+msgid "Turkmen"
+msgstr "Туркменская"
+
+msgid "Turkish"
+msgstr "Турэцкая"
+
+msgid "Tatar"
+msgstr "Татарская"
+
+msgid "Udmurt"
+msgstr "Удмурцкая"
+
+msgid "Ukrainian"
+msgstr "Украінская"
+
+msgid "Urdu"
+msgstr "Урду"
+
+msgid "Uzbek"
+msgstr "Узбецкі"
+
+msgid "Vietnamese"
+msgstr "Віетнамская"
+
+msgid "Simplified Chinese"
+msgstr "Кітайская (спрошчаная)"
+
+msgid "Traditional Chinese"
+msgstr "Кітайская (звычайная)"
+
+msgid "Messages"
+msgstr "Паведамленні"
+
+msgid "Site Maps"
+msgstr "Мапы сайту"
+
+msgid "Static Files"
+msgstr "Cтатычныя файлы"
+
+msgid "Syndication"
+msgstr "Сындыкацыя"
+
+#. Translators: String used to replace omitted page numbers in elided page
+#. range generated by paginators, e.g. [1, 2, '…', 5, 6, 7, '…', 9, 10].
+msgid "…"
+msgstr "…"
+
+msgid "That page number is not an integer"
+msgstr "Лік гэтай старонкі не з'яўляецца цэлым лікам"
+
+msgid "That page number is less than 1"
+msgstr "Лік старонкі менш чым 1"
+
+msgid "That page contains no results"
+msgstr "Гэтая старонка не мае ніякіх вынікаў"
+
+msgid "Enter a valid value."
+msgstr "Пазначце правільнае значэньне."
+
+msgid "Enter a valid URL."
+msgstr "Пазначце чынную спасылку."
+
+msgid "Enter a valid integer."
+msgstr "Увядзіце цэлы лік."
+
+msgid "Enter a valid email address."
+msgstr "Увядзіце сапраўдны адрас электроннай пошты."
+
+#. Translators: "letters" means latin letters: a-z and A-Z.
+msgid ""
+"Enter a valid “slug” consisting of letters, numbers, underscores or hyphens."
+msgstr ""
+"Значэнне павінна быць толькі з літараў, личбаў, знакаў падкрэслівання ці "
+"злучкі."
+
+msgid ""
+"Enter a valid “slug” consisting of Unicode letters, numbers, underscores, or "
+"hyphens."
+msgstr ""
+"Значэнне павінна быць толькі з літараў стандарту Unicode, личбаў, знакаў "
+"падкрэслівання ці злучкі."
+
+msgid "Enter a valid IPv4 address."
+msgstr "Пазначце чынны адрас IPv4."
+
+msgid "Enter a valid IPv6 address."
+msgstr "Пазначце чынны адрас IPv6."
+
+msgid "Enter a valid IPv4 or IPv6 address."
+msgstr "Пазначце чынны адрас IPv4 або IPv6."
+
+msgid "Enter only digits separated by commas."
+msgstr "Набярыце лічбы, падзеленыя коскамі."
+
+#, python-format
+msgid "Ensure this value is %(limit_value)s (it is %(show_value)s)."
+msgstr ""
+"Упэўніцеся, што гэтае значэньне — %(limit_value)s (зараз яно — "
+"%(show_value)s)."
+
+#, python-format
+msgid "Ensure this value is less than or equal to %(limit_value)s."
+msgstr "Значэньне мусіць быць меншым або роўным %(limit_value)s."
+
+#, python-format
+msgid "Ensure this value is greater than or equal to %(limit_value)s."
+msgstr "Значэньне мусіць быць большым або роўным %(limit_value)s."
+
+#, python-format
+msgid "Ensure this value is a multiple of step size %(limit_value)s."
+msgstr "Пераканайцеся, што гэта значэнне кратнае памеру кроку %(limit_value)s."
+
+#, python-format
+msgid ""
+"Ensure this value has at least %(limit_value)d character (it has "
+"%(show_value)d)."
+msgid_plural ""
+"Ensure this value has at least %(limit_value)d characters (it has "
+"%(show_value)d)."
+msgstr[0] ""
+"Упэўніцеся, што гэтае значэнне мае не менш %(limit_value)d сімвал (зараз "
+"%(show_value)d)."
+msgstr[1] ""
+"Упэўніцеся, што гэтае значэнне мае не менш %(limit_value)d сімвала (зараз "
+"%(show_value)d)."
+msgstr[2] ""
+"Упэўніцеся, што гэтае значэнне мае не менш %(limit_value)d сімвалаў (зараз "
+"%(show_value)d)."
+msgstr[3] ""
+"Упэўніцеся, што гэтае значэнне мае не менш %(limit_value)d сімвалаў (зараз "
+"%(show_value)d)."
+
+#, python-format
+msgid ""
+"Ensure this value has at most %(limit_value)d character (it has "
+"%(show_value)d)."
+msgid_plural ""
+"Ensure this value has at most %(limit_value)d characters (it has "
+"%(show_value)d)."
+msgstr[0] ""
+"Упэўніцеся, што гэтае значэнне мае не болей %(limit_value)d сімвал (зараз "
+"%(show_value)d)."
+msgstr[1] ""
+"Упэўніцеся, што гэтае значэнне мае не болей %(limit_value)d сімвала (зараз "
+"%(show_value)d)."
+msgstr[2] ""
+"Упэўніцеся, што гэтае значэнне мае не болей %(limit_value)d сімвалаў (зараз "
+"%(show_value)d)."
+msgstr[3] ""
+"Упэўніцеся, што гэтае значэнне мае не болей %(limit_value)d сімвалаў (зараз "
+"%(show_value)d)."
+
+msgid "Enter a number."
+msgstr "Набярыце лік."
+
+#, python-format
+msgid "Ensure that there are no more than %(max)s digit in total."
+msgid_plural "Ensure that there are no more than %(max)s digits in total."
+msgstr[0] "Упэўніцеся, што набралі ня болей за %(max)s лічбу."
+msgstr[1] "Упэўніцеся, што набралі ня болей за %(max)s лічбы."
+msgstr[2] "Упэўніцеся, што набралі ня болей за %(max)s лічбаў."
+msgstr[3] "Упэўніцеся, што набралі ня болей за %(max)s лічбаў."
+
+#, python-format
+msgid "Ensure that there are no more than %(max)s decimal place."
+msgid_plural "Ensure that there are no more than %(max)s decimal places."
+msgstr[0] "Упэўніцеся, што набралі ня болей за %(max)s лічбу пасьля коскі."
+msgstr[1] "Упэўніцеся, што набралі ня болей за %(max)s лічбы пасьля коскі."
+msgstr[2] "Упэўніцеся, што набралі ня болей за %(max)s лічбаў пасьля коскі."
+msgstr[3] "Упэўніцеся, што набралі ня болей за %(max)s лічбаў пасьля коскі."
+
+#, python-format
+msgid ""
+"Ensure that there are no more than %(max)s digit before the decimal point."
+msgid_plural ""
+"Ensure that there are no more than %(max)s digits before the decimal point."
+msgstr[0] "Упэўніцеся, што набралі ня болей за %(max)s лічбу да коскі."
+msgstr[1] "Упэўніцеся, што набралі ня болей за %(max)s лічбы да коскі."
+msgstr[2] "Упэўніцеся, што набралі ня болей за %(max)s лічбаў да коскі."
+msgstr[3] "Упэўніцеся, што набралі ня болей за %(max)s лічбаў да коскі."
+
+#, python-format
+msgid ""
+"File extension “%(extension)s” is not allowed. Allowed extensions are: "
+"%(allowed_extensions)s."
+msgstr ""
+"Пашырэнне файла “%(extension)s” не дапускаецца. Дапушчальныя пашырэння: "
+"%(allowed_extensions)s."
+
+msgid "Null characters are not allowed."
+msgstr "Null сімвалы не дапускаюцца."
+
+msgid "and"
+msgstr "і"
+
+#, python-format
+msgid "%(model_name)s with this %(field_labels)s already exists."
+msgstr "%(model_name)s з такім %(field_labels)s ужо існуе."
+
+#, python-format
+msgid "Constraint “%(name)s” is violated."
+msgstr "Абмежаванне \"%(name)s\" парушана."
+
+#, python-format
+msgid "Value %(value)r is not a valid choice."
+msgstr "Значэнне %(value)r не з'яўляецца правільным выбарам."
+
+msgid "This field cannot be null."
+msgstr "Поле ня можа мець значэньне «null»."
+
+msgid "This field cannot be blank."
+msgstr "Трэба запоўніць поле."
+
+#, python-format
+msgid "%(model_name)s with this %(field_label)s already exists."
+msgstr "%(model_name)s з такім %(field_label)s ужо існуе."
+
+#. Translators: The 'lookup_type' is one of 'date', 'year' or
+#. 'month'. Eg: "Title must be unique for pub_date year"
+#, python-format
+msgid ""
+"%(field_label)s must be unique for %(date_field_label)s %(lookup_type)s."
+msgstr ""
+"%(field_label)s павінна быць унікальна для %(date_field_label)s "
+"%(lookup_type)s."
+
+#, python-format
+msgid "Field of type: %(field_type)s"
+msgstr "Палі віду: %(field_type)s"
+
+#, python-format
+msgid "“%(value)s” value must be either True or False."
+msgstr "Значэньне “%(value)s” павінна быць True альбо False."
+
+#, python-format
+msgid "“%(value)s” value must be either True, False, or None."
+msgstr "Значэньне “%(value)s” павінна быць True, False альбо None."
+
+msgid "Boolean (Either True or False)"
+msgstr "Ляґічнае («сапраўдна» або «не сапраўдна»)"
+
+#, python-format
+msgid "String (up to %(max_length)s)"
+msgstr "Радок (ня болей за %(max_length)s)"
+
+msgid "String (unlimited)"
+msgstr "Радок (неабмежаваны)"
+
+msgid "Comma-separated integers"
+msgstr "Цэлыя лікі, падзеленыя коскаю"
+
+#, python-format
+msgid ""
+"“%(value)s” value has an invalid date format. It must be in YYYY-MM-DD "
+"format."
+msgstr ""
+"Значэнне “%(value)s” мае няправільны фармат. Яно павінна быць у фармаце ГГГГ-"
+"ММ-ДД."
+
+#, python-format
+msgid ""
+"“%(value)s” value has the correct format (YYYY-MM-DD) but it is an invalid "
+"date."
+msgstr ""
+"Значэнне “%(value)s” мае правільны фармат(ГГГГ-ММ-ДД) але гэта несапраўдная "
+"дата."
+
+msgid "Date (without time)"
+msgstr "Дата (бяз часу)"
+
+#, python-format
+msgid ""
+"“%(value)s” value has an invalid format. It must be in YYYY-MM-DD HH:MM[:ss[."
+"uuuuuu]][TZ] format."
+msgstr ""
+"Значэнне “%(value)s” мае няправільны фармат. Яно павінна быць у фармаце ГГГГ-"
+"ММ-ДД ГГ:ХХ[:сс[.мммммм]][ЧА], дзе ЧА — часавы абсяг."
+
+#, python-format
+msgid ""
+"“%(value)s” value has the correct format (YYYY-MM-DD HH:MM[:ss[.uuuuuu]]"
+"[TZ]) but it is an invalid date/time."
+msgstr ""
+"Значэнне “%(value)s” мае правільны фармат (ГГГГ-ММ-ДД ГГ:ХХ[:сс[.мммммм]]"
+"[ЧА], дзе ЧА — часавы абсяг) але гэта несапраўдныя дата/час."
+
+msgid "Date (with time)"
+msgstr "Дата (разам з часам)"
+
+#, python-format
+msgid "“%(value)s” value must be a decimal number."
+msgstr "Значэньне “%(value)s” павінна быць дзесятковым лікам."
+
+msgid "Decimal number"
+msgstr "Дзесятковы лік"
+
+#, python-format
+msgid ""
+"“%(value)s” value has an invalid format. It must be in [DD] [[HH:]MM:]ss[."
+"uuuuuu] format."
+msgstr ""
+"Значэньне “%(value)s” мае няправільны фармат. Яно павінна быць у фармаце "
+"[ДД] [ГГ:[ХХ:]]сс[.мммммм]."
+
+msgid "Duration"
+msgstr "Працягласць"
+
+msgid "Email address"
+msgstr "Адрас эл. пошты"
+
+msgid "File path"
+msgstr "Шлях да файла"
+
+#, python-format
+msgid "“%(value)s” value must be a float."
+msgstr "Значэньне “%(value)s” павінна быць дробным лікам."
+
+msgid "Floating point number"
+msgstr "Лік зь пераноснай коскаю"
+
+#, python-format
+msgid "“%(value)s” value must be an integer."
+msgstr "Значэньне “%(value)s” павінна быць цэлым лікам."
+
+msgid "Integer"
+msgstr "Цэлы лік"
+
+msgid "Big (8 byte) integer"
+msgstr "Вялікі (8 байтаў) цэлы"
+
+msgid "Small integer"
+msgstr "Малы цэлы лік"
+
+msgid "IPv4 address"
+msgstr "Адрас IPv4"
+
+msgid "IP address"
+msgstr "Адрас IP"
+
+#, python-format
+msgid "“%(value)s” value must be either None, True or False."
+msgstr "Значэньне “%(value)s” павінна быць None, True альбо False."
+
+msgid "Boolean (Either True, False or None)"
+msgstr "Ляґічнае («сапраўдна», «не сапраўдна» ці «нічога»)"
+
+msgid "Positive big integer"
+msgstr "Дадатны вялікі цэлы лік"
+
+msgid "Positive integer"
+msgstr "Дадатны цэлы лік"
+
+msgid "Positive small integer"
+msgstr "Дадатны малы цэлы лік"
+
+#, python-format
+msgid "Slug (up to %(max_length)s)"
+msgstr "Бірка (ня болей за %(max_length)s)"
+
+msgid "Text"
+msgstr "Тэкст"
+
+#, python-format
+msgid ""
+"“%(value)s” value has an invalid format. It must be in HH:MM[:ss[.uuuuuu]] "
+"format."
+msgstr ""
+"Значэньне “%(value)s” мае няправільны фармат. Яно павінна быць у фармаце ГГ:"
+"ХХ[:сс[.мммммм]]."
+
+#, python-format
+msgid ""
+"“%(value)s” value has the correct format (HH:MM[:ss[.uuuuuu]]) but it is an "
+"invalid time."
+msgstr ""
+"Значэнне “%(value)s” мае правільны фармат (ГГ:ХХ[:сс[.мммммм]]) але гэта "
+"несапраўдны час."
+
+msgid "Time"
+msgstr "Час"
+
+msgid "URL"
+msgstr "Сеціўная спасылка"
+
+msgid "Raw binary data"
+msgstr "Неапрацаваныя бінарныя зьвесткі"
+
+#, python-format
+msgid "“%(value)s” is not a valid UUID."
+msgstr "“%(value)s” не з'яўляецца дапушчальным UUID."
+
+msgid "Universally unique identifier"
+msgstr "Універсальны непаўторны ідэнтыфікатар"
+
+msgid "File"
+msgstr "Файл"
+
+msgid "Image"
+msgstr "Выява"
+
+msgid "A JSON object"
+msgstr "Аб'ект JSON"
+
+msgid "Value must be valid JSON."
+msgstr "Значэньне павінна быць сапраўдным JSON."
+
+#, python-format
+msgid "%(model)s instance with %(field)s %(value)r does not exist."
+msgstr "Экземпляр %(model)s з %(field)s %(value)r не iснуе."
+
+msgid "Foreign Key (type determined by related field)"
+msgstr "Вонкавы ключ (від вызначаецца паводле зьвязанага поля)"
+
+msgid "One-to-one relationship"
+msgstr "Сувязь «адзін да аднаго»"
+
+#, python-format
+msgid "%(from)s-%(to)s relationship"
+msgstr "Сувязь %(from)s-%(to)s"
+
+#, python-format
+msgid "%(from)s-%(to)s relationships"
+msgstr "Сувязi %(from)s-%(to)s"
+
+msgid "Many-to-many relationship"
+msgstr "Сувязь «некалькі да некалькіх»"
+
+#. Translators: If found as last label character, these punctuation
+#. characters will prevent the default label_suffix to be appended to the
+#. label
+msgid ":?.!"
+msgstr ":?.!"
+
+msgid "This field is required."
+msgstr "Поле трэба запоўніць."
+
+msgid "Enter a whole number."
+msgstr "Набярыце ўвесь лік."
+
+msgid "Enter a valid date."
+msgstr "Пазначце чынную дату."
+
+msgid "Enter a valid time."
+msgstr "Пазначце чынны час."
+
+msgid "Enter a valid date/time."
+msgstr "Пазначце чынныя час і дату."
+
+msgid "Enter a valid duration."
+msgstr "Увядзіце сапраўдны тэрмін."
+
+#, python-brace-format
+msgid "The number of days must be between {min_days} and {max_days}."
+msgstr "Колькасць дзён павінна быць паміж {min_days} i {max_days}."
+
+msgid "No file was submitted. Check the encoding type on the form."
+msgstr "Файл не даслалі. Зірніце кадоўку блянку."
+
+msgid "No file was submitted."
+msgstr "Файл не даслалі."
+
+msgid "The submitted file is empty."
+msgstr "Дасланы файл — парожні."
+
+#, python-format
+msgid "Ensure this filename has at most %(max)d character (it has %(length)d)."
+msgid_plural ""
+"Ensure this filename has at most %(max)d characters (it has %(length)d)."
+msgstr[0] ""
+"Упэўніцеся, што гэтае імя файлу мае не болей %(max)d сімвал (зараз "
+"%(length)d)."
+msgstr[1] ""
+"Упэўніцеся, што гэтае імя файлу мае не болей %(max)d сімвала (зараз "
+"%(length)d)."
+msgstr[2] ""
+"Упэўніцеся, што гэтае імя файлу мае не болей %(max)d сімвалаў (зараз "
+"%(length)d)."
+msgstr[3] ""
+"Упэўніцеся, што гэтае імя файлу мае не болей %(max)d сімвалаў (зараз "
+"%(length)d)."
+
+msgid "Please either submit a file or check the clear checkbox, not both."
+msgstr ""
+"Трэба або даслаць файл, або абраць «Ачысьціць», але нельга рабіць гэта "
+"адначасова."
+
+msgid ""
+"Upload a valid image. The file you uploaded was either not an image or a "
+"corrupted image."
+msgstr ""
+"Запампаваць чынны малюнак. Запампавалі або не выяву, або пашкоджаную выяву."
+
+#, python-format
+msgid "Select a valid choice. %(value)s is not one of the available choices."
+msgstr "Абярыце дазволенае. %(value)s няма ў даступных значэньнях."
+
+msgid "Enter a list of values."
+msgstr "Упішыце сьпіс значэньняў."
+
+msgid "Enter a complete value."
+msgstr "Калі ласка, увядзіце поўнае значэньне."
+
+msgid "Enter a valid UUID."
+msgstr "Увядзіце сапраўдны UUID."
+
+msgid "Enter a valid JSON."
+msgstr "Пазначце сапраўдны JSON."
+
+#. Translators: This is the default suffix added to form field labels
+msgid ":"
+msgstr ":"
+
+#, python-format
+msgid "(Hidden field %(name)s) %(error)s"
+msgstr "(Схаванае поле %(name)s) %(error)s"
+
+#, python-format
+msgid ""
+"ManagementForm data is missing or has been tampered with. Missing fields: "
+"%(field_names)s. You may need to file a bug report if the issue persists."
+msgstr ""
+"Дадзеныя формы ManagementForm адсутнічаюць ці былі падменены. Адсутнічаюць "
+"палі: %(field_names)s. Магчыма, вам спатрэбіцца падаць справаздачу пра "
+"памылку, калі праблема захоўваецца."
+
+#, python-format
+msgid "Please submit at most %(num)d form."
+msgid_plural "Please submit at most %(num)d forms."
+msgstr[0] "Калі ласка, адпраўце не болей чым %(num)d формаў."
+msgstr[1] "Калі ласка, адпраўце не болей чым %(num)d формаў."
+msgstr[2] "Калі ласка, адпраўце не болей чым %(num)d формаў."
+msgstr[3] "Калі ласка, адпраўце не болей чым %(num)d формаў."
+
+#, python-format
+msgid "Please submit at least %(num)d form."
+msgid_plural "Please submit at least %(num)d forms."
+msgstr[0] "Калі ласка, адпраўце не менш чым %(num)d форму."
+msgstr[1] "Калі ласка, адпраўце не менш чым %(num)d формы."
+msgstr[2] "Калі ласка, адпраўце не менш чым %(num)d формаў."
+msgstr[3] "Калі ласка, адпраўце не менш чым %(num)d формаў."
+
+msgid "Order"
+msgstr "Парадак"
+
+msgid "Delete"
+msgstr "Выдаліць"
+
+#, python-format
+msgid "Please correct the duplicate data for %(field)s."
+msgstr "У полі «%(field)s» выпраўце зьвесткі, якія паўтараюцца."
+
+#, python-format
+msgid "Please correct the duplicate data for %(field)s, which must be unique."
+msgstr "Выпраўце зьвесткі ў полі «%(field)s»: нельга, каб яны паўтараліся."
+
+#, python-format
+msgid ""
+"Please correct the duplicate data for %(field_name)s which must be unique "
+"for the %(lookup)s in %(date_field)s."
+msgstr ""
+"Выпраўце зьвесткі ў полі «%(field_name)s»: нельга каб зьвесткі ў "
+"«%(date_field)s» для «%(lookup)s» паўтараліся."
+
+msgid "Please correct the duplicate values below."
+msgstr "Выпраўце зьвесткі, якія паўтараюцца (гл. ніжэй)."
+
+msgid "The inline value did not match the parent instance."
+msgstr "Убудаванае значэнне не супадае з бацькоўскім значэннем."
+
+msgid "Select a valid choice. That choice is not one of the available choices."
+msgstr "Абярыце дазволенае. Абранага няма ў даступных значэньнях."
+
+#, python-format
+msgid "“%(pk)s” is not a valid value."
+msgstr "“%(pk)s” не сапраўднае значэнне."
+
+#, python-format
+msgid ""
+"%(datetime)s couldn’t be interpreted in time zone %(current_timezone)s; it "
+"may be ambiguous or it may not exist."
+msgstr ""
+"У часавым абсягу %(current_timezone)s нельга зразумець дату %(datetime)s: "
+"яна можа быць неадназначнаю або яе можа не існаваць."
+
+msgid "Clear"
+msgstr "Ачысьціць"
+
+msgid "Currently"
+msgstr "Зараз"
+
+msgid "Change"
+msgstr "Зьмяніць"
+
+msgid "Unknown"
+msgstr "Невядома"
+
+msgid "Yes"
+msgstr "Так"
+
+msgid "No"
+msgstr "Не"
+
+#. Translators: Please do not add spaces around commas.
+msgid "yes,no,maybe"
+msgstr "так,не,магчыма"
+
+#, python-format
+msgid "%(size)d byte"
+msgid_plural "%(size)d bytes"
+msgstr[0] "%(size)d байт"
+msgstr[1] "%(size)d байты"
+msgstr[2] "%(size)d байтаў"
+msgstr[3] "%(size)d байтаў"
+
+#, python-format
+msgid "%s KB"
+msgstr "%s КБ"
+
+#, python-format
+msgid "%s MB"
+msgstr "%s МБ"
+
+#, python-format
+msgid "%s GB"
+msgstr "%s ҐБ"
+
+#, python-format
+msgid "%s TB"
+msgstr "%s ТБ"
+
+#, python-format
+msgid "%s PB"
+msgstr "%s ПБ"
+
+msgid "p.m."
+msgstr "папаўдні"
+
+msgid "a.m."
+msgstr "папоўначы"
+
+msgid "PM"
+msgstr "папаўдні"
+
+msgid "AM"
+msgstr "папоўначы"
+
+msgid "midnight"
+msgstr "поўнач"
+
+msgid "noon"
+msgstr "поўдзень"
+
+msgid "Monday"
+msgstr "Панядзелак"
+
+msgid "Tuesday"
+msgstr "Аўторак"
+
+msgid "Wednesday"
+msgstr "Серада"
+
+msgid "Thursday"
+msgstr "Чацьвер"
+
+msgid "Friday"
+msgstr "Пятніца"
+
+msgid "Saturday"
+msgstr "Субота"
+
+msgid "Sunday"
+msgstr "Нядзеля"
+
+msgid "Mon"
+msgstr "Пн"
+
+msgid "Tue"
+msgstr "Аў"
+
+msgid "Wed"
+msgstr "Ср"
+
+msgid "Thu"
+msgstr "Чц"
+
+msgid "Fri"
+msgstr "Пт"
+
+msgid "Sat"
+msgstr "Сб"
+
+msgid "Sun"
+msgstr "Нд"
+
+msgid "January"
+msgstr "студзеня"
+
+msgid "February"
+msgstr "лютага"
+
+msgid "March"
+msgstr "сакавіка"
+
+msgid "April"
+msgstr "красавіка"
+
+msgid "May"
+msgstr "траўня"
+
+msgid "June"
+msgstr "чэрвеня"
+
+msgid "July"
+msgstr "ліпеня"
+
+msgid "August"
+msgstr "жніўня"
+
+msgid "September"
+msgstr "верасьня"
+
+msgid "October"
+msgstr "кастрычніка"
+
+msgid "November"
+msgstr "лістапада"
+
+msgid "December"
+msgstr "сьнежня"
+
+msgid "jan"
+msgstr "сту"
+
+msgid "feb"
+msgstr "лют"
+
+msgid "mar"
+msgstr "сак"
+
+msgid "apr"
+msgstr "кра"
+
+msgid "may"
+msgstr "тра"
+
+msgid "jun"
+msgstr "чэр"
+
+msgid "jul"
+msgstr "ліп"
+
+msgid "aug"
+msgstr "жні"
+
+msgid "sep"
+msgstr "вер"
+
+msgid "oct"
+msgstr "кас"
+
+msgid "nov"
+msgstr "ліс"
+
+msgid "dec"
+msgstr "сьн"
+
+msgctxt "abbrev. month"
+msgid "Jan."
+msgstr "Сту."
+
+msgctxt "abbrev. month"
+msgid "Feb."
+msgstr "Лют."
+
+msgctxt "abbrev. month"
+msgid "March"
+msgstr "сакавіка"
+
+msgctxt "abbrev. month"
+msgid "April"
+msgstr "красавіка"
+
+msgctxt "abbrev. month"
+msgid "May"
+msgstr "траўня"
+
+msgctxt "abbrev. month"
+msgid "June"
+msgstr "чэрвеня"
+
+msgctxt "abbrev. month"
+msgid "July"
+msgstr "ліпеня"
+
+msgctxt "abbrev. month"
+msgid "Aug."
+msgstr "Жні."
+
+msgctxt "abbrev. month"
+msgid "Sept."
+msgstr "Вер."
+
+msgctxt "abbrev. month"
+msgid "Oct."
+msgstr "Кас."
+
+msgctxt "abbrev. month"
+msgid "Nov."
+msgstr "Ліс."
+
+msgctxt "abbrev. month"
+msgid "Dec."
+msgstr "Сьн."
+
+msgctxt "alt. month"
+msgid "January"
+msgstr "студзеня"
+
+msgctxt "alt. month"
+msgid "February"
+msgstr "лютага"
+
+msgctxt "alt. month"
+msgid "March"
+msgstr "сакавіка"
+
+msgctxt "alt. month"
+msgid "April"
+msgstr "красавіка"
+
+msgctxt "alt. month"
+msgid "May"
+msgstr "траўня"
+
+msgctxt "alt. month"
+msgid "June"
+msgstr "чэрвеня"
+
+msgctxt "alt. month"
+msgid "July"
+msgstr "ліпеня"
+
+msgctxt "alt. month"
+msgid "August"
+msgstr "жніўня"
+
+msgctxt "alt. month"
+msgid "September"
+msgstr "верасьня"
+
+msgctxt "alt. month"
+msgid "October"
+msgstr "кастрычніка"
+
+msgctxt "alt. month"
+msgid "November"
+msgstr "лістапада"
+
+msgctxt "alt. month"
+msgid "December"
+msgstr "сьнежня"
+
+msgid "This is not a valid IPv6 address."
+msgstr "Гэта ня правільны адрас IPv6."
+
+#, python-format
+msgctxt "String to return when truncating text"
+msgid "%(truncated_text)s…"
+msgstr "%(truncated_text)s…"
+
+msgid "or"
+msgstr "або"
+
+#. Translators: This string is used as a separator between list elements
+msgid ", "
+msgstr ", "
+
+#, python-format
+msgid "%(num)d year"
+msgid_plural "%(num)d years"
+msgstr[0] "%(num)d год"
+msgstr[1] "%(num)d гады"
+msgstr[2] "%(num)d гадоў"
+msgstr[3] "%(num)d гадоў"
+
+#, python-format
+msgid "%(num)d month"
+msgid_plural "%(num)d months"
+msgstr[0] "%(num)d месяц"
+msgstr[1] "%(num)d месяцы"
+msgstr[2] "%(num)d месяцаў"
+msgstr[3] "%(num)d месяцаў"
+
+#, python-format
+msgid "%(num)d week"
+msgid_plural "%(num)d weeks"
+msgstr[0] "%(num)d тыдзень"
+msgstr[1] "%(num)d тыдні"
+msgstr[2] "%(num)d тыдняў"
+msgstr[3] "%(num)d тыдняў"
+
+#, python-format
+msgid "%(num)d day"
+msgid_plural "%(num)d days"
+msgstr[0] "%(num)d дзень"
+msgstr[1] "%(num)d дні"
+msgstr[2] "%(num)d дзён"
+msgstr[3] "%(num)d дзён"
+
+#, python-format
+msgid "%(num)d hour"
+msgid_plural "%(num)d hours"
+msgstr[0] "%(num)d гадзіна"
+msgstr[1] "%(num)d гадзіны"
+msgstr[2] "%(num)d гадзін"
+msgstr[3] "%(num)d гадзін"
+
+#, python-format
+msgid "%(num)d minute"
+msgid_plural "%(num)d minutes"
+msgstr[0] "%(num)d хвіліна"
+msgstr[1] "%(num)d хвіліны"
+msgstr[2] "%(num)d хвілін"
+msgstr[3] "%(num)d хвілін"
+
+msgid "Forbidden"
+msgstr "Забаронена"
+
+msgid "CSRF verification failed. Request aborted."
+msgstr "CSRF-праверка не атрымалася. Запыт спынены."
+
+msgid ""
+"You are seeing this message because this HTTPS site requires a “Referer "
+"header” to be sent by your web browser, but none was sent. This header is "
+"required for security reasons, to ensure that your browser is not being "
+"hijacked by third parties."
+msgstr ""
+"Вы бачыце гэта паведамленне, таму што гэты HTTPS-сайт патрабуе каб Referer "
+"загаловак быў адасланы вашым аглядальнікам, але гэтага не адбылося. Гэты "
+"загаловак неабходны для бяспекі, каб пераканацца, што ваш аглядальнік не "
+"ўзаламаны трэцімі асобамі."
+
+msgid ""
+"If you have configured your browser to disable “Referer” headers, please re-"
+"enable them, at least for this site, or for HTTPS connections, or for “same-"
+"origin” requests."
+msgstr ""
+"Калі вы сканфігуравалі ваш браўзэр так, каб ён не працаваў з “Referer” "
+"загалоўкамі, калі ласка дазвольце іх хаця б для гэтага сайту, ці для HTTPS "
+"злучэнняў, ці для 'same-origin' запытаў."
+
+msgid ""
+"If you are using the tag or "
+"including the “Referrer-Policy: no-referrer” header, please remove them. The "
+"CSRF protection requires the “Referer” header to do strict referer checking. "
+"If you’re concerned about privacy, use alternatives like for links to third-party sites."
+msgstr ""
+"Калі вы выкарыстоўваеце тэг "
+"ці дадалі загаловак “Referrer-Policy: no-referrer”, калі ласка выдаліце іх. "
+"CSRF абароне неабходны “Referer” загаловак для строгай праверкі. Калі Вы "
+"турбуецеся аб прыватнасці, выкарыстоўвайце альтэрнатывы, напрыклад , для спасылкі на сайты трэціх асоб."
+
+msgid ""
+"You are seeing this message because this site requires a CSRF cookie when "
+"submitting forms. This cookie is required for security reasons, to ensure "
+"that your browser is not being hijacked by third parties."
+msgstr ""
+"Вы бачыце гэта паведамленне, таму што гэты сайт патрабуе CSRF кукі для "
+"адсылкі формы. Гэтыя кукі неабходныя для бяспекі, каб пераканацца, што ваш "
+"браўзэр не ўзламаны трэцімі асобамі."
+
+msgid ""
+"If you have configured your browser to disable cookies, please re-enable "
+"them, at least for this site, or for “same-origin” requests."
+msgstr ""
+"Калі вы сканфігуравалі ваш браўзэр так, каб ён не працаваў з кукамі, калі "
+"ласка дазвольце іх хаця б для гэтага сайту ці для “same-origin” запытаў."
+
+msgid "More information is available with DEBUG=True."
+msgstr "Больш падрабязная інфармацыя даступная з DEBUG=True."
+
+msgid "No year specified"
+msgstr "Не пазначылі год"
+
+msgid "Date out of range"
+msgstr "Дата выходзіць за межы дыяпазону"
+
+msgid "No month specified"
+msgstr "Не пазначылі месяц"
+
+msgid "No day specified"
+msgstr "Не пазначылі дзень"
+
+msgid "No week specified"
+msgstr "Не пазначылі тыдзень"
+
+#, python-format
+msgid "No %(verbose_name_plural)s available"
+msgstr "Няма доступу да %(verbose_name_plural)s"
+
+#, python-format
+msgid ""
+"Future %(verbose_name_plural)s not available because %(class_name)s."
+"allow_future is False."
+msgstr ""
+"Няма доступу да %(verbose_name_plural)s, якія будуць, бо «%(class_name)s."
+"allow_future» мае значэньне «не сапраўдна»."
+
+#, python-format
+msgid "Invalid date string “%(datestr)s” given format “%(format)s”"
+msgstr "Радок даты “%(datestr)s” не адпавядае выгляду “%(format)s”"
+
+#, python-format
+msgid "No %(verbose_name)s found matching the query"
+msgstr "Па запыце не знайшлі ніводнага %(verbose_name)s"
+
+msgid "Page is not “last”, nor can it be converted to an int."
+msgstr ""
+"Нумар бачыны ня мае значэньня “last” і яго нельга ператварыць у цэлы лік."
+
+#, python-format
+msgid "Invalid page (%(page_number)s): %(message)s"
+msgstr "Няправільная старонка (%(page_number)s): %(message)s"
+
+#, python-format
+msgid "Empty list and “%(class_name)s.allow_empty” is False."
+msgstr ""
+"Сьпіс парожні, але “%(class_name)s.allow_empty” мае значэньне «не "
+"сапраўдна», што забараняе паказваць парожнія сьпісы."
+
+msgid "Directory indexes are not allowed here."
+msgstr "Не дазваляецца глядзець сьпіс файлаў каталёґа."
+
+#, python-format
+msgid "“%(path)s” does not exist"
+msgstr "“%(path)s” не існуе"
+
+#, python-format
+msgid "Index of %(directory)s"
+msgstr "Файлы каталёґа «%(directory)s»"
+
+msgid "The install worked successfully! Congratulations!"
+msgstr "Усталяванне прайшло паспяхова! Віншаванні!"
+
+#, python-format
+msgid ""
+"View release notes for Django %(version)s"
+msgstr ""
+"Паглядзець заўвагі да выпуску для Джангі "
+"%(version)s"
+
+#, python-format
+msgid ""
+"You are seeing this page because DEBUG=True is in your settings file and you have not "
+"configured any URLs."
+msgstr ""
+"Вы бачыце гэту старонку таму што DEBUG=True у вашым файле налад і вы не "
+"сканфігурыравалі ніякіх URL."
+
+msgid "Django Documentation"
+msgstr "Дакументацыя Джангі"
+
+msgid "Topics, references, & how-to’s"
+msgstr "Тэмы, спасылкі, & як зрабіць"
+
+msgid "Tutorial: A Polling App"
+msgstr "Падручнік: Дадатак для галасавання"
+
+msgid "Get started with Django"
+msgstr "Пачніце з Джангаю"
+
+msgid "Django Community"
+msgstr "Джанга супольнасць"
+
+msgid "Connect, get help, or contribute"
+msgstr "Злучайцеся, атрымлівайце дапамогу, ці спрыяйце"
diff --git a/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/bg/LC_MESSAGES/django.mo b/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/bg/LC_MESSAGES/django.mo
new file mode 100644
index 00000000..fb488b01
Binary files /dev/null and b/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/bg/LC_MESSAGES/django.mo differ
diff --git a/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/bg/LC_MESSAGES/django.po b/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/bg/LC_MESSAGES/django.po
new file mode 100644
index 00000000..de5ffbf1
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/bg/LC_MESSAGES/django.po
@@ -0,0 +1,1328 @@
+# This file is distributed under the same license as the Django package.
+#
+# Translators:
+# arneatec , 2022
+# Boris Chervenkov , 2012
+# Claude Paroz , 2020
+# Jannis Leidel , 2011
+# Lyuboslav Petrov , 2014
+# Todor Lubenov , 2013-2015
+# Venelin Stoykov , 2015-2017
+# vestimir , 2014
+# Alexander Atanasov , 2012
+msgid ""
+msgstr ""
+"Project-Id-Version: django\n"
+"Report-Msgid-Bugs-To: \n"
+"POT-Creation-Date: 2022-05-17 05:23-0500\n"
+"PO-Revision-Date: 2022-05-25 06:49+0000\n"
+"Last-Translator: arneatec , 2022\n"
+"Language-Team: Bulgarian (http://www.transifex.com/django/django/language/"
+"bg/)\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Language: bg\n"
+"Plural-Forms: nplurals=2; plural=(n != 1);\n"
+
+msgid "Afrikaans"
+msgstr "африкаански"
+
+msgid "Arabic"
+msgstr "арабски език"
+
+msgid "Algerian Arabic"
+msgstr "алжирски арабски"
+
+msgid "Asturian"
+msgstr "Астурийски"
+
+msgid "Azerbaijani"
+msgstr "Азербайджански език"
+
+msgid "Bulgarian"
+msgstr "български език"
+
+msgid "Belarusian"
+msgstr "Беларуски"
+
+msgid "Bengali"
+msgstr "бенгалски език"
+
+msgid "Breton"
+msgstr "Бретон"
+
+msgid "Bosnian"
+msgstr "босненски език"
+
+msgid "Catalan"
+msgstr "каталански"
+
+msgid "Czech"
+msgstr "чешки"
+
+msgid "Welsh"
+msgstr "уелски"
+
+msgid "Danish"
+msgstr "датски"
+
+msgid "German"
+msgstr "немски"
+
+msgid "Lower Sorbian"
+msgstr "долносорбски"
+
+msgid "Greek"
+msgstr "гръцки"
+
+msgid "English"
+msgstr "английски"
+
+msgid "Australian English"
+msgstr "австралийски английски"
+
+msgid "British English"
+msgstr "британски английски"
+
+msgid "Esperanto"
+msgstr "есперанто"
+
+msgid "Spanish"
+msgstr "испански"
+
+msgid "Argentinian Spanish"
+msgstr "кастилски"
+
+msgid "Colombian Spanish"
+msgstr "колумбийски испански"
+
+msgid "Mexican Spanish"
+msgstr "мексикански испански"
+
+msgid "Nicaraguan Spanish"
+msgstr "никарагуански испански"
+
+msgid "Venezuelan Spanish"
+msgstr "венецуелски испански"
+
+msgid "Estonian"
+msgstr "естонски"
+
+msgid "Basque"
+msgstr "баски"
+
+msgid "Persian"
+msgstr "персийски"
+
+msgid "Finnish"
+msgstr "финландски"
+
+msgid "French"
+msgstr "френски"
+
+msgid "Frisian"
+msgstr "фризийски"
+
+msgid "Irish"
+msgstr "ирландски"
+
+msgid "Scottish Gaelic"
+msgstr "шотландски галски"
+
+msgid "Galician"
+msgstr "галицейски"
+
+msgid "Hebrew"
+msgstr "иврит"
+
+msgid "Hindi"
+msgstr "хинди"
+
+msgid "Croatian"
+msgstr "хърватски"
+
+msgid "Upper Sorbian"
+msgstr "горносорбски"
+
+msgid "Hungarian"
+msgstr "унгарски"
+
+msgid "Armenian"
+msgstr "арменски"
+
+msgid "Interlingua"
+msgstr "интерлингва"
+
+msgid "Indonesian"
+msgstr "индонезийски"
+
+msgid "Igbo"
+msgstr "игбо"
+
+msgid "Ido"
+msgstr "идо"
+
+msgid "Icelandic"
+msgstr "исландски"
+
+msgid "Italian"
+msgstr "италиански"
+
+msgid "Japanese"
+msgstr "японски"
+
+msgid "Georgian"
+msgstr "грузински"
+
+msgid "Kabyle"
+msgstr "кабилски"
+
+msgid "Kazakh"
+msgstr "казахски"
+
+msgid "Khmer"
+msgstr "кхмерски"
+
+msgid "Kannada"
+msgstr "каннада"
+
+msgid "Korean"
+msgstr "корейски"
+
+msgid "Kyrgyz"
+msgstr "киргизки"
+
+msgid "Luxembourgish"
+msgstr "люксембургски"
+
+msgid "Lithuanian"
+msgstr "литовски"
+
+msgid "Latvian"
+msgstr "латвийски"
+
+msgid "Macedonian"
+msgstr "македонски"
+
+msgid "Malayalam"
+msgstr "малаялам"
+
+msgid "Mongolian"
+msgstr "монголски"
+
+msgid "Marathi"
+msgstr "марати"
+
+msgid "Malay"
+msgstr "малайски"
+
+msgid "Burmese"
+msgstr "бирмански"
+
+msgid "Norwegian Bokmål"
+msgstr "норвежки букмол"
+
+msgid "Nepali"
+msgstr "непалски"
+
+msgid "Dutch"
+msgstr "нидерландски"
+
+msgid "Norwegian Nynorsk"
+msgstr "съвременен норвежки"
+
+msgid "Ossetic"
+msgstr "осетски"
+
+msgid "Punjabi"
+msgstr "панджабски"
+
+msgid "Polish"
+msgstr "полски"
+
+msgid "Portuguese"
+msgstr "португалски"
+
+msgid "Brazilian Portuguese"
+msgstr "бразилски португалски"
+
+msgid "Romanian"
+msgstr "румънски"
+
+msgid "Russian"
+msgstr "руски"
+
+msgid "Slovak"
+msgstr "словашки"
+
+msgid "Slovenian"
+msgstr "словенски"
+
+msgid "Albanian"
+msgstr "албански"
+
+msgid "Serbian"
+msgstr "сръбски"
+
+msgid "Serbian Latin"
+msgstr "сръбски - латиница"
+
+msgid "Swedish"
+msgstr "шведски"
+
+msgid "Swahili"
+msgstr "суахили"
+
+msgid "Tamil"
+msgstr "тамилски"
+
+msgid "Telugu"
+msgstr "телугу"
+
+msgid "Tajik"
+msgstr "таджикски"
+
+msgid "Thai"
+msgstr "тайландски"
+
+msgid "Turkmen"
+msgstr "туркменски"
+
+msgid "Turkish"
+msgstr "турски"
+
+msgid "Tatar"
+msgstr "татарски"
+
+msgid "Udmurt"
+msgstr "удмурт"
+
+msgid "Ukrainian"
+msgstr "украински"
+
+msgid "Urdu"
+msgstr "урду"
+
+msgid "Uzbek"
+msgstr "узбекски"
+
+msgid "Vietnamese"
+msgstr "виетнамски"
+
+msgid "Simplified Chinese"
+msgstr "китайски"
+
+msgid "Traditional Chinese"
+msgstr "традиционен китайски"
+
+msgid "Messages"
+msgstr "Съобщения"
+
+msgid "Site Maps"
+msgstr "Карти на сайта"
+
+msgid "Static Files"
+msgstr "Статични файлове"
+
+msgid "Syndication"
+msgstr "Синдикация"
+
+#. Translators: String used to replace omitted page numbers in elided page
+#. range generated by paginators, e.g. [1, 2, '…', 5, 6, 7, '…', 9, 10].
+msgid "…"
+msgstr "…"
+
+msgid "That page number is not an integer"
+msgstr "Номерът на страницата не е цяло число"
+
+msgid "That page number is less than 1"
+msgstr "Номерът на страницата е по-малък от 1"
+
+msgid "That page contains no results"
+msgstr "В тази страница няма резултати"
+
+msgid "Enter a valid value."
+msgstr "Въведете валидна стойност. "
+
+msgid "Enter a valid URL."
+msgstr "Въведете валиден URL адрес."
+
+msgid "Enter a valid integer."
+msgstr "Въведете валидно цяло число."
+
+msgid "Enter a valid email address."
+msgstr "Въведете валиден имейл адрес."
+
+#. Translators: "letters" means latin letters: a-z and A-Z.
+msgid ""
+"Enter a valid “slug” consisting of letters, numbers, underscores or hyphens."
+msgstr ""
+"Въведете валиден 'слъг', състоящ се от букви, цифри, тирета или долни тирета."
+
+msgid ""
+"Enter a valid “slug” consisting of Unicode letters, numbers, underscores, or "
+"hyphens."
+msgstr ""
+"Въведете валиден 'слъг', състоящ се от Уникод букви, цифри, тирета или долни "
+"тирета."
+
+msgid "Enter a valid IPv4 address."
+msgstr "Въведете валиден IPv4 адрес."
+
+msgid "Enter a valid IPv6 address."
+msgstr "Въведете валиден IPv6 адрес."
+
+msgid "Enter a valid IPv4 or IPv6 address."
+msgstr "Въведете валиден IPv4 или IPv6 адрес."
+
+msgid "Enter only digits separated by commas."
+msgstr "Въведете само еднозначни числа, разделени със запетая. "
+
+#, python-format
+msgid "Ensure this value is %(limit_value)s (it is %(show_value)s)."
+msgstr "Уверете се, че тази стойност е %(limit_value)s (тя е %(show_value)s)."
+
+#, python-format
+msgid "Ensure this value is less than or equal to %(limit_value)s."
+msgstr "Уверете се, че тази стойност е по-малка или равна на %(limit_value)s ."
+
+#, python-format
+msgid "Ensure this value is greater than or equal to %(limit_value)s."
+msgstr ""
+"Уверете се, че тази стойност е по-голяма или равна на %(limit_value)s ."
+
+#, python-format
+msgid "Ensure this value is a multiple of step size %(limit_value)s."
+msgstr "Уверете се, че стойността е кратна на стъпката %(limit_value)s."
+
+#, python-format
+msgid ""
+"Ensure this value has at least %(limit_value)d character (it has "
+"%(show_value)d)."
+msgid_plural ""
+"Ensure this value has at least %(limit_value)d characters (it has "
+"%(show_value)d)."
+msgstr[0] ""
+"Уверете се, че тази стойност е най-малко %(limit_value)d знака (тя има "
+"%(show_value)d )."
+msgstr[1] ""
+"Уверете се, че тази стойност е най-малко %(limit_value)d знака (тя има "
+"%(show_value)d)."
+
+#, python-format
+msgid ""
+"Ensure this value has at most %(limit_value)d character (it has "
+"%(show_value)d)."
+msgid_plural ""
+"Ensure this value has at most %(limit_value)d characters (it has "
+"%(show_value)d)."
+msgstr[0] ""
+"Уверете се, тази стойност има най-много %(limit_value)d знака (тя има "
+"%(show_value)d)."
+msgstr[1] ""
+"Уверете се, че тази стойност има най-много %(limit_value)d знака (тя има "
+"%(show_value)d)."
+
+msgid "Enter a number."
+msgstr "Въведете число."
+
+#, python-format
+msgid "Ensure that there are no more than %(max)s digit in total."
+msgid_plural "Ensure that there are no more than %(max)s digits in total."
+msgstr[0] "Уверете се, че има не повече от %(max)s цифри общо."
+msgstr[1] "Уверете се, че има не повече от %(max)s цифри общо."
+
+#, python-format
+msgid "Ensure that there are no more than %(max)s decimal place."
+msgid_plural "Ensure that there are no more than %(max)s decimal places."
+msgstr[0] ""
+"Уверете се, че има не повече от%(max)s знак след десетичната запетая."
+msgstr[1] ""
+"Уверете се, че има не повече от %(max)s знака след десетичната запетая."
+
+#, python-format
+msgid ""
+"Ensure that there are no more than %(max)s digit before the decimal point."
+msgid_plural ""
+"Ensure that there are no more than %(max)s digits before the decimal point."
+msgstr[0] ""
+"Уверете се, че има не повече от %(max)s цифра преди десетичната запетая."
+msgstr[1] ""
+"Уверете се, че има не повече от %(max)s цифри преди десетичната запетая."
+
+#, python-format
+msgid ""
+"File extension “%(extension)s” is not allowed. Allowed extensions are: "
+"%(allowed_extensions)s."
+msgstr ""
+"Не са разрешени файлове с раширение \"%(extension)s\". Позволените "
+"разширения са: %(allowed_extensions)s."
+
+msgid "Null characters are not allowed."
+msgstr "Празни знаци не са разрешени."
+
+msgid "and"
+msgstr "и"
+
+#, python-format
+msgid "%(model_name)s with this %(field_labels)s already exists."
+msgstr "%(model_name)s с този %(field_labels)s вече съществува."
+
+#, python-format
+msgid "Constraint “%(name)s” is violated."
+msgstr "Ограничението “%(name)s” е нарушено."
+
+#, python-format
+msgid "Value %(value)r is not a valid choice."
+msgstr "Стойността %(value)r не е валиден избор."
+
+msgid "This field cannot be null."
+msgstr "Това поле не може да има празна стойност."
+
+msgid "This field cannot be blank."
+msgstr "Това поле не може да е празно."
+
+#, python-format
+msgid "%(model_name)s with this %(field_label)s already exists."
+msgstr "%(model_name)s с този %(field_label)s вече съществува."
+
+#. Translators: The 'lookup_type' is one of 'date', 'year' or
+#. 'month'. Eg: "Title must be unique for pub_date year"
+#, python-format
+msgid ""
+"%(field_label)s must be unique for %(date_field_label)s %(lookup_type)s."
+msgstr ""
+"%(field_label)s трябва да е уникално за %(date_field_label)s %(lookup_type)s."
+
+#, python-format
+msgid "Field of type: %(field_type)s"
+msgstr "Поле от тип: %(field_type)s"
+
+#, python-format
+msgid "“%(value)s” value must be either True or False."
+msgstr "Стойността на \"%(value)s\" трябва да бъде или True, или False."
+
+#, python-format
+msgid "“%(value)s” value must be either True, False, or None."
+msgstr "Стойност \"%(value)s\" трябва да бъде или True, или False или None."
+
+msgid "Boolean (Either True or False)"
+msgstr "Булево (True или False)"
+
+#, python-format
+msgid "String (up to %(max_length)s)"
+msgstr "Символен низ (до %(max_length)s символа)"
+
+msgid "Comma-separated integers"
+msgstr "Цели числа, разделени с запетая"
+
+#, python-format
+msgid ""
+"“%(value)s” value has an invalid date format. It must be in YYYY-MM-DD "
+"format."
+msgstr ""
+"Стойността \"%(value)s\" е с невалиден формат за дата. Тя трябва да бъде в "
+"ГГГГ-ММ-ДД формат."
+
+#, python-format
+msgid ""
+"“%(value)s” value has the correct format (YYYY-MM-DD) but it is an invalid "
+"date."
+msgstr ""
+"Стойността \"%(value)s\" е в правилния формат (ГГГГ-ММ-ДД), но самата дата е "
+"невалидна."
+
+msgid "Date (without time)"
+msgstr "Дата (без час)"
+
+#, python-format
+msgid ""
+"“%(value)s” value has an invalid format. It must be in YYYY-MM-DD HH:MM[:ss[."
+"uuuuuu]][TZ] format."
+msgstr ""
+"Стойността '%(value)s' е с невалиден формат. Трябва да бъде във формат ГГГГ-"
+"ММ-ДД ЧЧ:ММ[:сс[.uuuuuu]][TZ]"
+
+#, python-format
+msgid ""
+"“%(value)s” value has the correct format (YYYY-MM-DD HH:MM[:ss[.uuuuuu]]"
+"[TZ]) but it is an invalid date/time."
+msgstr ""
+"Стойността '%(value)s' е с правилен формат ( ГГГГ-ММ-ДД ЧЧ:ММ[:сс[.μμμμμμ]]"
+"[TZ]), но датата/часът са невалидни"
+
+msgid "Date (with time)"
+msgstr "Дата (и час)"
+
+#, python-format
+msgid "“%(value)s” value must be a decimal number."
+msgstr "Стойността \"%(value)s\" трябва да е десетично число."
+
+msgid "Decimal number"
+msgstr "Десетична дроб"
+
+#, python-format
+msgid ""
+"“%(value)s” value has an invalid format. It must be in [DD] [[HH:]MM:]ss[."
+"uuuuuu] format."
+msgstr ""
+"Стойността “%(value)s” е с невалиден формат. Трябва да бъде във формат [ДД] "
+"[[ЧЧ:]ММ:]сс[.uuuuuu] format."
+
+msgid "Duration"
+msgstr "Продължителност"
+
+msgid "Email address"
+msgstr "Имейл адрес"
+
+msgid "File path"
+msgstr "Път към файл"
+
+#, python-format
+msgid "“%(value)s” value must be a float."
+msgstr "Стойността '%(value)s' трябва да е число с плаваща запетая."
+
+msgid "Floating point number"
+msgstr "Число с плаваща запетая"
+
+#, python-format
+msgid "“%(value)s” value must be an integer."
+msgstr "Стойността \"%(value)s\" трябва да е цяло число."
+
+msgid "Integer"
+msgstr "Цяло число"
+
+msgid "Big (8 byte) integer"
+msgstr "Голямо (8 байта) цяло число"
+
+msgid "Small integer"
+msgstr "2 байта цяло число"
+
+msgid "IPv4 address"
+msgstr "IPv4 адрес"
+
+msgid "IP address"
+msgstr "IP адрес"
+
+#, python-format
+msgid "“%(value)s” value must be either None, True or False."
+msgstr "Стойността '%(value)s' трябва да бъде None, True или False."
+
+msgid "Boolean (Either True, False or None)"
+msgstr "булев (възможните стойности са True, False или None)"
+
+msgid "Positive big integer"
+msgstr "Положително голямо цяло число."
+
+msgid "Positive integer"
+msgstr "Положително цяло число"
+
+msgid "Positive small integer"
+msgstr "Положително 2 байта цяло число"
+
+#, python-format
+msgid "Slug (up to %(max_length)s)"
+msgstr "Слъг (до %(max_length)s )"
+
+msgid "Text"
+msgstr "Текст"
+
+#, python-format
+msgid ""
+"“%(value)s” value has an invalid format. It must be in HH:MM[:ss[.uuuuuu]] "
+"format."
+msgstr ""
+"Стойността \"%(value)s\" е с невалиден формат. Тя трябва да бъде в ЧЧ:ММ [:"
+"сс[.μμμμμμ]]"
+
+#, python-format
+msgid ""
+"“%(value)s” value has the correct format (HH:MM[:ss[.uuuuuu]]) but it is an "
+"invalid time."
+msgstr ""
+"Стойността \"%(value)s\" е в правилния формат (ЧЧ:ММ [:сс[.μμμμμμ]]), но "
+"часът е невалиден."
+
+msgid "Time"
+msgstr "Време"
+
+msgid "URL"
+msgstr "URL адрес"
+
+msgid "Raw binary data"
+msgstr "сурови двоични данни"
+
+#, python-format
+msgid "“%(value)s” is not a valid UUID."
+msgstr "\"%(value)s\" не е валиден UUID."
+
+msgid "Universally unique identifier"
+msgstr "Универсално уникален идентификатор"
+
+msgid "File"
+msgstr "Файл"
+
+msgid "Image"
+msgstr "Изображение"
+
+msgid "A JSON object"
+msgstr "Обект във формат JSON"
+
+msgid "Value must be valid JSON."
+msgstr "Стойността трябва да е валиден JSON."
+
+#, python-format
+msgid "%(model)s instance with %(field)s %(value)r does not exist."
+msgstr "Инстанция на %(model)s с %(field)s %(value)r не съществува."
+
+msgid "Foreign Key (type determined by related field)"
+msgstr "Външен ключ (тип, определен от свързаното поле)"
+
+msgid "One-to-one relationship"
+msgstr "едно-към-едно релация "
+
+#, python-format
+msgid "%(from)s-%(to)s relationship"
+msgstr "%(from)s-%(to)s релация"
+
+#, python-format
+msgid "%(from)s-%(to)s relationships"
+msgstr "%(from)s-%(to)s релации"
+
+msgid "Many-to-many relationship"
+msgstr "Много-към-много релация"
+
+#. Translators: If found as last label character, these punctuation
+#. characters will prevent the default label_suffix to be appended to the
+#. label
+msgid ":?.!"
+msgstr ":?.!"
+
+msgid "This field is required."
+msgstr "Това поле е задължително."
+
+msgid "Enter a whole number."
+msgstr "Въведете цяло число. "
+
+msgid "Enter a valid date."
+msgstr "Въведете валидна дата."
+
+msgid "Enter a valid time."
+msgstr "Въведете валиден час."
+
+msgid "Enter a valid date/time."
+msgstr "Въведете валидна дата/час. "
+
+msgid "Enter a valid duration."
+msgstr "Въведете валидна продължителност."
+
+#, python-brace-format
+msgid "The number of days must be between {min_days} and {max_days}."
+msgstr "Броят на дните трябва да е между {min_days} и {max_days}."
+
+msgid "No file was submitted. Check the encoding type on the form."
+msgstr "Няма изпратен файл. Проверете типа кодиране на формата. "
+
+msgid "No file was submitted."
+msgstr "Няма изпратен файл."
+
+msgid "The submitted file is empty."
+msgstr "Изпратеният файл е празен. "
+
+#, python-format
+msgid "Ensure this filename has at most %(max)d character (it has %(length)d)."
+msgid_plural ""
+"Ensure this filename has at most %(max)d characters (it has %(length)d)."
+msgstr[0] "Уверете се, това име е най-много %(max)d знака (то има %(length)d)."
+msgstr[1] ""
+"Уверете се, че това файлово име има най-много %(max)d знаци (има "
+"%(length)d)."
+
+msgid "Please either submit a file or check the clear checkbox, not both."
+msgstr ""
+"Моля, или пратете файл или маркирайте полето за изчистване, но не и двете."
+
+msgid ""
+"Upload a valid image. The file you uploaded was either not an image or a "
+"corrupted image."
+msgstr ""
+"Качете валидно изображение. Файлът, който сте качили или не е изображение, "
+"или е повреден. "
+
+#, python-format
+msgid "Select a valid choice. %(value)s is not one of the available choices."
+msgstr "Направете валиден избор. %(value)s не е един от възможните избори."
+
+msgid "Enter a list of values."
+msgstr "Въведете списък от стойности"
+
+msgid "Enter a complete value."
+msgstr "Въведете пълна стойност."
+
+msgid "Enter a valid UUID."
+msgstr "Въведете валиден UUID."
+
+msgid "Enter a valid JSON."
+msgstr "Въведете валиден JSON."
+
+#. Translators: This is the default suffix added to form field labels
+msgid ":"
+msgstr ":"
+
+#, python-format
+msgid "(Hidden field %(name)s) %(error)s"
+msgstr "(Скрито поле %(name)s) %(error)s"
+
+#, python-format
+msgid ""
+"ManagementForm data is missing or has been tampered with. Missing fields: "
+"%(field_names)s. You may need to file a bug report if the issue persists."
+msgstr ""
+"ManagementForm данните липсват или са променяни неправомерно. Липсващи "
+"полета: %(field_names)s. Трябва да изпратите уведомление за бъг, ако този "
+"проблем продължава."
+
+#, python-format
+msgid "Please submit at most %(num)d form."
+msgid_plural "Please submit at most %(num)d forms."
+msgstr[0] "Моля изпратете не повече от %(num)d формуляр."
+msgstr[1] "Моля изпратете не повече от %(num)d формуляра."
+
+#, python-format
+msgid "Please submit at least %(num)d form."
+msgid_plural "Please submit at least %(num)d forms."
+msgstr[0] "Моля изпратете поне %(num)d формуляр."
+msgstr[1] "Моля изпратете поне %(num)d формуляра."
+
+msgid "Order"
+msgstr "Ред"
+
+msgid "Delete"
+msgstr "Изтрий"
+
+#, python-format
+msgid "Please correct the duplicate data for %(field)s."
+msgstr "Моля, коригирайте дублираните данни за %(field)s."
+
+#, python-format
+msgid "Please correct the duplicate data for %(field)s, which must be unique."
+msgstr ""
+"Моля, коригирайте дублираните данни за %(field)s, които трябва да са "
+"уникални."
+
+#, python-format
+msgid ""
+"Please correct the duplicate data for %(field_name)s which must be unique "
+"for the %(lookup)s in %(date_field)s."
+msgstr ""
+"Моля, коригирайте дублираните данни за %(field_name)s, които трябва да са "
+"уникални за %(lookup)s в %(date_field)s."
+
+msgid "Please correct the duplicate values below."
+msgstr "Моля, коригирайте повтарящите се стойности по-долу."
+
+msgid "The inline value did not match the parent instance."
+msgstr "Стойността в реда не отговаря на родителската инстанция."
+
+msgid "Select a valid choice. That choice is not one of the available choices."
+msgstr "Направете валиден избор. Този не е един от възможните избори."
+
+#, python-format
+msgid "“%(pk)s” is not a valid value."
+msgstr "“%(pk)s” не е валидна стойност."
+
+#, python-format
+msgid ""
+"%(datetime)s couldn’t be interpreted in time zone %(current_timezone)s; it "
+"may be ambiguous or it may not exist."
+msgstr ""
+"%(datetime)s не може да се интерпретира в часова зона %(current_timezone)s; "
+"вероятно стойността е нееднозначна или не съществува изобщо."
+
+msgid "Clear"
+msgstr "Изчисти"
+
+msgid "Currently"
+msgstr "Сега"
+
+msgid "Change"
+msgstr "Промени"
+
+msgid "Unknown"
+msgstr "Неизвестно"
+
+msgid "Yes"
+msgstr "Да"
+
+msgid "No"
+msgstr "Не"
+
+#. Translators: Please do not add spaces around commas.
+msgid "yes,no,maybe"
+msgstr "да,не,може би"
+
+#, python-format
+msgid "%(size)d byte"
+msgid_plural "%(size)d bytes"
+msgstr[0] "%(size)d байт"
+msgstr[1] "%(size)d байта"
+
+#, python-format
+msgid "%s KB"
+msgstr "%s KБ"
+
+#, python-format
+msgid "%s MB"
+msgstr "%s МБ"
+
+#, python-format
+msgid "%s GB"
+msgstr "%s ГБ"
+
+#, python-format
+msgid "%s TB"
+msgstr "%s ТБ"
+
+#, python-format
+msgid "%s PB"
+msgstr "%s ПБ"
+
+msgid "p.m."
+msgstr "след обяд"
+
+msgid "a.m."
+msgstr "преди обяд"
+
+msgid "PM"
+msgstr "след обяд"
+
+msgid "AM"
+msgstr "преди обяд"
+
+msgid "midnight"
+msgstr "полунощ"
+
+msgid "noon"
+msgstr "обяд"
+
+msgid "Monday"
+msgstr "понеделник"
+
+msgid "Tuesday"
+msgstr "вторник"
+
+msgid "Wednesday"
+msgstr "сряда"
+
+msgid "Thursday"
+msgstr "четвъртък"
+
+msgid "Friday"
+msgstr "петък"
+
+msgid "Saturday"
+msgstr "събота"
+
+msgid "Sunday"
+msgstr "неделя"
+
+msgid "Mon"
+msgstr "Пон"
+
+msgid "Tue"
+msgstr "Вт"
+
+msgid "Wed"
+msgstr "Ср"
+
+msgid "Thu"
+msgstr "Чет"
+
+msgid "Fri"
+msgstr "Пет"
+
+msgid "Sat"
+msgstr "Съб"
+
+msgid "Sun"
+msgstr "Нед"
+
+msgid "January"
+msgstr "Януари"
+
+msgid "February"
+msgstr "Февруари"
+
+msgid "March"
+msgstr "Март"
+
+msgid "April"
+msgstr "Април"
+
+msgid "May"
+msgstr "Май"
+
+msgid "June"
+msgstr "Юни"
+
+msgid "July"
+msgstr "Юли"
+
+msgid "August"
+msgstr "Август"
+
+msgid "September"
+msgstr "Септември"
+
+msgid "October"
+msgstr "Октомври"
+
+msgid "November"
+msgstr "Ноември"
+
+msgid "December"
+msgstr "Декември"
+
+msgid "jan"
+msgstr "ян"
+
+msgid "feb"
+msgstr "фев"
+
+msgid "mar"
+msgstr "мар"
+
+msgid "apr"
+msgstr "апр"
+
+msgid "may"
+msgstr "май"
+
+msgid "jun"
+msgstr "юни"
+
+msgid "jul"
+msgstr "юли"
+
+msgid "aug"
+msgstr "авг"
+
+msgid "sep"
+msgstr "сеп"
+
+msgid "oct"
+msgstr "окт"
+
+msgid "nov"
+msgstr "ноем"
+
+msgid "dec"
+msgstr "дек"
+
+msgctxt "abbrev. month"
+msgid "Jan."
+msgstr "Ян."
+
+msgctxt "abbrev. month"
+msgid "Feb."
+msgstr "Фев."
+
+msgctxt "abbrev. month"
+msgid "March"
+msgstr "Март"
+
+msgctxt "abbrev. month"
+msgid "April"
+msgstr "Апр."
+
+msgctxt "abbrev. month"
+msgid "May"
+msgstr "Май"
+
+msgctxt "abbrev. month"
+msgid "June"
+msgstr "Юни"
+
+msgctxt "abbrev. month"
+msgid "July"
+msgstr "Юли"
+
+msgctxt "abbrev. month"
+msgid "Aug."
+msgstr "Авг."
+
+msgctxt "abbrev. month"
+msgid "Sept."
+msgstr "Септ."
+
+msgctxt "abbrev. month"
+msgid "Oct."
+msgstr "Окт."
+
+msgctxt "abbrev. month"
+msgid "Nov."
+msgstr "Ноем."
+
+msgctxt "abbrev. month"
+msgid "Dec."
+msgstr "Дек."
+
+msgctxt "alt. month"
+msgid "January"
+msgstr "Януари"
+
+msgctxt "alt. month"
+msgid "February"
+msgstr "Февруари"
+
+msgctxt "alt. month"
+msgid "March"
+msgstr "Март"
+
+msgctxt "alt. month"
+msgid "April"
+msgstr "Април"
+
+msgctxt "alt. month"
+msgid "May"
+msgstr "Май"
+
+msgctxt "alt. month"
+msgid "June"
+msgstr "Юни"
+
+msgctxt "alt. month"
+msgid "July"
+msgstr "Юли"
+
+msgctxt "alt. month"
+msgid "August"
+msgstr "Август"
+
+msgctxt "alt. month"
+msgid "September"
+msgstr "Септември"
+
+msgctxt "alt. month"
+msgid "October"
+msgstr "Октомври"
+
+msgctxt "alt. month"
+msgid "November"
+msgstr "Ноември"
+
+msgctxt "alt. month"
+msgid "December"
+msgstr "Декември"
+
+msgid "This is not a valid IPv6 address."
+msgstr "Това не е валиден IPv6 адрес."
+
+#, python-format
+msgctxt "String to return when truncating text"
+msgid "%(truncated_text)s…"
+msgstr "%(truncated_text)s…"
+
+msgid "or"
+msgstr "или"
+
+#. Translators: This string is used as a separator between list elements
+msgid ", "
+msgstr ", "
+
+#, python-format
+msgid "%(num)d year"
+msgid_plural "%(num)d years"
+msgstr[0] "%(num)d година"
+msgstr[1] "%(num)d години"
+
+#, python-format
+msgid "%(num)d month"
+msgid_plural "%(num)d months"
+msgstr[0] "%(num)d месец"
+msgstr[1] "%(num)d месеца"
+
+#, python-format
+msgid "%(num)d week"
+msgid_plural "%(num)d weeks"
+msgstr[0] "%(num)d седмица"
+msgstr[1] "%(num)d седмици"
+
+#, python-format
+msgid "%(num)d day"
+msgid_plural "%(num)d days"
+msgstr[0] "%(num)d ден"
+msgstr[1] "%(num)d дни"
+
+#, python-format
+msgid "%(num)d hour"
+msgid_plural "%(num)d hours"
+msgstr[0] "%(num)d час"
+msgstr[1] "%(num)d часа"
+
+#, python-format
+msgid "%(num)d minute"
+msgid_plural "%(num)d minutes"
+msgstr[0] "%(num)d минута"
+msgstr[1] "%(num)d минути"
+
+msgid "Forbidden"
+msgstr "Забранен"
+
+msgid "CSRF verification failed. Request aborted."
+msgstr "CSRF проверката се провали. Заявката прекратена."
+
+msgid ""
+"You are seeing this message because this HTTPS site requires a “Referer "
+"header” to be sent by your web browser, but none was sent. This header is "
+"required for security reasons, to ensure that your browser is not being "
+"hijacked by third parties."
+msgstr ""
+"Вие виждате това съобщение, защото този HTTPS сайт изисква да бъде изпратен "
+"'Referer header' от вашият уеб браузър, но такъв не бе изпратен. Този "
+"header е задължителен от съображения за сигурност, за да се гарантира, че "
+"вашият браузър не е компрометиран от трети страни."
+
+msgid ""
+"If you have configured your browser to disable “Referer” headers, please re-"
+"enable them, at least for this site, or for HTTPS connections, or for “same-"
+"origin” requests."
+msgstr ""
+"Ако сте настроили вашия браузър да деактивира 'Referer' headers, моля да ги "
+"активирате отново, поне за този сайт, или за HTTPS връзки, или за 'same-"
+"origin' заявки."
+
+msgid ""
+"If you are using the tag or "
+"including the “Referrer-Policy: no-referrer” header, please remove them. The "
+"CSRF protection requires the “Referer” header to do strict referer checking. "
+"If you’re concerned about privacy, use alternatives like for links to third-party sites."
+msgstr ""
+"Ако използвате таг или "
+"включвате “Referrer-Policy: no-referrer” header, моля премахнете ги. CSRF "
+"защитата изисква “Referer” header, за да извърши стриктна проверка на "
+"изпращача. Ако сте притеснени за поверителността, използвайте алтернативи "
+"като за връзки към сайтове на трети страни."
+
+msgid ""
+"You are seeing this message because this site requires a CSRF cookie when "
+"submitting forms. This cookie is required for security reasons, to ensure "
+"that your browser is not being hijacked by third parties."
+msgstr ""
+"Вие виждате това съобщение, защото този сайт изисква CSRF бисквитка, когато "
+"се подават формуляри. Тази бисквитка е задължителна от съображения за "
+"сигурност, за да се гарантира, че вашият браузър не е компрометиран от трети "
+"страни."
+
+msgid ""
+"If you have configured your browser to disable cookies, please re-enable "
+"them, at least for this site, or for “same-origin” requests."
+msgstr ""
+"Ако сте конфигурирали браузъра си да забрани бисквитките, моля да ги "
+"активирате отново, поне за този сайт, или за \"same-origin\" заявки."
+
+msgid "More information is available with DEBUG=True."
+msgstr "Повече информация е на разположение с DEBUG=True."
+
+msgid "No year specified"
+msgstr "Не е посочена година"
+
+msgid "Date out of range"
+msgstr "Датата е в невалиден диапазон"
+
+msgid "No month specified"
+msgstr "Не е посочен месец"
+
+msgid "No day specified"
+msgstr "Не е посочен ден"
+
+msgid "No week specified"
+msgstr "Не е посочена седмица"
+
+#, python-format
+msgid "No %(verbose_name_plural)s available"
+msgstr "Няма достъпни %(verbose_name_plural)s"
+
+#, python-format
+msgid ""
+"Future %(verbose_name_plural)s not available because %(class_name)s."
+"allow_future is False."
+msgstr ""
+"Бъдещо %(verbose_name_plural)s е недостъпно, тъй като %(class_name)s."
+"allow_future е False."
+
+#, python-format
+msgid "Invalid date string “%(datestr)s” given format “%(format)s”"
+msgstr ""
+"Невалидна текстова стойност на датата “%(datestr)s” при зададен формат "
+"“%(format)s”"
+
+#, python-format
+msgid "No %(verbose_name)s found matching the query"
+msgstr "Няма %(verbose_name)s, съвпадащи със заявката"
+
+msgid "Page is not “last”, nor can it be converted to an int."
+msgstr ""
+"Страницата не е \"последна\", нито може да се преобразува в цяло число."
+
+#, python-format
+msgid "Invalid page (%(page_number)s): %(message)s"
+msgstr "Невалидна страница (%(page_number)s): %(message)s"
+
+#, python-format
+msgid "Empty list and “%(class_name)s.allow_empty” is False."
+msgstr "Празен списък и \"%(class_name)s.allow_empty\" e False."
+
+msgid "Directory indexes are not allowed here."
+msgstr "Тук не е позволено индексиране на директория."
+
+#, python-format
+msgid "“%(path)s” does not exist"
+msgstr "\"%(path)s\" не съществува"
+
+#, python-format
+msgid "Index of %(directory)s"
+msgstr "Индекс %(directory)s"
+
+msgid "The install worked successfully! Congratulations!"
+msgstr "Инсталацията Ви заработи успешно! Поздравления!"
+
+#, python-format
+msgid ""
+"View release notes for Django %(version)s"
+msgstr ""
+"Разгледайте release notes за Django %(version)s"
+
+#, python-format
+msgid ""
+"You are seeing this page because DEBUG=True is in your settings file and you have not "
+"configured any URLs."
+msgstr ""
+"Вие виждате тази страница, защото DEBUG=True е във вашия файл с настройки и не сте "
+"конфигурирали никакви URL-и."
+
+msgid "Django Documentation"
+msgstr "Django документация"
+
+msgid "Topics, references, & how-to’s"
+msgstr "Теми, наръчници, & друга документация"
+
+msgid "Tutorial: A Polling App"
+msgstr "Урок: Приложение за анкета"
+
+msgid "Get started with Django"
+msgstr "Започнете с Django"
+
+msgid "Django Community"
+msgstr "Django общност"
+
+msgid "Connect, get help, or contribute"
+msgstr "Свържете се, получете помощ или допринесете"
diff --git a/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/bg/__init__.py b/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/bg/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/bg/formats.py b/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/bg/formats.py
new file mode 100644
index 00000000..ee90c5b0
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/bg/formats.py
@@ -0,0 +1,21 @@
+# This file is distributed under the same license as the Django package.
+#
+# The *_FORMAT strings use the Django date format syntax,
+# see https://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
+DATE_FORMAT = "d F Y"
+TIME_FORMAT = "H:i"
+# DATETIME_FORMAT =
+# YEAR_MONTH_FORMAT =
+MONTH_DAY_FORMAT = "j F"
+SHORT_DATE_FORMAT = "d.m.Y"
+# SHORT_DATETIME_FORMAT =
+# FIRST_DAY_OF_WEEK =
+
+# The *_INPUT_FORMATS strings use the Python strftime format syntax,
+# see https://docs.python.org/library/datetime.html#strftime-strptime-behavior
+# DATE_INPUT_FORMATS =
+# TIME_INPUT_FORMATS =
+# DATETIME_INPUT_FORMATS =
+DECIMAL_SEPARATOR = ","
+THOUSAND_SEPARATOR = " " # Non-breaking space
+# NUMBER_GROUPING =
diff --git a/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/bn/LC_MESSAGES/django.mo b/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/bn/LC_MESSAGES/django.mo
new file mode 100644
index 00000000..ef52f360
Binary files /dev/null and b/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/bn/LC_MESSAGES/django.mo differ
diff --git a/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/bn/LC_MESSAGES/django.po b/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/bn/LC_MESSAGES/django.po
new file mode 100644
index 00000000..b554f7a8
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/bn/LC_MESSAGES/django.po
@@ -0,0 +1,1218 @@
+# This file is distributed under the same license as the Django package.
+#
+# Translators:
+# Jannis Leidel , 2011
+# M Nasimul Haque , 2013
+# Tahmid Rafi , 2012-2013
+# Tahmid Rafi , 2013
+msgid ""
+msgstr ""
+"Project-Id-Version: django\n"
+"Report-Msgid-Bugs-To: \n"
+"POT-Creation-Date: 2019-09-27 22:40+0200\n"
+"PO-Revision-Date: 2019-11-05 00:38+0000\n"
+"Last-Translator: Ramiro Morales\n"
+"Language-Team: Bengali (http://www.transifex.com/django/django/language/"
+"bn/)\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Language: bn\n"
+"Plural-Forms: nplurals=2; plural=(n != 1);\n"
+
+msgid "Afrikaans"
+msgstr "আফ্রিকার অন্যতম সরকারি ভাষা"
+
+msgid "Arabic"
+msgstr "আরবী"
+
+msgid "Asturian"
+msgstr ""
+
+msgid "Azerbaijani"
+msgstr "আজারবাইজানি"
+
+msgid "Bulgarian"
+msgstr "বুলগেরিয়ান"
+
+msgid "Belarusian"
+msgstr "বেলারুশীয়"
+
+msgid "Bengali"
+msgstr "বাংলা"
+
+msgid "Breton"
+msgstr "ব্রেটন"
+
+msgid "Bosnian"
+msgstr "বসনিয়ান"
+
+msgid "Catalan"
+msgstr "ক্যাটালান"
+
+msgid "Czech"
+msgstr "চেক"
+
+msgid "Welsh"
+msgstr "ওয়েল্স"
+
+msgid "Danish"
+msgstr "ড্যানিশ"
+
+msgid "German"
+msgstr "জার্মান"
+
+msgid "Lower Sorbian"
+msgstr ""
+
+msgid "Greek"
+msgstr "গ্রিক"
+
+msgid "English"
+msgstr "ইংলিশ"
+
+msgid "Australian English"
+msgstr ""
+
+msgid "British English"
+msgstr "বৃটিশ ইংলিশ"
+
+msgid "Esperanto"
+msgstr "আন্তর্জাতিক ভাষা"
+
+msgid "Spanish"
+msgstr "স্প্যানিশ"
+
+msgid "Argentinian Spanish"
+msgstr "আর্জেন্টিনিয়ান স্প্যানিশ"
+
+msgid "Colombian Spanish"
+msgstr ""
+
+msgid "Mexican Spanish"
+msgstr "মেক্সিকান স্প্যানিশ"
+
+msgid "Nicaraguan Spanish"
+msgstr "নিকারাগুয়ান স্প্যানিশ"
+
+msgid "Venezuelan Spanish"
+msgstr "ভেনেজুয়েলার স্প্যানিশ"
+
+msgid "Estonian"
+msgstr "এস্তোনিয়ান"
+
+msgid "Basque"
+msgstr "বাস্ক"
+
+msgid "Persian"
+msgstr "ফারসি"
+
+msgid "Finnish"
+msgstr "ফিনিশ"
+
+msgid "French"
+msgstr "ফ্রেঞ্চ"
+
+msgid "Frisian"
+msgstr "ফ্রিজ্ল্যানডের ভাষা"
+
+msgid "Irish"
+msgstr "আইরিশ"
+
+msgid "Scottish Gaelic"
+msgstr ""
+
+msgid "Galician"
+msgstr "গ্যালিসিয়ান"
+
+msgid "Hebrew"
+msgstr "হিব্রু"
+
+msgid "Hindi"
+msgstr "হিন্দী"
+
+msgid "Croatian"
+msgstr "ক্রোয়েশিয়ান"
+
+msgid "Upper Sorbian"
+msgstr ""
+
+msgid "Hungarian"
+msgstr "হাঙ্গেরিয়ান"
+
+msgid "Armenian"
+msgstr ""
+
+msgid "Interlingua"
+msgstr ""
+
+msgid "Indonesian"
+msgstr "ইন্দোনেশিয়ান"
+
+msgid "Ido"
+msgstr ""
+
+msgid "Icelandic"
+msgstr "আইসল্যান্ডিক"
+
+msgid "Italian"
+msgstr "ইটালিয়ান"
+
+msgid "Japanese"
+msgstr "জাপানিজ"
+
+msgid "Georgian"
+msgstr "জর্জিয়ান"
+
+msgid "Kabyle"
+msgstr ""
+
+msgid "Kazakh"
+msgstr "কাজাখ"
+
+msgid "Khmer"
+msgstr "খমার"
+
+msgid "Kannada"
+msgstr "কান্নাড়া"
+
+msgid "Korean"
+msgstr "কোরিয়ান"
+
+msgid "Luxembourgish"
+msgstr "লুক্সেমবার্গীয়"
+
+msgid "Lithuanian"
+msgstr "লিথুয়ানিয়ান"
+
+msgid "Latvian"
+msgstr "লাটভিয়ান"
+
+msgid "Macedonian"
+msgstr "ম্যাসাডোনিয়ান"
+
+msgid "Malayalam"
+msgstr "মালায়ালম"
+
+msgid "Mongolian"
+msgstr "মঙ্গোলিয়ান"
+
+msgid "Marathi"
+msgstr ""
+
+msgid "Burmese"
+msgstr "বার্মিজ"
+
+msgid "Norwegian Bokmål"
+msgstr ""
+
+msgid "Nepali"
+msgstr "নেপালি"
+
+msgid "Dutch"
+msgstr "ডাচ"
+
+msgid "Norwegian Nynorsk"
+msgstr "নরওয়েজীয়ান নিনর্স্ক"
+
+msgid "Ossetic"
+msgstr "অসেটিক"
+
+msgid "Punjabi"
+msgstr "পাঞ্জাবী"
+
+msgid "Polish"
+msgstr "পোলিশ"
+
+msgid "Portuguese"
+msgstr "পর্তুগীজ"
+
+msgid "Brazilian Portuguese"
+msgstr "ব্রাজিলিয়ান পর্তুগীজ"
+
+msgid "Romanian"
+msgstr "রোমানিয়ান"
+
+msgid "Russian"
+msgstr "রাশান"
+
+msgid "Slovak"
+msgstr "স্লোভাক"
+
+msgid "Slovenian"
+msgstr "স্লোভেনিয়ান"
+
+msgid "Albanian"
+msgstr "আলবেনীয়ান"
+
+msgid "Serbian"
+msgstr "সার্বিয়ান"
+
+msgid "Serbian Latin"
+msgstr "সার্বিয়ান ল্যাটিন"
+
+msgid "Swedish"
+msgstr "সুইডিশ"
+
+msgid "Swahili"
+msgstr "সোয়াহিলি"
+
+msgid "Tamil"
+msgstr "তামিল"
+
+msgid "Telugu"
+msgstr "তেলেগু"
+
+msgid "Thai"
+msgstr "থাই"
+
+msgid "Turkish"
+msgstr "তুর্কি"
+
+msgid "Tatar"
+msgstr "তাতারদেশীয়"
+
+msgid "Udmurt"
+msgstr ""
+
+msgid "Ukrainian"
+msgstr "ইউক্রেনিয়ান"
+
+msgid "Urdu"
+msgstr "উর্দু"
+
+msgid "Uzbek"
+msgstr ""
+
+msgid "Vietnamese"
+msgstr "ভিয়েতনামিজ"
+
+msgid "Simplified Chinese"
+msgstr "সরলীকৃত চাইনীজ"
+
+msgid "Traditional Chinese"
+msgstr "প্রচলিত চাইনীজ"
+
+msgid "Messages"
+msgstr ""
+
+msgid "Site Maps"
+msgstr ""
+
+msgid "Static Files"
+msgstr ""
+
+msgid "Syndication"
+msgstr ""
+
+msgid "That page number is not an integer"
+msgstr ""
+
+msgid "That page number is less than 1"
+msgstr ""
+
+msgid "That page contains no results"
+msgstr ""
+
+msgid "Enter a valid value."
+msgstr "একটি বৈধ মান দিন।"
+
+msgid "Enter a valid URL."
+msgstr "বৈধ URL দিন।"
+
+msgid "Enter a valid integer."
+msgstr ""
+
+msgid "Enter a valid email address."
+msgstr "একটি বৈধ ইমেইল ঠিকানা লিখুন."
+
+#. Translators: "letters" means latin letters: a-z and A-Z.
+msgid ""
+"Enter a valid “slug” consisting of letters, numbers, underscores or hyphens."
+msgstr ""
+
+msgid ""
+"Enter a valid “slug” consisting of Unicode letters, numbers, underscores, or "
+"hyphens."
+msgstr ""
+
+msgid "Enter a valid IPv4 address."
+msgstr "একটি বৈধ IPv4 ঠিকানা দিন।"
+
+msgid "Enter a valid IPv6 address."
+msgstr "একটি বৈধ IPv6 ঠিকানা টাইপ করুন।"
+
+msgid "Enter a valid IPv4 or IPv6 address."
+msgstr "একটি বৈধ IPv4 অথবা IPv6 ঠিকানা টাইপ করুন।"
+
+msgid "Enter only digits separated by commas."
+msgstr "শুধুমাত্র কমা দিয়ে সংখ্যা দিন।"
+
+#, python-format
+msgid "Ensure this value is %(limit_value)s (it is %(show_value)s)."
+msgstr "সংখ্যাটির মান %(limit_value)s হতে হবে (এটা এখন %(show_value)s আছে)।"
+
+#, python-format
+msgid "Ensure this value is less than or equal to %(limit_value)s."
+msgstr "সংখ্যাটির মান %(limit_value)s এর চেয়ে ছোট বা সমান হতে হবে।"
+
+#, python-format
+msgid "Ensure this value is greater than or equal to %(limit_value)s."
+msgstr "সংখ্যাটির মান %(limit_value)s এর চেয়ে বড় বা সমান হতে হবে।"
+
+#, python-format
+msgid ""
+"Ensure this value has at least %(limit_value)d character (it has "
+"%(show_value)d)."
+msgid_plural ""
+"Ensure this value has at least %(limit_value)d characters (it has "
+"%(show_value)d)."
+msgstr[0] ""
+msgstr[1] ""
+
+#, python-format
+msgid ""
+"Ensure this value has at most %(limit_value)d character (it has "
+"%(show_value)d)."
+msgid_plural ""
+"Ensure this value has at most %(limit_value)d characters (it has "
+"%(show_value)d)."
+msgstr[0] ""
+msgstr[1] ""
+
+msgid "Enter a number."
+msgstr "একটি সংখ্যা প্রবেশ করান।"
+
+#, python-format
+msgid "Ensure that there are no more than %(max)s digit in total."
+msgid_plural "Ensure that there are no more than %(max)s digits in total."
+msgstr[0] ""
+msgstr[1] ""
+
+#, python-format
+msgid "Ensure that there are no more than %(max)s decimal place."
+msgid_plural "Ensure that there are no more than %(max)s decimal places."
+msgstr[0] ""
+msgstr[1] ""
+
+#, python-format
+msgid ""
+"Ensure that there are no more than %(max)s digit before the decimal point."
+msgid_plural ""
+"Ensure that there are no more than %(max)s digits before the decimal point."
+msgstr[0] ""
+msgstr[1] ""
+
+#, python-format
+msgid ""
+"File extension “%(extension)s” is not allowed. Allowed extensions are: "
+"%(allowed_extensions)s."
+msgstr ""
+
+msgid "Null characters are not allowed."
+msgstr ""
+
+msgid "and"
+msgstr "এবং"
+
+#, python-format
+msgid "%(model_name)s with this %(field_labels)s already exists."
+msgstr ""
+
+#, python-format
+msgid "Value %(value)r is not a valid choice."
+msgstr ""
+
+msgid "This field cannot be null."
+msgstr "এর মান null হতে পারবে না।"
+
+msgid "This field cannot be blank."
+msgstr "এই ফিল্ডের মান ফাঁকা হতে পারে না"
+
+#, python-format
+msgid "%(model_name)s with this %(field_label)s already exists."
+msgstr "%(field_label)s সহ %(model_name)s আরেকটি রয়েছে।"
+
+#. Translators: The 'lookup_type' is one of 'date', 'year' or 'month'.
+#. Eg: "Title must be unique for pub_date year"
+#, python-format
+msgid ""
+"%(field_label)s must be unique for %(date_field_label)s %(lookup_type)s."
+msgstr ""
+
+#, python-format
+msgid "Field of type: %(field_type)s"
+msgstr "ফিল্ডের ধরণ: %(field_type)s"
+
+#, python-format
+msgid "“%(value)s” value must be either True or False."
+msgstr ""
+
+#, python-format
+msgid "“%(value)s” value must be either True, False, or None."
+msgstr ""
+
+msgid "Boolean (Either True or False)"
+msgstr "বুলিয়ান (হয় True অথবা False)"
+
+#, python-format
+msgid "String (up to %(max_length)s)"
+msgstr "স্ট্রিং (সর্বোচ্চ %(max_length)s)"
+
+msgid "Comma-separated integers"
+msgstr "কমা দিয়ে আলাদা করা ইন্টিজার"
+
+#, python-format
+msgid ""
+"“%(value)s” value has an invalid date format. It must be in YYYY-MM-DD "
+"format."
+msgstr ""
+
+#, python-format
+msgid ""
+"“%(value)s” value has the correct format (YYYY-MM-DD) but it is an invalid "
+"date."
+msgstr ""
+
+msgid "Date (without time)"
+msgstr "তারিখ (সময় বাদে)"
+
+#, python-format
+msgid ""
+"“%(value)s” value has an invalid format. It must be in YYYY-MM-DD HH:MM[:ss[."
+"uuuuuu]][TZ] format."
+msgstr ""
+
+#, python-format
+msgid ""
+"“%(value)s” value has the correct format (YYYY-MM-DD HH:MM[:ss[.uuuuuu]]"
+"[TZ]) but it is an invalid date/time."
+msgstr ""
+
+msgid "Date (with time)"
+msgstr "তারিখ (সময় সহ)"
+
+#, python-format
+msgid "“%(value)s” value must be a decimal number."
+msgstr ""
+
+msgid "Decimal number"
+msgstr "দশমিক সংখ্যা"
+
+#, python-format
+msgid ""
+"“%(value)s” value has an invalid format. It must be in [DD] [[HH:]MM:]ss[."
+"uuuuuu] format."
+msgstr ""
+
+msgid "Duration"
+msgstr ""
+
+msgid "Email address"
+msgstr "ইমেইল ঠিকানা"
+
+msgid "File path"
+msgstr "ফাইল পথ"
+
+#, python-format
+msgid "“%(value)s” value must be a float."
+msgstr ""
+
+msgid "Floating point number"
+msgstr "ফ্লোটিং পয়েন্ট সংখ্যা"
+
+#, python-format
+msgid "“%(value)s” value must be an integer."
+msgstr ""
+
+msgid "Integer"
+msgstr "ইন্টিজার"
+
+msgid "Big (8 byte) integer"
+msgstr "বিগ (৮ বাইট) ইন্টিজার"
+
+msgid "IPv4 address"
+msgstr "IPv4 ঠিকানা"
+
+msgid "IP address"
+msgstr "আইপি ঠিকানা"
+
+#, python-format
+msgid "“%(value)s” value must be either None, True or False."
+msgstr ""
+
+msgid "Boolean (Either True, False or None)"
+msgstr "বুলিয়ান (হয় True, False অথবা None)"
+
+msgid "Positive integer"
+msgstr "পজিটিভ ইন্টিজার"
+
+msgid "Positive small integer"
+msgstr "পজিটিভ স্মল ইন্টিজার"
+
+#, python-format
+msgid "Slug (up to %(max_length)s)"
+msgstr "স্লাগ (সর্বোচ্চ %(max_length)s)"
+
+msgid "Small integer"
+msgstr "স্মল ইন্টিজার"
+
+msgid "Text"
+msgstr "টেক্সট"
+
+#, python-format
+msgid ""
+"“%(value)s” value has an invalid format. It must be in HH:MM[:ss[.uuuuuu]] "
+"format."
+msgstr ""
+
+#, python-format
+msgid ""
+"“%(value)s” value has the correct format (HH:MM[:ss[.uuuuuu]]) but it is an "
+"invalid time."
+msgstr ""
+
+msgid "Time"
+msgstr "সময়"
+
+msgid "URL"
+msgstr "ইউআরএল (URL)"
+
+msgid "Raw binary data"
+msgstr "র বাইনারি ডাটা"
+
+#, python-format
+msgid "“%(value)s” is not a valid UUID."
+msgstr ""
+
+msgid "Universally unique identifier"
+msgstr ""
+
+msgid "File"
+msgstr "ফাইল"
+
+msgid "Image"
+msgstr "ইমেজ"
+
+#, python-format
+msgid "%(model)s instance with %(field)s %(value)r does not exist."
+msgstr ""
+
+msgid "Foreign Key (type determined by related field)"
+msgstr "ফরেন কি (টাইপ রিলেটেড ফিল্ড দ্বারা নির্ণীত হবে)"
+
+msgid "One-to-one relationship"
+msgstr "ওয়ান-টু-ওয়ান রিলেশানশিপ"
+
+#, python-format
+msgid "%(from)s-%(to)s relationship"
+msgstr ""
+
+#, python-format
+msgid "%(from)s-%(to)s relationships"
+msgstr ""
+
+msgid "Many-to-many relationship"
+msgstr "ম্যানি-টু-ম্যানি রিলেশানশিপ"
+
+#. Translators: If found as last label character, these punctuation
+#. characters will prevent the default label_suffix to be appended to the
+#. label
+msgid ":?.!"
+msgstr ""
+
+msgid "This field is required."
+msgstr "এটি আবশ্যক।"
+
+msgid "Enter a whole number."
+msgstr "একটি পূর্ণসংখ্যা দিন।"
+
+msgid "Enter a valid date."
+msgstr "বৈধ তারিখ দিন।"
+
+msgid "Enter a valid time."
+msgstr "বৈধ সময় দিন।"
+
+msgid "Enter a valid date/time."
+msgstr "বৈধ তারিখ/সময় দিন।"
+
+msgid "Enter a valid duration."
+msgstr ""
+
+#, python-brace-format
+msgid "The number of days must be between {min_days} and {max_days}."
+msgstr ""
+
+msgid "No file was submitted. Check the encoding type on the form."
+msgstr "কোন ফাইল দেয়া হয়নি। ফর্মের এনকোডিং ঠিক আছে কিনা দেখুন।"
+
+msgid "No file was submitted."
+msgstr "কোন ফাইল দেয়া হয়নি।"
+
+msgid "The submitted file is empty."
+msgstr "ফাইলটি খালি।"
+
+#, python-format
+msgid "Ensure this filename has at most %(max)d character (it has %(length)d)."
+msgid_plural ""
+"Ensure this filename has at most %(max)d characters (it has %(length)d)."
+msgstr[0] ""
+msgstr[1] ""
+
+msgid "Please either submit a file or check the clear checkbox, not both."
+msgstr ""
+"একটি ফাইল সাবমিট করুন অথবা ক্লিয়ার চেকবক্সটি চেক করে দিন, যে কোন একটি করুন।"
+
+msgid ""
+"Upload a valid image. The file you uploaded was either not an image or a "
+"corrupted image."
+msgstr ""
+"সঠিক ছবি আপলোড করুন। যে ফাইলটি আপলোড করা হয়েছে তা হয় ছবি নয় অথবা নষ্ট হয়ে "
+"যাওয়া ছবি।"
+
+#, python-format
+msgid "Select a valid choice. %(value)s is not one of the available choices."
+msgstr "%(value)s বৈধ নয়। অনুগ্রহ করে আরেকটি সিলেক্ট করুন।"
+
+msgid "Enter a list of values."
+msgstr "কয়েকটি মানের তালিকা দিন।"
+
+msgid "Enter a complete value."
+msgstr ""
+
+msgid "Enter a valid UUID."
+msgstr ""
+
+#. Translators: This is the default suffix added to form field labels
+msgid ":"
+msgstr ""
+
+#, python-format
+msgid "(Hidden field %(name)s) %(error)s"
+msgstr ""
+
+msgid "ManagementForm data is missing or has been tampered with"
+msgstr ""
+
+#, python-format
+msgid "Please submit %d or fewer forms."
+msgid_plural "Please submit %d or fewer forms."
+msgstr[0] ""
+msgstr[1] ""
+
+#, python-format
+msgid "Please submit %d or more forms."
+msgid_plural "Please submit %d or more forms."
+msgstr[0] ""
+msgstr[1] ""
+
+msgid "Order"
+msgstr "ক্রম"
+
+msgid "Delete"
+msgstr "মুছুন"
+
+#, python-format
+msgid "Please correct the duplicate data for %(field)s."
+msgstr ""
+
+#, python-format
+msgid "Please correct the duplicate data for %(field)s, which must be unique."
+msgstr ""
+
+#, python-format
+msgid ""
+"Please correct the duplicate data for %(field_name)s which must be unique "
+"for the %(lookup)s in %(date_field)s."
+msgstr ""
+
+msgid "Please correct the duplicate values below."
+msgstr ""
+
+msgid "The inline value did not match the parent instance."
+msgstr ""
+
+msgid "Select a valid choice. That choice is not one of the available choices."
+msgstr "এটি বৈধ নয়। অনুগ্রহ করে আরেকটি সিলেক্ট করুন।"
+
+#, python-format
+msgid "“%(pk)s” is not a valid value."
+msgstr ""
+
+#, python-format
+msgid ""
+"%(datetime)s couldn’t be interpreted in time zone %(current_timezone)s; it "
+"may be ambiguous or it may not exist."
+msgstr ""
+
+msgid "Clear"
+msgstr "পরিষ্কার করুন"
+
+msgid "Currently"
+msgstr "এই মুহুর্তে"
+
+msgid "Change"
+msgstr "পরিবর্তন"
+
+msgid "Unknown"
+msgstr "অজানা"
+
+msgid "Yes"
+msgstr "হ্যাঁ"
+
+msgid "No"
+msgstr "না"
+
+msgid "Year"
+msgstr ""
+
+msgid "Month"
+msgstr ""
+
+msgid "Day"
+msgstr ""
+
+msgid "yes,no,maybe"
+msgstr "হ্যাঁ,না,হয়তো"
+
+#, python-format
+msgid "%(size)d byte"
+msgid_plural "%(size)d bytes"
+msgstr[0] "%(size)d বাইট"
+msgstr[1] "%(size)d বাইট"
+
+#, python-format
+msgid "%s KB"
+msgstr "%s কিলোবাইট"
+
+#, python-format
+msgid "%s MB"
+msgstr "%s মেগাবাইট"
+
+#, python-format
+msgid "%s GB"
+msgstr "%s গিগাবাইট"
+
+#, python-format
+msgid "%s TB"
+msgstr "%s টেরাবাইট"
+
+#, python-format
+msgid "%s PB"
+msgstr "%s পেটাবাইট"
+
+msgid "p.m."
+msgstr "অপরাহ্ন"
+
+msgid "a.m."
+msgstr "পূর্বাহ্ন"
+
+msgid "PM"
+msgstr "অপরাহ্ন"
+
+msgid "AM"
+msgstr "পূর্বাহ্ন"
+
+msgid "midnight"
+msgstr "মধ্যরাত"
+
+msgid "noon"
+msgstr "দুপুর"
+
+msgid "Monday"
+msgstr "সোমবার"
+
+msgid "Tuesday"
+msgstr "মঙ্গলবার"
+
+msgid "Wednesday"
+msgstr "বুধবার"
+
+msgid "Thursday"
+msgstr "বৃহস্পতিবার"
+
+msgid "Friday"
+msgstr "শুক্রবার"
+
+msgid "Saturday"
+msgstr "শনিবার"
+
+msgid "Sunday"
+msgstr "রবিবার"
+
+msgid "Mon"
+msgstr "সোম"
+
+msgid "Tue"
+msgstr "মঙ্গল"
+
+msgid "Wed"
+msgstr "বুধ"
+
+msgid "Thu"
+msgstr "বৃহঃ"
+
+msgid "Fri"
+msgstr "শুক্র"
+
+msgid "Sat"
+msgstr "শনি"
+
+msgid "Sun"
+msgstr "রবি"
+
+msgid "January"
+msgstr "জানুয়ারি"
+
+msgid "February"
+msgstr "ফেব্রুয়ারি"
+
+msgid "March"
+msgstr "মার্চ"
+
+msgid "April"
+msgstr "এপ্রিল"
+
+msgid "May"
+msgstr "মে"
+
+msgid "June"
+msgstr "জুন"
+
+msgid "July"
+msgstr "জুলাই"
+
+msgid "August"
+msgstr "আগস্ট"
+
+msgid "September"
+msgstr "সেপ্টেম্বর"
+
+msgid "October"
+msgstr "অক্টোবর"
+
+msgid "November"
+msgstr "নভেম্বর"
+
+msgid "December"
+msgstr "ডিসেম্বর"
+
+msgid "jan"
+msgstr "জান."
+
+msgid "feb"
+msgstr "ফেব."
+
+msgid "mar"
+msgstr "মার্চ"
+
+msgid "apr"
+msgstr "এপ্রি."
+
+msgid "may"
+msgstr "মে"
+
+msgid "jun"
+msgstr "জুন"
+
+msgid "jul"
+msgstr "জুল."
+
+msgid "aug"
+msgstr "আগ."
+
+msgid "sep"
+msgstr "সেপ্টে."
+
+msgid "oct"
+msgstr "অক্টো."
+
+msgid "nov"
+msgstr "নভে."
+
+msgid "dec"
+msgstr "ডিসে."
+
+msgctxt "abbrev. month"
+msgid "Jan."
+msgstr "জানু."
+
+msgctxt "abbrev. month"
+msgid "Feb."
+msgstr "ফেব্রু."
+
+msgctxt "abbrev. month"
+msgid "March"
+msgstr "মার্চ"
+
+msgctxt "abbrev. month"
+msgid "April"
+msgstr "এপ্রিল"
+
+msgctxt "abbrev. month"
+msgid "May"
+msgstr "মে"
+
+msgctxt "abbrev. month"
+msgid "June"
+msgstr "জুন"
+
+msgctxt "abbrev. month"
+msgid "July"
+msgstr "জুলাই"
+
+msgctxt "abbrev. month"
+msgid "Aug."
+msgstr "আগ."
+
+msgctxt "abbrev. month"
+msgid "Sept."
+msgstr "সেপ্ট."
+
+msgctxt "abbrev. month"
+msgid "Oct."
+msgstr "অক্টো."
+
+msgctxt "abbrev. month"
+msgid "Nov."
+msgstr "নভে."
+
+msgctxt "abbrev. month"
+msgid "Dec."
+msgstr "ডিসে."
+
+msgctxt "alt. month"
+msgid "January"
+msgstr "জানুয়ারি"
+
+msgctxt "alt. month"
+msgid "February"
+msgstr "ফেব্রুয়ারি"
+
+msgctxt "alt. month"
+msgid "March"
+msgstr "মার্চ"
+
+msgctxt "alt. month"
+msgid "April"
+msgstr "এপ্রিল"
+
+msgctxt "alt. month"
+msgid "May"
+msgstr "মে"
+
+msgctxt "alt. month"
+msgid "June"
+msgstr "জুন"
+
+msgctxt "alt. month"
+msgid "July"
+msgstr "জুলাই"
+
+msgctxt "alt. month"
+msgid "August"
+msgstr "আগস্ট"
+
+msgctxt "alt. month"
+msgid "September"
+msgstr "সেপ্টেম্বর"
+
+msgctxt "alt. month"
+msgid "October"
+msgstr "অক্টোবর"
+
+msgctxt "alt. month"
+msgid "November"
+msgstr "নভেম্বর"
+
+msgctxt "alt. month"
+msgid "December"
+msgstr "ডিসেম্বর"
+
+msgid "This is not a valid IPv6 address."
+msgstr ""
+
+#, python-format
+msgctxt "String to return when truncating text"
+msgid "%(truncated_text)s…"
+msgstr ""
+
+msgid "or"
+msgstr "অথবা"
+
+#. Translators: This string is used as a separator between list elements
+msgid ", "
+msgstr ", "
+
+#, python-format
+msgid "%d year"
+msgid_plural "%d years"
+msgstr[0] ""
+msgstr[1] ""
+
+#, python-format
+msgid "%d month"
+msgid_plural "%d months"
+msgstr[0] ""
+msgstr[1] ""
+
+#, python-format
+msgid "%d week"
+msgid_plural "%d weeks"
+msgstr[0] ""
+msgstr[1] ""
+
+#, python-format
+msgid "%d day"
+msgid_plural "%d days"
+msgstr[0] ""
+msgstr[1] ""
+
+#, python-format
+msgid "%d hour"
+msgid_plural "%d hours"
+msgstr[0] ""
+msgstr[1] ""
+
+#, python-format
+msgid "%d minute"
+msgid_plural "%d minutes"
+msgstr[0] ""
+msgstr[1] ""
+
+msgid "0 minutes"
+msgstr "0 মিনিট"
+
+msgid "Forbidden"
+msgstr ""
+
+msgid "CSRF verification failed. Request aborted."
+msgstr ""
+
+msgid ""
+"You are seeing this message because this HTTPS site requires a “Referer "
+"header” to be sent by your Web browser, but none was sent. This header is "
+"required for security reasons, to ensure that your browser is not being "
+"hijacked by third parties."
+msgstr ""
+
+msgid ""
+"If you have configured your browser to disable “Referer” headers, please re-"
+"enable them, at least for this site, or for HTTPS connections, or for “same-"
+"origin” requests."
+msgstr ""
+
+msgid ""
+"If you are using the tag or "
+"including the “Referrer-Policy: no-referrer” header, please remove them. The "
+"CSRF protection requires the “Referer” header to do strict referer checking. "
+"If you’re concerned about privacy, use alternatives like for links to third-party sites."
+msgstr ""
+
+msgid ""
+"You are seeing this message because this site requires a CSRF cookie when "
+"submitting forms. This cookie is required for security reasons, to ensure "
+"that your browser is not being hijacked by third parties."
+msgstr ""
+
+msgid ""
+"If you have configured your browser to disable cookies, please re-enable "
+"them, at least for this site, or for “same-origin” requests."
+msgstr ""
+
+msgid "More information is available with DEBUG=True."
+msgstr ""
+
+msgid "No year specified"
+msgstr "কোন বছর উল্লেখ করা হয়নি"
+
+msgid "Date out of range"
+msgstr ""
+
+msgid "No month specified"
+msgstr "কোন মাস উল্লেখ করা হয়নি"
+
+msgid "No day specified"
+msgstr "কোন দিন উল্লেখ করা হয়নি"
+
+msgid "No week specified"
+msgstr "কোন সপ্তাহ উল্লেখ করা হয়নি"
+
+#, python-format
+msgid "No %(verbose_name_plural)s available"
+msgstr "কোন %(verbose_name_plural)s নেই"
+
+#, python-format
+msgid ""
+"Future %(verbose_name_plural)s not available because %(class_name)s."
+"allow_future is False."
+msgstr ""
+
+#, python-format
+msgid "Invalid date string “%(datestr)s” given format “%(format)s”"
+msgstr ""
+
+#, python-format
+msgid "No %(verbose_name)s found matching the query"
+msgstr "কুয়েরি ম্যাচ করে এমন কোন %(verbose_name)s পাওয়া যায় নি"
+
+msgid "Page is not “last”, nor can it be converted to an int."
+msgstr ""
+
+#, python-format
+msgid "Invalid page (%(page_number)s): %(message)s"
+msgstr ""
+
+#, python-format
+msgid "Empty list and “%(class_name)s.allow_empty” is False."
+msgstr ""
+
+msgid "Directory indexes are not allowed here."
+msgstr "ডিরেক্টরি ইনডেক্স অনুমোদিত নয়"
+
+#, python-format
+msgid "“%(path)s” does not exist"
+msgstr ""
+
+#, python-format
+msgid "Index of %(directory)s"
+msgstr "%(directory)s এর ইনডেক্স"
+
+msgid "Django: the Web framework for perfectionists with deadlines."
+msgstr ""
+
+#, python-format
+msgid ""
+"View release notes for Django %(version)s"
+msgstr ""
+
+msgid "The install worked successfully! Congratulations!"
+msgstr ""
+
+#, python-format
+msgid ""
+"You are seeing this page because DEBUG=True is in your settings file and you have not configured any "
+"URLs."
+msgstr ""
+
+msgid "Django Documentation"
+msgstr ""
+
+msgid "Topics, references, & how-to’s"
+msgstr ""
+
+msgid "Tutorial: A Polling App"
+msgstr ""
+
+msgid "Get started with Django"
+msgstr ""
+
+msgid "Django Community"
+msgstr ""
+
+msgid "Connect, get help, or contribute"
+msgstr ""
diff --git a/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/bn/__init__.py b/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/bn/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/bn/formats.py b/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/bn/formats.py
new file mode 100644
index 00000000..9d1bb09d
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/bn/formats.py
@@ -0,0 +1,32 @@
+# This file is distributed under the same license as the Django package.
+#
+# The *_FORMAT strings use the Django date format syntax,
+# see https://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
+DATE_FORMAT = "j F, Y"
+TIME_FORMAT = "g:i A"
+# DATETIME_FORMAT =
+YEAR_MONTH_FORMAT = "F Y"
+MONTH_DAY_FORMAT = "j F"
+SHORT_DATE_FORMAT = "j M, Y"
+# SHORT_DATETIME_FORMAT =
+FIRST_DAY_OF_WEEK = 6 # Saturday
+
+# The *_INPUT_FORMATS strings use the Python strftime format syntax,
+# see https://docs.python.org/library/datetime.html#strftime-strptime-behavior
+DATE_INPUT_FORMATS = [
+ "%d/%m/%Y", # 25/10/2016
+ "%d/%m/%y", # 25/10/16
+ "%d-%m-%Y", # 25-10-2016
+ "%d-%m-%y", # 25-10-16
+]
+TIME_INPUT_FORMATS = [
+ "%H:%M:%S", # 14:30:59
+ "%H:%M", # 14:30
+]
+DATETIME_INPUT_FORMATS = [
+ "%d/%m/%Y %H:%M:%S", # 25/10/2006 14:30:59
+ "%d/%m/%Y %H:%M", # 25/10/2006 14:30
+]
+DECIMAL_SEPARATOR = "."
+THOUSAND_SEPARATOR = ","
+# NUMBER_GROUPING =
diff --git a/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/br/LC_MESSAGES/django.mo b/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/br/LC_MESSAGES/django.mo
new file mode 100644
index 00000000..d864abe9
Binary files /dev/null and b/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/br/LC_MESSAGES/django.mo differ
diff --git a/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/br/LC_MESSAGES/django.po b/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/br/LC_MESSAGES/django.po
new file mode 100644
index 00000000..3b1a759b
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/br/LC_MESSAGES/django.po
@@ -0,0 +1,1297 @@
+# This file is distributed under the same license as the Django package.
+#
+# Translators:
+# Claude Paroz , 2020
+# Ewen , 2021
+# Fulup , 2012,2014
+msgid ""
+msgstr ""
+"Project-Id-Version: django\n"
+"Report-Msgid-Bugs-To: \n"
+"POT-Creation-Date: 2021-09-21 10:22+0200\n"
+"PO-Revision-Date: 2021-11-18 21:19+0000\n"
+"Last-Translator: Transifex Bot <>\n"
+"Language-Team: Breton (http://www.transifex.com/django/django/language/br/)\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Language: br\n"
+"Plural-Forms: nplurals=5; plural=((n%10 == 1) && (n%100 != 11) && (n%100 !"
+"=71) && (n%100 !=91) ? 0 :(n%10 == 2) && (n%100 != 12) && (n%100 !=72) && (n"
+"%100 !=92) ? 1 :(n%10 ==3 || n%10==4 || n%10==9) && (n%100 < 10 || n% 100 > "
+"19) && (n%100 < 70 || n%100 > 79) && (n%100 < 90 || n%100 > 99) ? 2 :(n != 0 "
+"&& n % 1000000 == 0) ? 3 : 4);\n"
+
+msgid "Afrikaans"
+msgstr "Afrikaneg"
+
+msgid "Arabic"
+msgstr "Arabeg"
+
+msgid "Algerian Arabic"
+msgstr ""
+
+msgid "Asturian"
+msgstr "Astureg"
+
+msgid "Azerbaijani"
+msgstr "Azeri"
+
+msgid "Bulgarian"
+msgstr "Bulgareg"
+
+msgid "Belarusian"
+msgstr "Belaruseg"
+
+msgid "Bengali"
+msgstr "Bengaleg"
+
+msgid "Breton"
+msgstr "Brezhoneg"
+
+msgid "Bosnian"
+msgstr "Bosneg"
+
+msgid "Catalan"
+msgstr "Katalaneg"
+
+msgid "Czech"
+msgstr "Tchekeg"
+
+msgid "Welsh"
+msgstr "Kembraeg"
+
+msgid "Danish"
+msgstr "Daneg"
+
+msgid "German"
+msgstr "Alamaneg"
+
+msgid "Lower Sorbian"
+msgstr ""
+
+msgid "Greek"
+msgstr "Gresianeg"
+
+msgid "English"
+msgstr "Saozneg"
+
+msgid "Australian English"
+msgstr "Saozneg Aostralia"
+
+msgid "British English"
+msgstr "Saozneg Breizh-Veur"
+
+msgid "Esperanto"
+msgstr "Esperanteg"
+
+msgid "Spanish"
+msgstr "Spagnoleg"
+
+msgid "Argentinian Spanish"
+msgstr "Spagnoleg Arc'hantina"
+
+msgid "Colombian Spanish"
+msgstr "Spagnoleg Kolombia"
+
+msgid "Mexican Spanish"
+msgstr "Spagnoleg Mec'hiko"
+
+msgid "Nicaraguan Spanish"
+msgstr "Spagnoleg Nicaragua"
+
+msgid "Venezuelan Spanish"
+msgstr "Spagnoleg Venezuela"
+
+msgid "Estonian"
+msgstr "Estoneg"
+
+msgid "Basque"
+msgstr "Euskareg"
+
+msgid "Persian"
+msgstr "Perseg"
+
+msgid "Finnish"
+msgstr "Finneg"
+
+msgid "French"
+msgstr "Galleg"
+
+msgid "Frisian"
+msgstr "Frizeg"
+
+msgid "Irish"
+msgstr "Iwerzhoneg"
+
+msgid "Scottish Gaelic"
+msgstr ""
+
+msgid "Galician"
+msgstr "Galizeg"
+
+msgid "Hebrew"
+msgstr "Hebraeg"
+
+msgid "Hindi"
+msgstr "Hindi"
+
+msgid "Croatian"
+msgstr "Kroateg"
+
+msgid "Upper Sorbian"
+msgstr ""
+
+msgid "Hungarian"
+msgstr "Hungareg"
+
+msgid "Armenian"
+msgstr ""
+
+msgid "Interlingua"
+msgstr "Interlingua"
+
+msgid "Indonesian"
+msgstr "Indonezeg"
+
+msgid "Igbo"
+msgstr ""
+
+msgid "Ido"
+msgstr "Ido"
+
+msgid "Icelandic"
+msgstr "Islandeg"
+
+msgid "Italian"
+msgstr "Italianeg"
+
+msgid "Japanese"
+msgstr "Japaneg"
+
+msgid "Georgian"
+msgstr "Jorjianeg"
+
+msgid "Kabyle"
+msgstr ""
+
+msgid "Kazakh"
+msgstr "kazak"
+
+msgid "Khmer"
+msgstr "Khmer"
+
+msgid "Kannada"
+msgstr "Kannata"
+
+msgid "Korean"
+msgstr "Koreaneg"
+
+msgid "Kyrgyz"
+msgstr ""
+
+msgid "Luxembourgish"
+msgstr "Luksembourgeg"
+
+msgid "Lithuanian"
+msgstr "Lituaneg"
+
+msgid "Latvian"
+msgstr "Latveg"
+
+msgid "Macedonian"
+msgstr "Makedoneg"
+
+msgid "Malayalam"
+msgstr "Malayalam"
+
+msgid "Mongolian"
+msgstr "Mongoleg"
+
+msgid "Marathi"
+msgstr "Marathi"
+
+msgid "Malay"
+msgstr ""
+
+msgid "Burmese"
+msgstr "Burmeg"
+
+msgid "Norwegian Bokmål"
+msgstr ""
+
+msgid "Nepali"
+msgstr "nepaleg"
+
+msgid "Dutch"
+msgstr "Nederlandeg"
+
+msgid "Norwegian Nynorsk"
+msgstr "Norvegeg Nynorsk"
+
+msgid "Ossetic"
+msgstr "Oseteg"
+
+msgid "Punjabi"
+msgstr "Punjabeg"
+
+msgid "Polish"
+msgstr "Poloneg"
+
+msgid "Portuguese"
+msgstr "Portugaleg"
+
+msgid "Brazilian Portuguese"
+msgstr "Portugaleg Brazil"
+
+msgid "Romanian"
+msgstr "Roumaneg"
+
+msgid "Russian"
+msgstr "Rusianeg"
+
+msgid "Slovak"
+msgstr "Slovakeg"
+
+msgid "Slovenian"
+msgstr "Sloveneg"
+
+msgid "Albanian"
+msgstr "Albaneg"
+
+msgid "Serbian"
+msgstr "Serbeg"
+
+msgid "Serbian Latin"
+msgstr "Serbeg e lizherennoù latin"
+
+msgid "Swedish"
+msgstr "Svedeg"
+
+msgid "Swahili"
+msgstr "swahileg"
+
+msgid "Tamil"
+msgstr "Tamileg"
+
+msgid "Telugu"
+msgstr "Telougou"
+
+msgid "Tajik"
+msgstr ""
+
+msgid "Thai"
+msgstr "Thai"
+
+msgid "Turkmen"
+msgstr ""
+
+msgid "Turkish"
+msgstr "Turkeg"
+
+msgid "Tatar"
+msgstr "tatar"
+
+msgid "Udmurt"
+msgstr "Oudmourteg"
+
+msgid "Ukrainian"
+msgstr "Ukraineg"
+
+msgid "Urdu"
+msgstr "Ourdou"
+
+msgid "Uzbek"
+msgstr ""
+
+msgid "Vietnamese"
+msgstr "Vietnameg"
+
+msgid "Simplified Chinese"
+msgstr "Sinaeg eeunaet"
+
+msgid "Traditional Chinese"
+msgstr "Sinaeg hengounel"
+
+msgid "Messages"
+msgstr "Kemennadenn"
+
+msgid "Site Maps"
+msgstr "Tresoù al lec'hienn"
+
+msgid "Static Files"
+msgstr "Restroù statek"
+
+msgid "Syndication"
+msgstr "Sindikadur"
+
+#. Translators: String used to replace omitted page numbers in elided page
+#. range generated by paginators, e.g. [1, 2, '…', 5, 6, 7, '…', 9, 10].
+msgid "…"
+msgstr "..."
+
+msgid "That page number is not an integer"
+msgstr ""
+
+msgid "That page number is less than 1"
+msgstr "An niver a bajenn mañ a zo bihanoc'h eget 1."
+
+msgid "That page contains no results"
+msgstr "N'eus disoc'h er pajenn-mañ."
+
+msgid "Enter a valid value."
+msgstr "Merkit un talvoud reizh"
+
+msgid "Enter a valid URL."
+msgstr "Merkit un URL reizh"
+
+msgid "Enter a valid integer."
+msgstr "Merkit un niver anterin reizh."
+
+msgid "Enter a valid email address."
+msgstr "Merkit ur chomlec'h postel reizh"
+
+#. Translators: "letters" means latin letters: a-z and A-Z.
+msgid ""
+"Enter a valid “slug” consisting of letters, numbers, underscores or hyphens."
+msgstr ""
+
+msgid ""
+"Enter a valid “slug” consisting of Unicode letters, numbers, underscores, or "
+"hyphens."
+msgstr ""
+
+msgid "Enter a valid IPv4 address."
+msgstr "Merkit ur chomlec'h IPv4 reizh."
+
+msgid "Enter a valid IPv6 address."
+msgstr "Merkit ur chomlec'h IPv6 reizh."
+
+msgid "Enter a valid IPv4 or IPv6 address."
+msgstr "Merkit ur chomlec'h IPv4 pe IPv6 reizh."
+
+msgid "Enter only digits separated by commas."
+msgstr "Merkañ hepken sifroù dispartiet dre skejoù."
+
+#, python-format
+msgid "Ensure this value is %(limit_value)s (it is %(show_value)s)."
+msgstr ""
+"Bezit sur ez eo an talvoud-mañ %(limit_value)s (evit ar mare ez eo "
+"%(show_value)s)."
+
+#, python-format
+msgid "Ensure this value is less than or equal to %(limit_value)s."
+msgstr "Gwiriit mat emañ an talvoud-mañ a-is pe par da %(limit_value)s."
+
+#, python-format
+msgid "Ensure this value is greater than or equal to %(limit_value)s."
+msgstr "Gwiriit mat emañ an talvoud-mañ a-us pe par da %(limit_value)s."
+
+#, python-format
+msgid ""
+"Ensure this value has at least %(limit_value)d character (it has "
+"%(show_value)d)."
+msgid_plural ""
+"Ensure this value has at least %(limit_value)d characters (it has "
+"%(show_value)d)."
+msgstr[0] ""
+msgstr[1] ""
+msgstr[2] ""
+msgstr[3] ""
+msgstr[4] ""
+
+#, python-format
+msgid ""
+"Ensure this value has at most %(limit_value)d character (it has "
+"%(show_value)d)."
+msgid_plural ""
+"Ensure this value has at most %(limit_value)d characters (it has "
+"%(show_value)d)."
+msgstr[0] ""
+msgstr[1] ""
+msgstr[2] ""
+msgstr[3] ""
+msgstr[4] ""
+
+msgid "Enter a number."
+msgstr "Merkit un niver."
+
+#, python-format
+msgid "Ensure that there are no more than %(max)s digit in total."
+msgid_plural "Ensure that there are no more than %(max)s digits in total."
+msgstr[0] ""
+msgstr[1] ""
+msgstr[2] ""
+msgstr[3] ""
+msgstr[4] ""
+
+#, python-format
+msgid "Ensure that there are no more than %(max)s decimal place."
+msgid_plural "Ensure that there are no more than %(max)s decimal places."
+msgstr[0] ""
+msgstr[1] ""
+msgstr[2] ""
+msgstr[3] ""
+msgstr[4] ""
+
+#, python-format
+msgid ""
+"Ensure that there are no more than %(max)s digit before the decimal point."
+msgid_plural ""
+"Ensure that there are no more than %(max)s digits before the decimal point."
+msgstr[0] ""
+msgstr[1] ""
+msgstr[2] ""
+msgstr[3] ""
+msgstr[4] ""
+
+#, python-format
+msgid ""
+"File extension “%(extension)s” is not allowed. Allowed extensions are: "
+"%(allowed_extensions)s."
+msgstr ""
+
+msgid "Null characters are not allowed."
+msgstr ""
+
+msgid "and"
+msgstr "ha"
+
+#, python-format
+msgid "%(model_name)s with this %(field_labels)s already exists."
+msgstr ""
+
+#, python-format
+msgid "Value %(value)r is not a valid choice."
+msgstr ""
+
+msgid "This field cannot be null."
+msgstr "N'hall ket ar vaezienn chom goullo"
+
+msgid "This field cannot be blank."
+msgstr "N'hall ket ar vaezienn chom goullo"
+
+#, python-format
+msgid "%(model_name)s with this %(field_label)s already exists."
+msgstr "Bez' ez eus c'hoazh eus ur %(model_name)s gant ar %(field_label)s-mañ."
+
+#. Translators: The 'lookup_type' is one of 'date', 'year' or 'month'.
+#. Eg: "Title must be unique for pub_date year"
+#, python-format
+msgid ""
+"%(field_label)s must be unique for %(date_field_label)s %(lookup_type)s."
+msgstr ""
+
+#, python-format
+msgid "Field of type: %(field_type)s"
+msgstr "Seurt maezienn : %(field_type)s"
+
+#, python-format
+msgid "“%(value)s” value must be either True or False."
+msgstr ""
+
+#, python-format
+msgid "“%(value)s” value must be either True, False, or None."
+msgstr ""
+
+msgid "Boolean (Either True or False)"
+msgstr "Boulean (gwir pe gaou)"
+
+#, python-format
+msgid "String (up to %(max_length)s)"
+msgstr "neudennad arouezennoù (betek %(max_length)s)"
+
+msgid "Comma-separated integers"
+msgstr "Niveroù anterin dispartiet dre ur skej"
+
+#, python-format
+msgid ""
+"“%(value)s” value has an invalid date format. It must be in YYYY-MM-DD "
+"format."
+msgstr ""
+
+#, python-format
+msgid ""
+"“%(value)s” value has the correct format (YYYY-MM-DD) but it is an invalid "
+"date."
+msgstr ""
+
+msgid "Date (without time)"
+msgstr "Deizad (hep eur)"
+
+#, python-format
+msgid ""
+"“%(value)s” value has an invalid format. It must be in YYYY-MM-DD HH:MM[:ss[."
+"uuuuuu]][TZ] format."
+msgstr ""
+
+#, python-format
+msgid ""
+"“%(value)s” value has the correct format (YYYY-MM-DD HH:MM[:ss[.uuuuuu]]"
+"[TZ]) but it is an invalid date/time."
+msgstr ""
+
+msgid "Date (with time)"
+msgstr "Deizad (gant an eur)"
+
+#, python-format
+msgid "“%(value)s” value must be a decimal number."
+msgstr ""
+
+msgid "Decimal number"
+msgstr "Niver dekvedennel"
+
+#, python-format
+msgid ""
+"“%(value)s” value has an invalid format. It must be in [DD] [[HH:]MM:]ss[."
+"uuuuuu] format."
+msgstr ""
+
+msgid "Duration"
+msgstr ""
+
+msgid "Email address"
+msgstr "Chomlec'h postel"
+
+msgid "File path"
+msgstr "Treug war-du ar restr"
+
+#, python-format
+msgid "“%(value)s” value must be a float."
+msgstr ""
+
+msgid "Floating point number"
+msgstr "Niver gant skej nij"
+
+#, python-format
+msgid "“%(value)s” value must be an integer."
+msgstr ""
+
+msgid "Integer"
+msgstr "Anterin"
+
+msgid "Big (8 byte) integer"
+msgstr "Anterin bras (8 okted)"
+
+msgid "Small integer"
+msgstr "Niver anterin bihan"
+
+msgid "IPv4 address"
+msgstr "Chomlec'h IPv4"
+
+msgid "IP address"
+msgstr "Chomlec'h IP"
+
+#, python-format
+msgid "“%(value)s” value must be either None, True or False."
+msgstr ""
+
+msgid "Boolean (Either True, False or None)"
+msgstr "Boulean (gwir pe gaou pe netra)"
+
+msgid "Positive big integer"
+msgstr ""
+
+msgid "Positive integer"
+msgstr "Niver anterin pozitivel"
+
+msgid "Positive small integer"
+msgstr "Niver anterin bihan pozitivel"
+
+#, python-format
+msgid "Slug (up to %(max_length)s)"
+msgstr "Slug (betek %(max_length)s arouez.)"
+
+msgid "Text"
+msgstr "Testenn"
+
+#, python-format
+msgid ""
+"“%(value)s” value has an invalid format. It must be in HH:MM[:ss[.uuuuuu]] "
+"format."
+msgstr ""
+
+#, python-format
+msgid ""
+"“%(value)s” value has the correct format (HH:MM[:ss[.uuuuuu]]) but it is an "
+"invalid time."
+msgstr ""
+
+msgid "Time"
+msgstr "Eur"
+
+msgid "URL"
+msgstr "URL"
+
+msgid "Raw binary data"
+msgstr ""
+
+#, python-format
+msgid "“%(value)s” is not a valid UUID."
+msgstr ""
+
+msgid "Universally unique identifier"
+msgstr ""
+
+msgid "File"
+msgstr "Restr"
+
+msgid "Image"
+msgstr "Skeudenn"
+
+msgid "A JSON object"
+msgstr ""
+
+msgid "Value must be valid JSON."
+msgstr ""
+
+#, python-format
+msgid "%(model)s instance with %(field)s %(value)r does not exist."
+msgstr ""
+
+msgid "Foreign Key (type determined by related field)"
+msgstr "Alc'hwez estren (seurt termenet dre ar vaezienn liammet)"
+
+msgid "One-to-one relationship"
+msgstr "Darempred unan-ouzh-unan"
+
+#, python-format
+msgid "%(from)s-%(to)s relationship"
+msgstr ""
+
+#, python-format
+msgid "%(from)s-%(to)s relationships"
+msgstr ""
+
+msgid "Many-to-many relationship"
+msgstr "Darempred lies-ouzh-lies"
+
+#. Translators: If found as last label character, these punctuation
+#. characters will prevent the default label_suffix to be appended to the
+#. label
+msgid ":?.!"
+msgstr ""
+
+msgid "This field is required."
+msgstr "Rekis eo leuniañ ar vaezienn."
+
+msgid "Enter a whole number."
+msgstr "Merkit un niver anterin."
+
+msgid "Enter a valid date."
+msgstr "Merkit un deiziad reizh"
+
+msgid "Enter a valid time."
+msgstr "Merkit un eur reizh"
+
+msgid "Enter a valid date/time."
+msgstr "Merkit un eur/deiziad reizh"
+
+msgid "Enter a valid duration."
+msgstr ""
+
+#, python-brace-format
+msgid "The number of days must be between {min_days} and {max_days}."
+msgstr ""
+
+msgid "No file was submitted. Check the encoding type on the form."
+msgstr "N'eus ket kaset restr ebet. Gwiriit ar seurt enkodañ evit ar restr"
+
+msgid "No file was submitted."
+msgstr "N'eus bet kaset restr ebet."
+
+msgid "The submitted file is empty."
+msgstr "Goullo eo ar restr kaset."
+
+#, python-format
+msgid "Ensure this filename has at most %(max)d character (it has %(length)d)."
+msgid_plural ""
+"Ensure this filename has at most %(max)d characters (it has %(length)d)."
+msgstr[0] ""
+msgstr[1] ""
+msgstr[2] ""
+msgstr[3] ""
+msgstr[4] ""
+
+msgid "Please either submit a file or check the clear checkbox, not both."
+msgstr "Kasit ur restr pe askit al log riñsañ; an eil pe egile"
+
+msgid ""
+"Upload a valid image. The file you uploaded was either not an image or a "
+"corrupted image."
+msgstr ""
+"Enpozhiit ur skeudenn reizh. Ar seurt bet enporzhiet ganeoc'h a oa foeltret "
+"pe ne oa ket ur skeudenn"
+
+#, python-format
+msgid "Select a valid choice. %(value)s is not one of the available choices."
+msgstr "Dizuit un dibab reizh. %(value)s n'emañ ket e-touez an dibaboù posupl."
+
+msgid "Enter a list of values."
+msgstr "Merkit ur roll talvoudoù"
+
+msgid "Enter a complete value."
+msgstr "Merkañ un talvoud klok"
+
+msgid "Enter a valid UUID."
+msgstr ""
+
+msgid "Enter a valid JSON."
+msgstr ""
+
+#. Translators: This is the default suffix added to form field labels
+msgid ":"
+msgstr ""
+
+#, python-format
+msgid "(Hidden field %(name)s) %(error)s"
+msgstr ""
+
+#, python-format
+msgid ""
+"ManagementForm data is missing or has been tampered with. Missing fields: "
+"%(field_names)s. You may need to file a bug report if the issue persists."
+msgstr ""
+
+#, python-format
+msgid "Please submit at most %d form."
+msgid_plural "Please submit at most %d forms."
+msgstr[0] ""
+msgstr[1] ""
+msgstr[2] ""
+msgstr[3] ""
+msgstr[4] ""
+
+#, python-format
+msgid "Please submit at least %d form."
+msgid_plural "Please submit at least %d forms."
+msgstr[0] ""
+msgstr[1] ""
+msgstr[2] ""
+msgstr[3] ""
+msgstr[4] ""
+
+msgid "Order"
+msgstr "Urzh"
+
+msgid "Delete"
+msgstr "Diverkañ"
+
+#, python-format
+msgid "Please correct the duplicate data for %(field)s."
+msgstr "Reizhit ar roadennoù e doubl e %(field)s."
+
+#, python-format
+msgid "Please correct the duplicate data for %(field)s, which must be unique."
+msgstr ""
+"Reizhit ar roadennoù e doubl e %(field)s, na zle bezañ enni nemet talvoudoù "
+"dzho o-unan."
+
+#, python-format
+msgid ""
+"Please correct the duplicate data for %(field_name)s which must be unique "
+"for the %(lookup)s in %(date_field)s."
+msgstr ""
+"Reizhit ar roadennoù e doubl e %(field_name)s a rank bezañ ennañ talvodoù en "
+"o-unan evit lodenn %(lookup)s %(date_field)s."
+
+msgid "Please correct the duplicate values below."
+msgstr "Reizhañ ar roadennoù e doubl zo a-is"
+
+msgid "The inline value did not match the parent instance."
+msgstr ""
+
+msgid "Select a valid choice. That choice is not one of the available choices."
+msgstr "Diuzit un dibab reizh. N'emañ ket an dibab-mañ e-touez ar re bosupl."
+
+#, python-format
+msgid "“%(pk)s” is not a valid value."
+msgstr ""
+
+#, python-format
+msgid ""
+"%(datetime)s couldn’t be interpreted in time zone %(current_timezone)s; it "
+"may be ambiguous or it may not exist."
+msgstr ""
+
+msgid "Clear"
+msgstr "Riñsañ"
+
+msgid "Currently"
+msgstr "Evit ar mare"
+
+msgid "Change"
+msgstr "Kemmañ"
+
+msgid "Unknown"
+msgstr "Dianav"
+
+msgid "Yes"
+msgstr "Ya"
+
+msgid "No"
+msgstr "Ket"
+
+#. Translators: Please do not add spaces around commas.
+msgid "yes,no,maybe"
+msgstr "ya,ket,marteze"
+
+#, python-format
+msgid "%(size)d byte"
+msgid_plural "%(size)d bytes"
+msgstr[0] "%(size)d okted"
+msgstr[1] "%(size)d okted"
+msgstr[2] "%(size)d okted"
+msgstr[3] "%(size)d okted"
+msgstr[4] "%(size)d okted"
+
+#, python-format
+msgid "%s KB"
+msgstr "%s KB"
+
+#, python-format
+msgid "%s MB"
+msgstr "%s MB"
+
+#, python-format
+msgid "%s GB"
+msgstr "%s GB"
+
+#, python-format
+msgid "%s TB"
+msgstr "%s TB"
+
+#, python-format
+msgid "%s PB"
+msgstr "%s PB"
+
+msgid "p.m."
+msgstr "g.m."
+
+msgid "a.m."
+msgstr "mintin"
+
+msgid "PM"
+msgstr "G.M."
+
+msgid "AM"
+msgstr "Mintin"
+
+msgid "midnight"
+msgstr "hanternoz"
+
+msgid "noon"
+msgstr "kreisteiz"
+
+msgid "Monday"
+msgstr "Lun"
+
+msgid "Tuesday"
+msgstr "Meurzh"
+
+msgid "Wednesday"
+msgstr "Merc'her"
+
+msgid "Thursday"
+msgstr "Yaou"
+
+msgid "Friday"
+msgstr "Gwener"
+
+msgid "Saturday"
+msgstr "Sadorn"
+
+msgid "Sunday"
+msgstr "Sul"
+
+msgid "Mon"
+msgstr "Lun"
+
+msgid "Tue"
+msgstr "Meu"
+
+msgid "Wed"
+msgstr "Mer"
+
+msgid "Thu"
+msgstr "Yao"
+
+msgid "Fri"
+msgstr "Gwe"
+
+msgid "Sat"
+msgstr "Sad"
+
+msgid "Sun"
+msgstr "Sul"
+
+msgid "January"
+msgstr "Genver"
+
+msgid "February"
+msgstr "C'hwevrer"
+
+msgid "March"
+msgstr "Meurzh"
+
+msgid "April"
+msgstr "Ebrel"
+
+msgid "May"
+msgstr "Mae"
+
+msgid "June"
+msgstr "Mezheven"
+
+msgid "July"
+msgstr "Gouere"
+
+msgid "August"
+msgstr "Eost"
+
+msgid "September"
+msgstr "Gwengolo"
+
+msgid "October"
+msgstr "Here"
+
+msgid "November"
+msgstr "Du"
+
+msgid "December"
+msgstr "Kerzu"
+
+msgid "jan"
+msgstr "Gen"
+
+msgid "feb"
+msgstr "C'hwe"
+
+msgid "mar"
+msgstr "Meu"
+
+msgid "apr"
+msgstr "Ebr"
+
+msgid "may"
+msgstr "Mae"
+
+msgid "jun"
+msgstr "Mez"
+
+msgid "jul"
+msgstr "Gou"
+
+msgid "aug"
+msgstr "Eos"
+
+msgid "sep"
+msgstr "Gwe"
+
+msgid "oct"
+msgstr "Her"
+
+msgid "nov"
+msgstr "Du"
+
+msgid "dec"
+msgstr "Kzu"
+
+msgctxt "abbrev. month"
+msgid "Jan."
+msgstr "Gen."
+
+msgctxt "abbrev. month"
+msgid "Feb."
+msgstr "C'hwe."
+
+msgctxt "abbrev. month"
+msgid "March"
+msgstr "Meu."
+
+msgctxt "abbrev. month"
+msgid "April"
+msgstr "Ebr."
+
+msgctxt "abbrev. month"
+msgid "May"
+msgstr "Mae"
+
+msgctxt "abbrev. month"
+msgid "June"
+msgstr "Mez."
+
+msgctxt "abbrev. month"
+msgid "July"
+msgstr "Gou."
+
+msgctxt "abbrev. month"
+msgid "Aug."
+msgstr "Eos."
+
+msgctxt "abbrev. month"
+msgid "Sept."
+msgstr "Gwe."
+
+msgctxt "abbrev. month"
+msgid "Oct."
+msgstr "Her."
+
+msgctxt "abbrev. month"
+msgid "Nov."
+msgstr "Du"
+
+msgctxt "abbrev. month"
+msgid "Dec."
+msgstr "Kzu"
+
+msgctxt "alt. month"
+msgid "January"
+msgstr "Genver"
+
+msgctxt "alt. month"
+msgid "February"
+msgstr "C'hwevrer"
+
+msgctxt "alt. month"
+msgid "March"
+msgstr "Meurzh"
+
+msgctxt "alt. month"
+msgid "April"
+msgstr "Ebrel"
+
+msgctxt "alt. month"
+msgid "May"
+msgstr "Mae"
+
+msgctxt "alt. month"
+msgid "June"
+msgstr "Mezheven"
+
+msgctxt "alt. month"
+msgid "July"
+msgstr "Gouere"
+
+msgctxt "alt. month"
+msgid "August"
+msgstr "Eost"
+
+msgctxt "alt. month"
+msgid "September"
+msgstr "Gwengolo"
+
+msgctxt "alt. month"
+msgid "October"
+msgstr "Here"
+
+msgctxt "alt. month"
+msgid "November"
+msgstr "Du"
+
+msgctxt "alt. month"
+msgid "December"
+msgstr "Kerzu"
+
+msgid "This is not a valid IPv6 address."
+msgstr ""
+
+#, python-format
+msgctxt "String to return when truncating text"
+msgid "%(truncated_text)s…"
+msgstr ""
+
+msgid "or"
+msgstr "pe"
+
+#. Translators: This string is used as a separator between list elements
+msgid ", "
+msgstr ", "
+
+#, python-format
+msgid "%(num)d year"
+msgid_plural "%(num)d years"
+msgstr[0] ""
+msgstr[1] ""
+msgstr[2] ""
+msgstr[3] ""
+msgstr[4] ""
+
+#, python-format
+msgid "%(num)d month"
+msgid_plural "%(num)d months"
+msgstr[0] ""
+msgstr[1] ""
+msgstr[2] ""
+msgstr[3] ""
+msgstr[4] ""
+
+#, python-format
+msgid "%(num)d week"
+msgid_plural "%(num)d weeks"
+msgstr[0] ""
+msgstr[1] ""
+msgstr[2] ""
+msgstr[3] ""
+msgstr[4] ""
+
+#, python-format
+msgid "%(num)d day"
+msgid_plural "%(num)d days"
+msgstr[0] ""
+msgstr[1] ""
+msgstr[2] ""
+msgstr[3] ""
+msgstr[4] ""
+
+#, python-format
+msgid "%(num)d hour"
+msgid_plural "%(num)d hours"
+msgstr[0] ""
+msgstr[1] ""
+msgstr[2] ""
+msgstr[3] ""
+msgstr[4] ""
+
+#, python-format
+msgid "%(num)d minute"
+msgid_plural "%(num)d minutes"
+msgstr[0] ""
+msgstr[1] ""
+msgstr[2] ""
+msgstr[3] ""
+msgstr[4] ""
+
+msgid "Forbidden"
+msgstr "Difennet"
+
+msgid "CSRF verification failed. Request aborted."
+msgstr ""
+
+msgid ""
+"You are seeing this message because this HTTPS site requires a “Referer "
+"header” to be sent by your web browser, but none was sent. This header is "
+"required for security reasons, to ensure that your browser is not being "
+"hijacked by third parties."
+msgstr ""
+
+msgid ""
+"If you have configured your browser to disable “Referer” headers, please re-"
+"enable them, at least for this site, or for HTTPS connections, or for “same-"
+"origin” requests."
+msgstr ""
+
+msgid ""
+"If you are using the tag or "
+"including the “Referrer-Policy: no-referrer” header, please remove them. The "
+"CSRF protection requires the “Referer” header to do strict referer checking. "
+"If you’re concerned about privacy, use alternatives like for links to third-party sites."
+msgstr ""
+
+msgid ""
+"You are seeing this message because this site requires a CSRF cookie when "
+"submitting forms. This cookie is required for security reasons, to ensure "
+"that your browser is not being hijacked by third parties."
+msgstr ""
+
+msgid ""
+"If you have configured your browser to disable cookies, please re-enable "
+"them, at least for this site, or for “same-origin” requests."
+msgstr ""
+
+msgid "More information is available with DEBUG=True."
+msgstr ""
+
+msgid "No year specified"
+msgstr "N'eus bet resisaet bloavezh ebet"
+
+msgid "Date out of range"
+msgstr ""
+
+msgid "No month specified"
+msgstr "N'eus bet resisaet miz ebet"
+
+msgid "No day specified"
+msgstr "N'eus bet resisaet deiz ebet"
+
+msgid "No week specified"
+msgstr "N'eus bet resisaet sizhun ebet"
+
+#, python-format
+msgid "No %(verbose_name_plural)s available"
+msgstr "N'eus %(verbose_name_plural)s ebet da gaout."
+
+#, python-format
+msgid ""
+"Future %(verbose_name_plural)s not available because %(class_name)s."
+"allow_future is False."
+msgstr ""
+"En dazont ne vo ket a %(verbose_name_plural)s rak faos eo %(class_name)s."
+"allow_future."
+
+#, python-format
+msgid "Invalid date string “%(datestr)s” given format “%(format)s”"
+msgstr ""
+
+#, python-format
+msgid "No %(verbose_name)s found matching the query"
+msgstr ""
+"N'eus bet kavet traezenn %(verbose_name)s ebet o klotaén gant ar goulenn"
+
+msgid "Page is not “last”, nor can it be converted to an int."
+msgstr ""
+
+#, python-format
+msgid "Invalid page (%(page_number)s): %(message)s"
+msgstr ""
+
+#, python-format
+msgid "Empty list and “%(class_name)s.allow_empty” is False."
+msgstr ""
+
+msgid "Directory indexes are not allowed here."
+msgstr "N'haller ket diskwel endalc'had ar c'havlec'h-mañ."
+
+#, python-format
+msgid "“%(path)s” does not exist"
+msgstr ""
+
+#, python-format
+msgid "Index of %(directory)s"
+msgstr "Meneger %(directory)s"
+
+msgid "The install worked successfully! Congratulations!"
+msgstr ""
+
+#, python-format
+msgid ""
+"View release notes for Django %(version)s"
+msgstr ""
+
+#, python-format
+msgid ""
+"You are seeing this page because DEBUG=True is in your settings file and you have not configured any "
+"URLs."
+msgstr ""
+
+msgid "Django Documentation"
+msgstr ""
+
+msgid "Topics, references, & how-to’s"
+msgstr ""
+
+msgid "Tutorial: A Polling App"
+msgstr ""
+
+msgid "Get started with Django"
+msgstr ""
+
+msgid "Django Community"
+msgstr ""
+
+msgid "Connect, get help, or contribute"
+msgstr ""
diff --git a/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/bs/LC_MESSAGES/django.mo b/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/bs/LC_MESSAGES/django.mo
new file mode 100644
index 00000000..064cc5d8
Binary files /dev/null and b/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/bs/LC_MESSAGES/django.mo differ
diff --git a/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/bs/LC_MESSAGES/django.po b/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/bs/LC_MESSAGES/django.po
new file mode 100644
index 00000000..a985b84e
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/bs/LC_MESSAGES/django.po
@@ -0,0 +1,1238 @@
+# This file is distributed under the same license as the Django package.
+#
+# Translators:
+# Filip Dupanović , 2011
+# Jannis Leidel , 2011
+msgid ""
+msgstr ""
+"Project-Id-Version: django\n"
+"Report-Msgid-Bugs-To: \n"
+"POT-Creation-Date: 2019-09-27 22:40+0200\n"
+"PO-Revision-Date: 2019-11-05 00:38+0000\n"
+"Last-Translator: Ramiro Morales\n"
+"Language-Team: Bosnian (http://www.transifex.com/django/django/language/"
+"bs/)\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Language: bs\n"
+"Plural-Forms: nplurals=3; plural=(n%10==1 && n%100!=11 ? 0 : n%10>=2 && n"
+"%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2);\n"
+
+msgid "Afrikaans"
+msgstr ""
+
+msgid "Arabic"
+msgstr "arapski"
+
+msgid "Asturian"
+msgstr ""
+
+msgid "Azerbaijani"
+msgstr "Azerbejdžanski"
+
+msgid "Bulgarian"
+msgstr "bugarski"
+
+msgid "Belarusian"
+msgstr ""
+
+msgid "Bengali"
+msgstr "bengalski"
+
+msgid "Breton"
+msgstr ""
+
+msgid "Bosnian"
+msgstr "bosanski"
+
+msgid "Catalan"
+msgstr "katalonski"
+
+msgid "Czech"
+msgstr "češki"
+
+msgid "Welsh"
+msgstr "velški"
+
+msgid "Danish"
+msgstr "danski"
+
+msgid "German"
+msgstr "njemački"
+
+msgid "Lower Sorbian"
+msgstr ""
+
+msgid "Greek"
+msgstr "grčki"
+
+msgid "English"
+msgstr "engleski"
+
+msgid "Australian English"
+msgstr ""
+
+msgid "British English"
+msgstr "Britanski engleski"
+
+msgid "Esperanto"
+msgstr ""
+
+msgid "Spanish"
+msgstr "španski"
+
+msgid "Argentinian Spanish"
+msgstr "Argentinski španski"
+
+msgid "Colombian Spanish"
+msgstr ""
+
+msgid "Mexican Spanish"
+msgstr "Meksički španski"
+
+msgid "Nicaraguan Spanish"
+msgstr "Nikaragvanski španski"
+
+msgid "Venezuelan Spanish"
+msgstr ""
+
+msgid "Estonian"
+msgstr "estonski"
+
+msgid "Basque"
+msgstr "baskijski"
+
+msgid "Persian"
+msgstr "persijski"
+
+msgid "Finnish"
+msgstr "finski"
+
+msgid "French"
+msgstr "francuski"
+
+msgid "Frisian"
+msgstr "frizijski"
+
+msgid "Irish"
+msgstr "irski"
+
+msgid "Scottish Gaelic"
+msgstr ""
+
+msgid "Galician"
+msgstr "galski"
+
+msgid "Hebrew"
+msgstr "hebrejski"
+
+msgid "Hindi"
+msgstr "hindi"
+
+msgid "Croatian"
+msgstr "hrvatski"
+
+msgid "Upper Sorbian"
+msgstr ""
+
+msgid "Hungarian"
+msgstr "mađarski"
+
+msgid "Armenian"
+msgstr ""
+
+msgid "Interlingua"
+msgstr ""
+
+msgid "Indonesian"
+msgstr "Indonežanski"
+
+msgid "Ido"
+msgstr ""
+
+msgid "Icelandic"
+msgstr "islandski"
+
+msgid "Italian"
+msgstr "italijanski"
+
+msgid "Japanese"
+msgstr "japanski"
+
+msgid "Georgian"
+msgstr "gruzijski"
+
+msgid "Kabyle"
+msgstr ""
+
+msgid "Kazakh"
+msgstr ""
+
+msgid "Khmer"
+msgstr "kambođanski"
+
+msgid "Kannada"
+msgstr "kanada"
+
+msgid "Korean"
+msgstr "korejski"
+
+msgid "Luxembourgish"
+msgstr ""
+
+msgid "Lithuanian"
+msgstr "litvanski"
+
+msgid "Latvian"
+msgstr "latvijski"
+
+msgid "Macedonian"
+msgstr "makedonski"
+
+msgid "Malayalam"
+msgstr "Malajalamski"
+
+msgid "Mongolian"
+msgstr "Mongolski"
+
+msgid "Marathi"
+msgstr ""
+
+msgid "Burmese"
+msgstr ""
+
+msgid "Norwegian Bokmål"
+msgstr ""
+
+msgid "Nepali"
+msgstr ""
+
+msgid "Dutch"
+msgstr "holandski"
+
+msgid "Norwegian Nynorsk"
+msgstr "Norveški novi"
+
+msgid "Ossetic"
+msgstr ""
+
+msgid "Punjabi"
+msgstr "Pandžabi"
+
+msgid "Polish"
+msgstr "poljski"
+
+msgid "Portuguese"
+msgstr "portugalski"
+
+msgid "Brazilian Portuguese"
+msgstr "brazilski portugalski"
+
+msgid "Romanian"
+msgstr "rumunski"
+
+msgid "Russian"
+msgstr "ruski"
+
+msgid "Slovak"
+msgstr "slovački"
+
+msgid "Slovenian"
+msgstr "slovenački"
+
+msgid "Albanian"
+msgstr "albanski"
+
+msgid "Serbian"
+msgstr "srpski"
+
+msgid "Serbian Latin"
+msgstr "srpski latinski"
+
+msgid "Swedish"
+msgstr "švedski"
+
+msgid "Swahili"
+msgstr ""
+
+msgid "Tamil"
+msgstr "tamilski"
+
+msgid "Telugu"
+msgstr "telugu"
+
+msgid "Thai"
+msgstr "tajlandski"
+
+msgid "Turkish"
+msgstr "turski"
+
+msgid "Tatar"
+msgstr ""
+
+msgid "Udmurt"
+msgstr ""
+
+msgid "Ukrainian"
+msgstr "ukrajinski"
+
+msgid "Urdu"
+msgstr "Urdu"
+
+msgid "Uzbek"
+msgstr ""
+
+msgid "Vietnamese"
+msgstr "vijetnamežanski"
+
+msgid "Simplified Chinese"
+msgstr "novokineski"
+
+msgid "Traditional Chinese"
+msgstr "starokineski"
+
+msgid "Messages"
+msgstr ""
+
+msgid "Site Maps"
+msgstr ""
+
+msgid "Static Files"
+msgstr ""
+
+msgid "Syndication"
+msgstr ""
+
+msgid "That page number is not an integer"
+msgstr ""
+
+msgid "That page number is less than 1"
+msgstr ""
+
+msgid "That page contains no results"
+msgstr ""
+
+msgid "Enter a valid value."
+msgstr "Unesite ispravnu vrijednost."
+
+msgid "Enter a valid URL."
+msgstr "Unesite ispravan URL."
+
+msgid "Enter a valid integer."
+msgstr ""
+
+msgid "Enter a valid email address."
+msgstr ""
+
+#. Translators: "letters" means latin letters: a-z and A-Z.
+msgid ""
+"Enter a valid “slug” consisting of letters, numbers, underscores or hyphens."
+msgstr ""
+
+msgid ""
+"Enter a valid “slug” consisting of Unicode letters, numbers, underscores, or "
+"hyphens."
+msgstr ""
+
+msgid "Enter a valid IPv4 address."
+msgstr "Unesite ispravnu IPv4 adresu."
+
+msgid "Enter a valid IPv6 address."
+msgstr ""
+
+msgid "Enter a valid IPv4 or IPv6 address."
+msgstr ""
+
+msgid "Enter only digits separated by commas."
+msgstr "Unesite samo brojke razdvojene zapetama."
+
+#, python-format
+msgid "Ensure this value is %(limit_value)s (it is %(show_value)s)."
+msgstr ""
+"Pobrinite se da je ova vrijednost %(limit_value)s (trenutno je "
+"%(show_value)s)."
+
+#, python-format
+msgid "Ensure this value is less than or equal to %(limit_value)s."
+msgstr "Ova vrijednost mora da bude manja ili jednaka %(limit_value)s."
+
+#, python-format
+msgid "Ensure this value is greater than or equal to %(limit_value)s."
+msgstr "Ova vrijednost mora biti veća ili jednaka %(limit_value)s."
+
+#, python-format
+msgid ""
+"Ensure this value has at least %(limit_value)d character (it has "
+"%(show_value)d)."
+msgid_plural ""
+"Ensure this value has at least %(limit_value)d characters (it has "
+"%(show_value)d)."
+msgstr[0] ""
+msgstr[1] ""
+msgstr[2] ""
+
+#, python-format
+msgid ""
+"Ensure this value has at most %(limit_value)d character (it has "
+"%(show_value)d)."
+msgid_plural ""
+"Ensure this value has at most %(limit_value)d characters (it has "
+"%(show_value)d)."
+msgstr[0] ""
+msgstr[1] ""
+msgstr[2] ""
+
+msgid "Enter a number."
+msgstr "Unesite broj."
+
+#, python-format
+msgid "Ensure that there are no more than %(max)s digit in total."
+msgid_plural "Ensure that there are no more than %(max)s digits in total."
+msgstr[0] ""
+msgstr[1] ""
+msgstr[2] ""
+
+#, python-format
+msgid "Ensure that there are no more than %(max)s decimal place."
+msgid_plural "Ensure that there are no more than %(max)s decimal places."
+msgstr[0] ""
+msgstr[1] ""
+msgstr[2] ""
+
+#, python-format
+msgid ""
+"Ensure that there are no more than %(max)s digit before the decimal point."
+msgid_plural ""
+"Ensure that there are no more than %(max)s digits before the decimal point."
+msgstr[0] ""
+msgstr[1] ""
+msgstr[2] ""
+
+#, python-format
+msgid ""
+"File extension “%(extension)s” is not allowed. Allowed extensions are: "
+"%(allowed_extensions)s."
+msgstr ""
+
+msgid "Null characters are not allowed."
+msgstr ""
+
+msgid "and"
+msgstr "i"
+
+#, python-format
+msgid "%(model_name)s with this %(field_labels)s already exists."
+msgstr ""
+
+#, python-format
+msgid "Value %(value)r is not a valid choice."
+msgstr ""
+
+msgid "This field cannot be null."
+msgstr "Ovo polje ne može ostati prazno."
+
+msgid "This field cannot be blank."
+msgstr "Ovo polje ne može biti prazno."
+
+#, python-format
+msgid "%(model_name)s with this %(field_label)s already exists."
+msgstr "%(model_name)s sa ovom vrijednošću %(field_label)s već postoji."
+
+#. Translators: The 'lookup_type' is one of 'date', 'year' or 'month'.
+#. Eg: "Title must be unique for pub_date year"
+#, python-format
+msgid ""
+"%(field_label)s must be unique for %(date_field_label)s %(lookup_type)s."
+msgstr ""
+
+#, python-format
+msgid "Field of type: %(field_type)s"
+msgstr "Polje tipa: %(field_type)s"
+
+#, python-format
+msgid "“%(value)s” value must be either True or False."
+msgstr ""
+
+#, python-format
+msgid "“%(value)s” value must be either True, False, or None."
+msgstr ""
+
+msgid "Boolean (Either True or False)"
+msgstr "Bulova vrijednost (True ili False)"
+
+#, python-format
+msgid "String (up to %(max_length)s)"
+msgstr "String (najviše %(max_length)s znakova)"
+
+msgid "Comma-separated integers"
+msgstr "Cijeli brojevi razdvojeni zapetama"
+
+#, python-format
+msgid ""
+"“%(value)s” value has an invalid date format. It must be in YYYY-MM-DD "
+"format."
+msgstr ""
+
+#, python-format
+msgid ""
+"“%(value)s” value has the correct format (YYYY-MM-DD) but it is an invalid "
+"date."
+msgstr ""
+
+msgid "Date (without time)"
+msgstr "Datum (bez vremena)"
+
+#, python-format
+msgid ""
+"“%(value)s” value has an invalid format. It must be in YYYY-MM-DD HH:MM[:ss[."
+"uuuuuu]][TZ] format."
+msgstr ""
+
+#, python-format
+msgid ""
+"“%(value)s” value has the correct format (YYYY-MM-DD HH:MM[:ss[.uuuuuu]]"
+"[TZ]) but it is an invalid date/time."
+msgstr ""
+
+msgid "Date (with time)"
+msgstr "Datum (sa vremenom)"
+
+#, python-format
+msgid "“%(value)s” value must be a decimal number."
+msgstr ""
+
+msgid "Decimal number"
+msgstr "Decimalni broj"
+
+#, python-format
+msgid ""
+"“%(value)s” value has an invalid format. It must be in [DD] [[HH:]MM:]ss[."
+"uuuuuu] format."
+msgstr ""
+
+msgid "Duration"
+msgstr ""
+
+msgid "Email address"
+msgstr "Email adresa"
+
+msgid "File path"
+msgstr "Putanja fajla"
+
+#, python-format
+msgid "“%(value)s” value must be a float."
+msgstr ""
+
+msgid "Floating point number"
+msgstr "Broj sa pokretnom zapetom"
+
+#, python-format
+msgid "“%(value)s” value must be an integer."
+msgstr ""
+
+msgid "Integer"
+msgstr "Cijeo broj"
+
+msgid "Big (8 byte) integer"
+msgstr "Big (8 bajtni) integer"
+
+msgid "IPv4 address"
+msgstr ""
+
+msgid "IP address"
+msgstr "IP adresa"
+
+#, python-format
+msgid "“%(value)s” value must be either None, True or False."
+msgstr ""
+
+msgid "Boolean (Either True, False or None)"
+msgstr "Bulova vrijednost (True, False ili None)"
+
+msgid "Positive integer"
+msgstr ""
+
+msgid "Positive small integer"
+msgstr ""
+
+#, python-format
+msgid "Slug (up to %(max_length)s)"
+msgstr ""
+
+msgid "Small integer"
+msgstr ""
+
+msgid "Text"
+msgstr "Tekst"
+
+#, python-format
+msgid ""
+"“%(value)s” value has an invalid format. It must be in HH:MM[:ss[.uuuuuu]] "
+"format."
+msgstr ""
+
+#, python-format
+msgid ""
+"“%(value)s” value has the correct format (HH:MM[:ss[.uuuuuu]]) but it is an "
+"invalid time."
+msgstr ""
+
+msgid "Time"
+msgstr "Vrijeme"
+
+msgid "URL"
+msgstr "URL"
+
+msgid "Raw binary data"
+msgstr ""
+
+#, python-format
+msgid "“%(value)s” is not a valid UUID."
+msgstr ""
+
+msgid "Universally unique identifier"
+msgstr ""
+
+msgid "File"
+msgstr ""
+
+msgid "Image"
+msgstr ""
+
+#, python-format
+msgid "%(model)s instance with %(field)s %(value)r does not exist."
+msgstr ""
+
+msgid "Foreign Key (type determined by related field)"
+msgstr "Strani ključ (tip određen povezanim poljem)"
+
+msgid "One-to-one relationship"
+msgstr "Jedan-na-jedan odnos"
+
+#, python-format
+msgid "%(from)s-%(to)s relationship"
+msgstr ""
+
+#, python-format
+msgid "%(from)s-%(to)s relationships"
+msgstr ""
+
+msgid "Many-to-many relationship"
+msgstr "Više-na-više odnos"
+
+#. Translators: If found as last label character, these punctuation
+#. characters will prevent the default label_suffix to be appended to the
+#. label
+msgid ":?.!"
+msgstr ""
+
+msgid "This field is required."
+msgstr "Ovo polje se mora popuniti."
+
+msgid "Enter a whole number."
+msgstr "Unesite cijeo broj."
+
+msgid "Enter a valid date."
+msgstr "Unesite ispravan datum."
+
+msgid "Enter a valid time."
+msgstr "Unesite ispravno vrijeme."
+
+msgid "Enter a valid date/time."
+msgstr "Unesite ispravan datum/vrijeme."
+
+msgid "Enter a valid duration."
+msgstr ""
+
+#, python-brace-format
+msgid "The number of days must be between {min_days} and {max_days}."
+msgstr ""
+
+msgid "No file was submitted. Check the encoding type on the form."
+msgstr "Fajl nije prebačen. Provjerite tip enkodiranja formulara."
+
+msgid "No file was submitted."
+msgstr "Fajl nije prebačen."
+
+msgid "The submitted file is empty."
+msgstr "Prebačen fajl je prazan."
+
+#, python-format
+msgid "Ensure this filename has at most %(max)d character (it has %(length)d)."
+msgid_plural ""
+"Ensure this filename has at most %(max)d characters (it has %(length)d)."
+msgstr[0] ""
+msgstr[1] ""
+msgstr[2] ""
+
+msgid "Please either submit a file or check the clear checkbox, not both."
+msgstr ""
+
+msgid ""
+"Upload a valid image. The file you uploaded was either not an image or a "
+"corrupted image."
+msgstr ""
+"Prebacite ispravan fajl. Fajl koji je prebačen ili nije slika, ili je "
+"oštećen."
+
+#, python-format
+msgid "Select a valid choice. %(value)s is not one of the available choices."
+msgstr ""
+"%(value)s nije među ponuđenim vrijednostima. Odaberite jednu od ponuđenih."
+
+msgid "Enter a list of values."
+msgstr "Unesite listu vrijednosti."
+
+msgid "Enter a complete value."
+msgstr ""
+
+msgid "Enter a valid UUID."
+msgstr ""
+
+#. Translators: This is the default suffix added to form field labels
+msgid ":"
+msgstr ""
+
+#, python-format
+msgid "(Hidden field %(name)s) %(error)s"
+msgstr ""
+
+msgid "ManagementForm data is missing or has been tampered with"
+msgstr ""
+
+#, python-format
+msgid "Please submit %d or fewer forms."
+msgid_plural "Please submit %d or fewer forms."
+msgstr[0] ""
+msgstr[1] ""
+msgstr[2] ""
+
+#, python-format
+msgid "Please submit %d or more forms."
+msgid_plural "Please submit %d or more forms."
+msgstr[0] ""
+msgstr[1] ""
+msgstr[2] ""
+
+msgid "Order"
+msgstr "Redoslijed"
+
+msgid "Delete"
+msgstr "Obriši"
+
+#, python-format
+msgid "Please correct the duplicate data for %(field)s."
+msgstr "Ispravite dupli sadržaj za polja: %(field)s."
+
+#, python-format
+msgid "Please correct the duplicate data for %(field)s, which must be unique."
+msgstr ""
+"Ispravite dupli sadržaj za polja: %(field)s, koji mora da bude jedinstven."
+
+#, python-format
+msgid ""
+"Please correct the duplicate data for %(field_name)s which must be unique "
+"for the %(lookup)s in %(date_field)s."
+msgstr ""
+"Ispravite dupli sadržaj za polja: %(field_name)s, koji mora da bude "
+"jedinstven za %(lookup)s u %(date_field)s."
+
+msgid "Please correct the duplicate values below."
+msgstr "Ispravite duple vrijednosti dole."
+
+msgid "The inline value did not match the parent instance."
+msgstr ""
+
+msgid "Select a valid choice. That choice is not one of the available choices."
+msgstr ""
+"Odabrana vrijednost nije među ponuđenima. Odaberite jednu od ponuđenih."
+
+#, python-format
+msgid "“%(pk)s” is not a valid value."
+msgstr ""
+
+#, python-format
+msgid ""
+"%(datetime)s couldn’t be interpreted in time zone %(current_timezone)s; it "
+"may be ambiguous or it may not exist."
+msgstr ""
+
+msgid "Clear"
+msgstr "Očisti"
+
+msgid "Currently"
+msgstr "Trenutno"
+
+msgid "Change"
+msgstr "Izmjeni"
+
+msgid "Unknown"
+msgstr "Nepoznato"
+
+msgid "Yes"
+msgstr "Da"
+
+msgid "No"
+msgstr "Ne"
+
+msgid "Year"
+msgstr ""
+
+msgid "Month"
+msgstr ""
+
+msgid "Day"
+msgstr ""
+
+msgid "yes,no,maybe"
+msgstr "da,ne,možda"
+
+#, python-format
+msgid "%(size)d byte"
+msgid_plural "%(size)d bytes"
+msgstr[0] ""
+msgstr[1] ""
+msgstr[2] ""
+
+#, python-format
+msgid "%s KB"
+msgstr "%s KB"
+
+#, python-format
+msgid "%s MB"
+msgstr "%s MB"
+
+#, python-format
+msgid "%s GB"
+msgstr "%s GB"
+
+#, python-format
+msgid "%s TB"
+msgstr "%s TB"
+
+#, python-format
+msgid "%s PB"
+msgstr "%s PB"
+
+msgid "p.m."
+msgstr "po p."
+
+msgid "a.m."
+msgstr "prije p."
+
+msgid "PM"
+msgstr "PM"
+
+msgid "AM"
+msgstr "AM"
+
+msgid "midnight"
+msgstr "ponoć"
+
+msgid "noon"
+msgstr "podne"
+
+msgid "Monday"
+msgstr "ponedjeljak"
+
+msgid "Tuesday"
+msgstr "utorak"
+
+msgid "Wednesday"
+msgstr "srijeda"
+
+msgid "Thursday"
+msgstr "četvrtak"
+
+msgid "Friday"
+msgstr "petak"
+
+msgid "Saturday"
+msgstr "subota"
+
+msgid "Sunday"
+msgstr "nedjelja"
+
+msgid "Mon"
+msgstr "pon."
+
+msgid "Tue"
+msgstr "uto."
+
+msgid "Wed"
+msgstr "sri."
+
+msgid "Thu"
+msgstr "čet."
+
+msgid "Fri"
+msgstr "pet."
+
+msgid "Sat"
+msgstr "sub."
+
+msgid "Sun"
+msgstr "ned."
+
+msgid "January"
+msgstr "januar"
+
+msgid "February"
+msgstr "februar"
+
+msgid "March"
+msgstr "mart"
+
+msgid "April"
+msgstr "april"
+
+msgid "May"
+msgstr "maj"
+
+msgid "June"
+msgstr "juni"
+
+msgid "July"
+msgstr "juli"
+
+msgid "August"
+msgstr "august"
+
+msgid "September"
+msgstr "septembar"
+
+msgid "October"
+msgstr "oktobar"
+
+msgid "November"
+msgstr "novembar"
+
+msgid "December"
+msgstr "decembar"
+
+msgid "jan"
+msgstr "jan."
+
+msgid "feb"
+msgstr "feb."
+
+msgid "mar"
+msgstr "mar."
+
+msgid "apr"
+msgstr "apr."
+
+msgid "may"
+msgstr "maj."
+
+msgid "jun"
+msgstr "jun."
+
+msgid "jul"
+msgstr "jul."
+
+msgid "aug"
+msgstr "aug."
+
+msgid "sep"
+msgstr "sep."
+
+msgid "oct"
+msgstr "okt."
+
+msgid "nov"
+msgstr "nov."
+
+msgid "dec"
+msgstr "dec."
+
+msgctxt "abbrev. month"
+msgid "Jan."
+msgstr "Jan."
+
+msgctxt "abbrev. month"
+msgid "Feb."
+msgstr "Feb."
+
+msgctxt "abbrev. month"
+msgid "March"
+msgstr "Mart"
+
+msgctxt "abbrev. month"
+msgid "April"
+msgstr "April"
+
+msgctxt "abbrev. month"
+msgid "May"
+msgstr "Maj"
+
+msgctxt "abbrev. month"
+msgid "June"
+msgstr "Juni"
+
+msgctxt "abbrev. month"
+msgid "July"
+msgstr "juli"
+
+msgctxt "abbrev. month"
+msgid "Aug."
+msgstr "august"
+
+msgctxt "abbrev. month"
+msgid "Sept."
+msgstr "septembar"
+
+msgctxt "abbrev. month"
+msgid "Oct."
+msgstr "oktobar"
+
+msgctxt "abbrev. month"
+msgid "Nov."
+msgstr "novembar"
+
+msgctxt "abbrev. month"
+msgid "Dec."
+msgstr "decembar"
+
+msgctxt "alt. month"
+msgid "January"
+msgstr "januar"
+
+msgctxt "alt. month"
+msgid "February"
+msgstr "februar"
+
+msgctxt "alt. month"
+msgid "March"
+msgstr "mart"
+
+msgctxt "alt. month"
+msgid "April"
+msgstr "april"
+
+msgctxt "alt. month"
+msgid "May"
+msgstr "maj"
+
+msgctxt "alt. month"
+msgid "June"
+msgstr "juni"
+
+msgctxt "alt. month"
+msgid "July"
+msgstr "juli"
+
+msgctxt "alt. month"
+msgid "August"
+msgstr "august"
+
+msgctxt "alt. month"
+msgid "September"
+msgstr "septembar"
+
+msgctxt "alt. month"
+msgid "October"
+msgstr "oktobar"
+
+msgctxt "alt. month"
+msgid "November"
+msgstr "Novembar"
+
+msgctxt "alt. month"
+msgid "December"
+msgstr "decembar"
+
+msgid "This is not a valid IPv6 address."
+msgstr ""
+
+#, python-format
+msgctxt "String to return when truncating text"
+msgid "%(truncated_text)s…"
+msgstr ""
+
+msgid "or"
+msgstr "ili"
+
+#. Translators: This string is used as a separator between list elements
+msgid ", "
+msgstr ", "
+
+#, python-format
+msgid "%d year"
+msgid_plural "%d years"
+msgstr[0] ""
+msgstr[1] ""
+msgstr[2] ""
+
+#, python-format
+msgid "%d month"
+msgid_plural "%d months"
+msgstr[0] ""
+msgstr[1] ""
+msgstr[2] ""
+
+#, python-format
+msgid "%d week"
+msgid_plural "%d weeks"
+msgstr[0] ""
+msgstr[1] ""
+msgstr[2] ""
+
+#, python-format
+msgid "%d day"
+msgid_plural "%d days"
+msgstr[0] ""
+msgstr[1] ""
+msgstr[2] ""
+
+#, python-format
+msgid "%d hour"
+msgid_plural "%d hours"
+msgstr[0] ""
+msgstr[1] ""
+msgstr[2] ""
+
+#, python-format
+msgid "%d minute"
+msgid_plural "%d minutes"
+msgstr[0] ""
+msgstr[1] ""
+msgstr[2] ""
+
+msgid "0 minutes"
+msgstr ""
+
+msgid "Forbidden"
+msgstr ""
+
+msgid "CSRF verification failed. Request aborted."
+msgstr ""
+
+msgid ""
+"You are seeing this message because this HTTPS site requires a “Referer "
+"header” to be sent by your Web browser, but none was sent. This header is "
+"required for security reasons, to ensure that your browser is not being "
+"hijacked by third parties."
+msgstr ""
+
+msgid ""
+"If you have configured your browser to disable “Referer” headers, please re-"
+"enable them, at least for this site, or for HTTPS connections, or for “same-"
+"origin” requests."
+msgstr ""
+
+msgid ""
+"If you are using the tag or "
+"including the “Referrer-Policy: no-referrer” header, please remove them. The "
+"CSRF protection requires the “Referer” header to do strict referer checking. "
+"If you’re concerned about privacy, use alternatives like for links to third-party sites."
+msgstr ""
+
+msgid ""
+"You are seeing this message because this site requires a CSRF cookie when "
+"submitting forms. This cookie is required for security reasons, to ensure "
+"that your browser is not being hijacked by third parties."
+msgstr ""
+
+msgid ""
+"If you have configured your browser to disable cookies, please re-enable "
+"them, at least for this site, or for “same-origin” requests."
+msgstr ""
+
+msgid "More information is available with DEBUG=True."
+msgstr ""
+
+msgid "No year specified"
+msgstr "Godina nije naznačena"
+
+msgid "Date out of range"
+msgstr ""
+
+msgid "No month specified"
+msgstr "Mjesec nije naznačen"
+
+msgid "No day specified"
+msgstr "Dan nije naznačen"
+
+msgid "No week specified"
+msgstr "Sedmica nije naznačena"
+
+#, python-format
+msgid "No %(verbose_name_plural)s available"
+msgstr ""
+
+#, python-format
+msgid ""
+"Future %(verbose_name_plural)s not available because %(class_name)s."
+"allow_future is False."
+msgstr ""
+
+#, python-format
+msgid "Invalid date string “%(datestr)s” given format “%(format)s”"
+msgstr ""
+
+#, python-format
+msgid "No %(verbose_name)s found matching the query"
+msgstr ""
+
+msgid "Page is not “last”, nor can it be converted to an int."
+msgstr ""
+
+#, python-format
+msgid "Invalid page (%(page_number)s): %(message)s"
+msgstr ""
+
+#, python-format
+msgid "Empty list and “%(class_name)s.allow_empty” is False."
+msgstr ""
+
+msgid "Directory indexes are not allowed here."
+msgstr ""
+
+#, python-format
+msgid "“%(path)s” does not exist"
+msgstr ""
+
+#, python-format
+msgid "Index of %(directory)s"
+msgstr ""
+
+msgid "Django: the Web framework for perfectionists with deadlines."
+msgstr ""
+
+#, python-format
+msgid ""
+"View release notes for Django %(version)s"
+msgstr ""
+
+msgid "The install worked successfully! Congratulations!"
+msgstr ""
+
+#, python-format
+msgid ""
+"You are seeing this page because DEBUG=True is in your settings file and you have not configured any "
+"URLs."
+msgstr ""
+
+msgid "Django Documentation"
+msgstr ""
+
+msgid "Topics, references, & how-to’s"
+msgstr ""
+
+msgid "Tutorial: A Polling App"
+msgstr ""
+
+msgid "Get started with Django"
+msgstr ""
+
+msgid "Django Community"
+msgstr ""
+
+msgid "Connect, get help, or contribute"
+msgstr ""
diff --git a/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/bs/__init__.py b/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/bs/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/bs/formats.py b/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/bs/formats.py
new file mode 100644
index 00000000..a15e7099
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/bs/formats.py
@@ -0,0 +1,21 @@
+# This file is distributed under the same license as the Django package.
+#
+# The *_FORMAT strings use the Django date format syntax,
+# see https://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
+DATE_FORMAT = "j. N Y."
+TIME_FORMAT = "G:i"
+DATETIME_FORMAT = "j. N. Y. G:i T"
+YEAR_MONTH_FORMAT = "F Y."
+MONTH_DAY_FORMAT = "j. F"
+SHORT_DATE_FORMAT = "Y M j"
+# SHORT_DATETIME_FORMAT =
+# FIRST_DAY_OF_WEEK =
+
+# The *_INPUT_FORMATS strings use the Python strftime format syntax,
+# see https://docs.python.org/library/datetime.html#strftime-strptime-behavior
+# DATE_INPUT_FORMATS =
+# TIME_INPUT_FORMATS =
+# DATETIME_INPUT_FORMATS =
+DECIMAL_SEPARATOR = ","
+THOUSAND_SEPARATOR = "."
+# NUMBER_GROUPING =
diff --git a/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/ca/LC_MESSAGES/django.mo b/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/ca/LC_MESSAGES/django.mo
new file mode 100644
index 00000000..208f4a4e
Binary files /dev/null and b/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/ca/LC_MESSAGES/django.mo differ
diff --git a/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/ca/LC_MESSAGES/django.po b/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/ca/LC_MESSAGES/django.po
new file mode 100644
index 00000000..01e4dda2
--- /dev/null
+++ b/Voltage Vikings/lib/python3.9/site-packages/django/conf/locale/ca/LC_MESSAGES/django.po
@@ -0,0 +1,1340 @@
+# This file is distributed under the same license as the Django package.
+#
+# Translators:
+# Antoni Aloy , 2012,2015-2017,2021-2022
+# Carles Barrobés , 2011-2012,2014,2020
+# duub qnnp, 2015
+# Emilio Carrion, 2022
+# Gil Obradors Via , 2019
+# Gil Obradors Via , 2019
+# Jannis Leidel , 2011
+# Manel Clos , 2020
+# Manuel Miranda , 2015
+# Mariusz Felisiak